Mirror of https://github.com/kestra-io/kestra.git (synced 2025-12-25 20:00:14 -05:00)

Compare commits: executor_v ... fix/load-i (258 commits)
(Commit table: 258 commit SHA1s, beginning with 14a1a6c104 and ending with 43f02e7e33; the author and date columns were empty in this capture.)
@@ -32,11 +32,6 @@ In the meantime, you can move onto the next step...

### Development:

- (Optional) By default, your dev server will target `localhost:8080`. If your backend is running elsewhere, you can create `.env.development.local` under the `ui` folder with this content:
```
VITE_APP_API_URL={myApiUrl}
```

- Navigate into the `ui` folder and run `npm install` to install the dependencies for the frontend project.

- Now go to the `cli/src/main/resources` folder and create an `application-override.yml` file.
.github/CONTRIBUTING.md (vendored): 2 changes

@@ -32,7 +32,7 @@ Watch out for duplicates! If you are creating a new issue, please check existing
#### Requirements
The following dependencies are required to build Kestra locally:
- Java 21+
- Node 18+ and npm
- Node 22+ and npm 10+
- Python 3, pip and python venv
- Docker & Docker Compose
- an IDE (Intellij IDEA, Eclipse or VS Code)
.github/dependabot.yml (vendored): 4 changes

@@ -26,6 +26,10 @@ updates:
open-pull-requests-limit: 50
labels:
- "dependency-upgrade"
ignore:
- dependency-name: "com.google.protobuf:*"
# Ignore versions of Protobuf that are equal to or greater than 4.0.0 as Orc still uses 3
versions: [ "[4,)" ]

# Maintain dependencies for NPM modules
- package-ecosystem: "npm"
.github/workflows/auto-translate-ui-keys.yml (vendored): 4 changes

@@ -2,7 +2,7 @@ name: Auto-Translate UI keys and create PR
on:
schedule:
- cron: "0 9-21/3 * * *" # Every 3 hours from 9 AM to 9 PM
- cron: "0 9-21/3 * * 1-5" # Every 3 hours from 9 AM to 9 PM, Monday to Friday
workflow_dispatch:
inputs:
retranslate_modified_keys:

@@ -39,7 +39,7 @@ jobs:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
- name: Set up Node
uses: actions/setup-node@v5
uses: actions/setup-node@v6
with:
node-version: "20.x"
.github/workflows/main-build.yml (vendored): 13 changes

@@ -68,20 +68,23 @@ jobs:
end:
runs-on: ubuntu-latest
needs: [backend-tests, frontend-tests, publish-develop-docker, publish-develop-maven]
if: always()
if: "always() && github.repository == 'kestra-io/kestra'"
steps:
- run: echo "debug repo ${{github.repository}} ref ${{github.ref}} res ${{needs.publish-develop-maven.result}} jobStatus ${{job.status}} isNotFork ${{github.repository == 'kestra-io/kestra'}} isDevelop ${{github.ref == 'refs/heads/develop'}}"
- run: echo "end CI of failed or success"
- name: Trigger EE Workflow
uses: peter-evans/repository-dispatch@v4
if: github.ref == 'refs/heads/develop' && needs.publish-develop-maven == 'success'
uses: peter-evans/repository-dispatch@5fc4efd1a4797ddb68ffd0714a238564e4cc0e6f # v4
if: "!contains(needs.*.result, 'failure') && github.ref == 'refs/heads/develop'"
with:
token: ${{ secrets.GH_PERSONAL_TOKEN }}
repository: kestra-io/kestra-ee
event-type: "oss-updated"

# Slack
- run: echo "mark job as failure to forward error to Slack action" && exit 1
if: ${{ contains(needs.*.result, 'failure') }}
- name: Slack - Notification
if: ${{ failure() && github.repository == 'kestra-io/kestra' && (github.ref == 'refs/heads/develop') }}
if: ${{ always() && contains(needs.*.result, 'failure') }}
uses: kestra-io/actions/composite/slack-status@main
with:
webhook-url: ${{ secrets.SLACK_WEBHOOK_URL }}
channel: 'C09FF36GKE1'
.github/workflows/pre-release.yml (vendored): 11 changes

@@ -5,6 +5,15 @@ on:
tags:
- 'v*'
workflow_dispatch:
inputs:
skip-test:
description: 'Skip test'
type: choice
required: true
default: 'false'
options:
- "true"
- "false"

jobs:
build-artifacts:

@@ -14,6 +23,7 @@ jobs:
backend-tests:
name: Backend tests
uses: kestra-io/actions/.github/workflows/kestra-oss-backend-tests.yml@main
if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
secrets:
GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

@@ -23,6 +33,7 @@ jobs:
frontend-tests:
name: Frontend tests
uses: kestra-io/actions/.github/workflows/kestra-oss-frontend-tests.yml@main
if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
secrets:
GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
.github/workflows/vulnerabilities-check.yml (vendored): 6 changes

@@ -43,7 +43,7 @@ jobs:
# Upload dependency check report
- name: Upload dependency check report
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v5
if: ${{ always() }}
with:
name: dependency-check-report

@@ -71,7 +71,7 @@ jobs:
# Run Trivy image scan for Docker vulnerabilities, see https://github.com/aquasecurity/trivy-action
- name: Docker Vulnerabilities Check
uses: aquasecurity/trivy-action@0.33.1
uses: aquasecurity/trivy-action@b6643a29fecd7f34b3597bc6acb0a98b03d33ff8 # 0.33.1
with:
image-ref: kestra/kestra:develop
format: 'template'

@@ -108,7 +108,7 @@ jobs:
# Run Trivy image scan for Docker vulnerabilities, see https://github.com/aquasecurity/trivy-action
- name: Docker Vulnerabilities Check
uses: aquasecurity/trivy-action@0.33.1
uses: aquasecurity/trivy-action@b6643a29fecd7f34b3597bc6acb0a98b03d33ff8 # 0.33.1
with:
image-ref: kestra/kestra:latest
format: table
.plugins: 1 change

@@ -66,6 +66,7 @@
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-sybase:LATEST
#plugin-jenkins:io.kestra.plugin:plugin-jenkins:LATEST
#plugin-jira:io.kestra.plugin:plugin-jira:LATEST
#plugin-jms:io.kestra.plugin:plugin-jms:LATEST
#plugin-kafka:io.kestra.plugin:plugin-kafka:LATEST
#plugin-kestra:io.kestra.plugin:plugin-kestra:LATEST
#plugin-kubernetes:io.kestra.plugin:plugin-kubernetes:LATEST
@@ -68,6 +68,12 @@ Kestra is an open-source, event-driven orchestration platform that makes both **

## 🚀 Quick Start

### Launch on AWS (CloudFormation)

Deploy Kestra on AWS using our CloudFormation template:

[](https://console.aws.amazon.com/cloudformation/home#/stacks/create/review?templateURL=https://kestra-deployment-templates.s3.eu-west-3.amazonaws.com/aws/cloudformation/ec2-rds-s3/kestra-oss.yaml&stackName=kestra-oss)

### Get Started Locally in 5 Minutes

#### Launch Kestra in Docker

@@ -98,7 +104,7 @@ If you're on Windows and use WSL (Linux-based environment in Windows):
```bash
docker run --pull=always --rm -it -p 8080:8080 --user=root \
-v "/var/run/docker.sock:/var/run/docker.sock" \
-v "C:/Temp:/tmp" kestra/kestra:latest server local
-v "/mnt/c/Temp:/tmp" kestra/kestra:latest server local
```

Check our [Installation Guide](https://kestra.io/docs/installation) for other deployment options (Docker Compose, Podman, Kubernetes, AWS, GCP, Azure, and more).
@@ -21,7 +21,7 @@ plugins {
// test
id "com.adarshr.test-logger" version "4.0.0"
id "org.sonarqube" version "6.3.1.5724"
id "org.sonarqube" version "7.0.1.6134"
id 'jacoco-report-aggregation'
// helper

@@ -37,7 +37,7 @@ plugins {
id "com.vanniktech.maven.publish" version "0.34.0"
// OWASP dependency check
id "org.owasp.dependencycheck" version "12.1.6" apply false
id "org.owasp.dependencycheck" version "12.1.8" apply false
}
idea {

@@ -372,7 +372,7 @@ tasks.named('testCodeCoverageReport') {
subprojects {
sonar {
properties {
property "sonar.coverage.jacoco.xmlReportPaths", "$projectDir.parentFile.path/build/reports/jacoco/testCodeCoverageReport/testCodeCoverageReport.xml,$projectDir.parentFile.path/build/reports/jacoco/test//testCodeCoverageReport.xml"
property "sonar.coverage.jacoco.xmlReportPaths", "$projectDir.parentFile.path/build/reports/jacoco/testCodeCoverageReport/testCodeCoverageReport.xml,$projectDir.parentFile.path/build/reports/jacoco/test/testCodeCoverageReport.xml"
}
}
}
@@ -117,7 +117,7 @@ public abstract class AbstractValidateCommand extends AbstractApiCommand {
try(DefaultHttpClient client = client()) {
MutableHttpRequest<String> request = HttpRequest
.POST(apiUri("/flows/validate", tenantService.getTenantId(tenantId)), body).contentType(MediaType.APPLICATION_YAML);
.POST(apiUri("/flows/validate", tenantService.getTenantIdAndAllowEETenants(tenantId)), body).contentType(MediaType.APPLICATION_YAML);
List<ValidateConstraintViolation> validations = client.toBlocking().retrieve(
this.requestOptions(request),
@@ -24,7 +24,8 @@ public class FlowValidateCommand extends AbstractValidateCommand {
private FlowService flowService;
@Inject
private TenantIdSelectorService tenantService;
private TenantIdSelectorService tenantIdSelectorService;
@Override
public Integer call() throws Exception {

@@ -39,7 +40,7 @@ public class FlowValidateCommand extends AbstractValidateCommand {
FlowWithSource flow = (FlowWithSource) object;
List<String> warnings = new ArrayList<>();
warnings.addAll(flowService.deprecationPaths(flow).stream().map(deprecation -> deprecation + " is deprecated").toList());
warnings.addAll(flowService.warnings(flow, tenantService.getTenantId(tenantId)));
warnings.addAll(flowService.warnings(flow, tenantIdSelectorService.getTenantIdAndAllowEETenants(tenantId)));
return warnings;
},
(Object object) -> {
@@ -64,7 +64,7 @@ public class FlowNamespaceUpdateCommand extends AbstractServiceNamespaceUpdateCo
}
try(DefaultHttpClient client = client()) {
MutableHttpRequest<String> request = HttpRequest
.POST(apiUri("/flows/", tenantService.getTenantId(tenantId)) + namespace + "?delete=" + delete, body).contentType(MediaType.APPLICATION_YAML);
.POST(apiUri("/flows/", tenantService.getTenantIdAndAllowEETenants(tenantId)) + namespace + "?delete=" + delete, body).contentType(MediaType.APPLICATION_YAML);
List<UpdateResult> updated = client.toBlocking().retrieve(
this.requestOptions(request),
@@ -0,0 +1,26 @@
package io.kestra.cli.commands.migrations;

import io.kestra.cli.AbstractCommand;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;

@CommandLine.Command(
name = "metadata",
description = "populate metadata for entities"
)
@Slf4j
public class MetadataMigrationCommand extends AbstractCommand {
@Inject
private MetadataMigrationService metadataMigrationService;

@Override
public Integer call() throws Exception {
super.call();
int returnCode = metadataMigrationService.migrateMetadata();
if (returnCode == 0) {
System.out.println("✅ Metadata migration complete.");
}
return returnCode;
}
}
@@ -0,0 +1,92 @@
package io.kestra.cli.commands.migrations;

import io.kestra.core.models.kv.PersistedKvMetadata;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.repositories.KvMetadataRepositoryInterface;
import io.kestra.core.storages.FileAttributes;
import io.kestra.core.storages.StorageContext;
import io.kestra.core.storages.StorageInterface;
import io.kestra.core.storages.kv.InternalKVStore;
import io.kestra.core.storages.kv.KVEntry;
import io.kestra.core.tenant.TenantService;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;
import java.time.Instant;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
import static io.kestra.core.utils.Rethrow.throwFunction;

@Singleton
public class MetadataMigrationService {
@Inject
private TenantService tenantService;

@Inject
private FlowRepositoryInterface flowRepository;

@Inject
private KvMetadataRepositoryInterface kvMetadataRepository;

@Inject
private StorageInterface storageInterface;

protected Map<String, List<String>> namespacesPerTenant() {
String tenantId = tenantService.resolveTenant();
return Map.of(tenantId, flowRepository.findDistinctNamespace(tenantId));
}

public int migrateMetadata() {
try {
kvMigration();
} catch (IOException e) {
System.err.println("❌ KV metadata migration failed: " + e.getMessage());
e.printStackTrace();
return 1;
}
return 0;
}

private void kvMigration() throws IOException {
this.namespacesPerTenant().entrySet().stream()
.flatMap(namespacesForTenant -> namespacesForTenant.getValue().stream().map(namespace -> Map.entry(namespacesForTenant.getKey(), namespace)))
.flatMap(throwFunction(namespaceForTenant -> {
InternalKVStore kvStore = new InternalKVStore(namespaceForTenant.getKey(), namespaceForTenant.getValue(), storageInterface, kvMetadataRepository);
List<FileAttributes> list = listAllFromStorage(storageInterface, namespaceForTenant.getKey(), namespaceForTenant.getValue());
Map<Boolean, List<KVEntry>> entriesByIsExpired = list.stream()
.map(throwFunction(fileAttributes -> KVEntry.from(namespaceForTenant.getValue(), fileAttributes)))
.collect(Collectors.partitioningBy(kvEntry -> Optional.ofNullable(kvEntry.expirationDate()).map(expirationDate -> Instant.now().isAfter(expirationDate)).orElse(false)));
entriesByIsExpired.get(true).forEach(kvEntry -> {
try {
storageInterface.delete(
namespaceForTenant.getKey(),
namespaceForTenant.getValue(),
kvStore.storageUri(kvEntry.key())
);
} catch (IOException e) {
throw new RuntimeException(e);
}
});
return entriesByIsExpired.get(false).stream().map(kvEntry -> PersistedKvMetadata.from(namespaceForTenant.getKey(), kvEntry));
}))
.forEach(kvMetadataRepository::save);
}

private static List<FileAttributes> listAllFromStorage(StorageInterface storage, String tenant, String namespace) throws IOException {
try {
return storage.list(tenant, namespace, URI.create(StorageContext.KESTRA_PROTOCOL + StorageContext.kvPrefix(namespace)));
} catch (FileNotFoundException e) {
return Collections.emptyList();
}
}
}
@@ -13,6 +13,7 @@ import picocli.CommandLine;
mixinStandardHelpOptions = true,
subcommands = {
TenantMigrationCommand.class,
MetadataMigrationCommand.class
}
)
@Slf4j
@@ -49,7 +49,7 @@ public class NamespaceFilesUpdateCommand extends AbstractApiCommand {
try (var files = Files.walk(from); DefaultHttpClient client = client()) {
if (delete) {
client.toBlocking().exchange(this.requestOptions(HttpRequest.DELETE(apiUri("/namespaces/", tenantService.getTenantId(tenantId)) + namespace + "/files?path=" + to, null)));
client.toBlocking().exchange(this.requestOptions(HttpRequest.DELETE(apiUri("/namespaces/", tenantService.getTenantIdAndAllowEETenants(tenantId)) + namespace + "/files?path=" + to, null)));
}
KestraIgnore kestraIgnore = new KestraIgnore(from);

@@ -67,7 +67,7 @@ public class NamespaceFilesUpdateCommand extends AbstractApiCommand {
client.toBlocking().exchange(
this.requestOptions(
HttpRequest.POST(
apiUri("/namespaces/", tenantService.getTenantId(tenantId)) + namespace + "/files?path=" + destination,
apiUri("/namespaces/", tenantService.getTenantIdAndAllowEETenants(tenantId)) + namespace + "/files?path=" + destination,
body
).contentType(MediaType.MULTIPART_FORM_DATA)
)
@@ -49,7 +49,7 @@ public class TemplateNamespaceUpdateCommand extends AbstractServiceNamespaceUpda
try (DefaultHttpClient client = client()) {
MutableHttpRequest<List<Template>> request = HttpRequest
.POST(apiUri("/templates/", tenantService.getTenantId(tenantId)) + namespace + "?delete=" + delete, templates);
.POST(apiUri("/templates/", tenantService.getTenantIdAndAllowEETenants(tenantId)) + namespace + "?delete=" + delete, templates);
List<UpdateResult> updated = client.toBlocking().retrieve(
this.requestOptions(request),
@@ -0,0 +1,69 @@
package io.kestra.cli.listeners;

import io.kestra.core.server.LocalServiceState;
import io.kestra.core.server.Service;
import io.kestra.core.server.ServiceRegistry;
import io.micronaut.context.annotation.Requires;
import io.micronaut.context.event.ApplicationEventListener;
import io.micronaut.context.event.ShutdownEvent;
import io.micronaut.core.annotation.Order;
import io.micronaut.core.order.Ordered;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import lombok.extern.slf4j.Slf4j;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ForkJoinPool;

/**
* Global application shutdown handler.
* This handler gets effectively invoked before {@link jakarta.annotation.PreDestroy} does.
*/
@Singleton
@Slf4j
@Order(Ordered.LOWEST_PRECEDENCE)
@Requires(property = "kestra.server-type")
public class GracefulEmbeddedServiceShutdownListener implements ApplicationEventListener<ShutdownEvent> {
@Inject
ServiceRegistry serviceRegistry;

/**
* {@inheritDoc}
**/
@Override
public boolean supports(ShutdownEvent event) {
return ApplicationEventListener.super.supports(event);
}

/**
* Wait for services' close actions
*
* @param event the event to respond to
*/
@Override
public void onApplicationEvent(ShutdownEvent event) {
List<LocalServiceState> states = serviceRegistry.all();
if (states.isEmpty()) {
return;
}

log.debug("Shutdown event received");

List<CompletableFuture<Void>> futures = states.stream()
.map(state -> CompletableFuture.runAsync(() -> closeService(state), ForkJoinPool.commonPool()))
.toList();

// Wait for all services to close, before shutting down the embedded server
CompletableFuture.allOf(futures.toArray(new CompletableFuture[0])).join();
}

private void closeService(LocalServiceState state) {
final Service service = state.service();
try {
service.unwrap().close();
} catch (Exception e) {
log.error("[Service id={}, type={}] Unexpected error on close", service.getId(), service.getType(), e);
}
}
}
@@ -16,4 +16,11 @@ public class TenantIdSelectorService {
}
return MAIN_TENANT;
}

public String getTenantIdAndAllowEETenants(String tenantId) {
if (StringUtils.isNotBlank(tenantId)){
return tenantId;
}
return MAIN_TENANT;
}
}
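For context, a quick behavior sketch of the new selector method (not part of the diff; the injected `tenantIdSelectorService` instance and the tenant values are illustrative):

```java
// A non-blank tenant is passed through unchanged, which lets the OSS CLI target
// EE tenants; blank input still falls back to MAIN_TENANT.
tenantIdSelectorService.getTenantIdAndAllowEETenants("some-ee-tenant"); // -> "some-ee-tenant"
tenantIdSelectorService.getTenantIdAndAllowEETenants("");               // -> MAIN_TENANT
```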
@@ -27,6 +27,26 @@ class FlowValidateCommandTest {
}
}

@Test
// github action kestra-io/validate-action requires being able to validate Flows from OSS CLI against a remote EE instance
void runForEEInstance() {
ByteArrayOutputStream out = new ByteArrayOutputStream();
System.setOut(new PrintStream(out));

try (ApplicationContext ctx = ApplicationContext.builder().deduceEnvironment(false).start()) {
String[] args = {
"--tenant",
"some-ee-tenant",
"--local",
"src/test/resources/helper/include.yaml"
};
Integer call = PicocliRunner.call(FlowValidateCommand.class, ctx, args);

assertThat(call).isZero();
assertThat(out.toString()).contains("✓ - io.kestra.cli / include");
}
}

@Test
void warning() {
ByteArrayOutputStream out = new ByteArrayOutputStream();
@@ -0,0 +1,125 @@
package io.kestra.cli.commands.migrations;

import io.kestra.cli.App;
import io.kestra.core.exceptions.ResourceExpiredException;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.models.kv.PersistedKvMetadata;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.repositories.KvMetadataRepositoryInterface;
import io.kestra.core.serializers.JacksonMapper;
import io.kestra.core.storages.StorageContext;
import io.kestra.core.storages.StorageInterface;
import io.kestra.core.storages.StorageObject;
import io.kestra.core.storages.kv.*;
import io.kestra.core.tenant.TenantService;
import io.kestra.core.utils.TestsUtils;
import io.kestra.plugin.core.log.Log;
import io.micronaut.configuration.picocli.PicocliRunner;
import io.micronaut.context.ApplicationContext;
import io.micronaut.context.env.Environment;
import io.micronaut.core.annotation.NonNull;
import org.junit.jupiter.api.Test;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.net.URI;
import java.time.Duration;
import java.time.Instant;
import java.util.List;
import java.util.Optional;
import static org.assertj.core.api.Assertions.assertThat;

public class MetadataMigrationCommandTest {
@Test
void run() throws IOException, ResourceExpiredException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
System.setOut(new PrintStream(out));
ByteArrayOutputStream err = new ByteArrayOutputStream();
System.setErr(new PrintStream(err));

try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
String namespace = TestsUtils.randomNamespace();
String key = "myKey";
StorageInterface storage = ctx.getBean(StorageInterface.class);
String description = "Some description";
String value = "someValue";
putOldKv(storage, namespace, key, description, value);

String anotherNamespace = TestsUtils.randomNamespace();
String anotherKey = "anotherKey";
String anotherDescription = "another description";
putOldKv(storage, anotherNamespace, anotherKey, anotherDescription, "anotherValue");

String tenantId = TenantService.MAIN_TENANT;

// Expired KV should not be migrated + should be purged from the storage
String expiredKey = "expiredKey";
putOldKv(storage, namespace, expiredKey, Instant.now().minus(Duration.ofMinutes(5)), "some expired description", "expiredValue");
assertThat(storage.exists(tenantId, null, getKvStorageUri(namespace, expiredKey))).isTrue();

KvMetadataRepositoryInterface kvMetadataRepository = ctx.getBean(KvMetadataRepositoryInterface.class);
assertThat(kvMetadataRepository.findByName(tenantId, namespace, key).isPresent()).isFalse();

String[] kvMetadataMigrationCommand = {
"migrate", "metadata"
};
PicocliRunner.call(App.class, ctx, kvMetadataMigrationCommand);

assertThat(out.toString()).contains("✅ Metadata migration complete.");
// Still it's not in the metadata repository because no flow exist to find that kv
assertThat(kvMetadataRepository.findByName(tenantId, namespace, key).isPresent()).isFalse();
assertThat(kvMetadataRepository.findByName(tenantId, anotherNamespace, anotherKey).isPresent()).isFalse();

FlowRepositoryInterface flowRepository = ctx.getBean(FlowRepositoryInterface.class);
flowRepository.create(GenericFlow.of(Flow.builder()
.tenantId(tenantId)
.id("a-flow")
.namespace(namespace)
.tasks(List.of(Log.builder().id("log").type(Log.class.getName()).message("logging").build()))
.build()));

out.reset();
PicocliRunner.call(App.class, ctx, kvMetadataMigrationCommand);

assertThat(out.toString()).contains("✅ Metadata migration complete.");
Optional<PersistedKvMetadata> foundKv = kvMetadataRepository.findByName(tenantId, namespace, key);
assertThat(foundKv.isPresent()).isTrue();
assertThat(foundKv.get().getDescription()).isEqualTo(description);
assertThat(kvMetadataRepository.findByName(tenantId, anotherNamespace, anotherKey).isPresent()).isFalse();

KVStore kvStore = new InternalKVStore(tenantId, namespace, storage, kvMetadataRepository);
Optional<KVEntry> actualKv = kvStore.get(key);
assertThat(actualKv.isPresent()).isTrue();
assertThat(actualKv.get().description()).isEqualTo(description);

Optional<KVValue> actualValue = kvStore.getValue(key);
assertThat(actualValue.isPresent()).isTrue();
assertThat(actualValue.get().value()).isEqualTo(value);

assertThat(kvMetadataRepository.findByName(tenantId, namespace, expiredKey).isPresent()).isFalse();
assertThat(storage.exists(tenantId, null, getKvStorageUri(namespace, expiredKey))).isFalse();
}
}

private static void putOldKv(StorageInterface storage, String namespace, String key, String description, String value) throws IOException {
putOldKv(storage, namespace, key, Instant.now().plus(Duration.ofMinutes(5)), description, value);
}

private static void putOldKv(StorageInterface storage, String namespace, String key, Instant expirationDate, String description, String value) throws IOException {
URI kvStorageUri = getKvStorageUri(namespace, key);
KVValueAndMetadata kvValueAndMetadata = new KVValueAndMetadata(new KVMetadata(description, expirationDate), value);
storage.put(TenantService.MAIN_TENANT, namespace, kvStorageUri, new StorageObject(
kvValueAndMetadata.metadataAsMap(),
new ByteArrayInputStream(JacksonMapper.ofIon().writeValueAsBytes(kvValueAndMetadata.value()))
));
}

private static @NonNull URI getKvStorageUri(String namespace, String key) {
return URI.create(StorageContext.KESTRA_PROTOCOL + StorageContext.kvPrefix(namespace) + "/" + key + ".ion");
}
}
@@ -7,6 +7,8 @@ import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Pattern;

import static io.kestra.core.utils.RegexPatterns.JAVA_IDENTIFIER_REGEX;

/**
* Top-level marker interface for Kestra's plugin of type App.
*/

@@ -18,6 +20,6 @@ public interface AppBlockInterface extends io.kestra.core.models.Plugin {
)
@NotNull
@NotBlank
@Pattern(regexp = "^[A-Za-z_$][A-Za-z0-9_$]*(\\.[A-Za-z_$][A-Za-z0-9_$]*)*$")
@Pattern(regexp = JAVA_IDENTIFIER_REGEX)
String getType();
}
@@ -7,6 +7,8 @@ import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Pattern;

import static io.kestra.core.utils.RegexPatterns.JAVA_IDENTIFIER_REGEX;

/**
* Top-level marker interface for Kestra's plugin of type App.
*/

@@ -18,6 +20,6 @@ public interface AppPluginInterface extends io.kestra.core.models.Plugin {
)
@NotNull
@NotBlank
@Pattern(regexp = "^[A-Za-z_$][A-Za-z0-9_$]*(\\.[A-Za-z_$][A-Za-z0-9_$]*)*$")
@Pattern(regexp = JAVA_IDENTIFIER_REGEX)
String getType();
}
@@ -15,6 +15,7 @@ import com.github.victools.jsonschema.generator.impl.DefinitionKey;
import com.github.victools.jsonschema.generator.naming.DefaultSchemaDefinitionNamingStrategy;
import com.github.victools.jsonschema.module.jackson.JacksonModule;
import com.github.victools.jsonschema.module.jackson.JacksonOption;
import com.github.victools.jsonschema.module.jackson.JsonUnwrappedDefinitionProvider;
import com.github.victools.jsonschema.module.jakarta.validation.JakartaValidationModule;
import com.github.victools.jsonschema.module.jakarta.validation.JakartaValidationOption;
import com.github.victools.jsonschema.module.swagger2.Swagger2Module;

@@ -45,6 +46,9 @@ import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import lombok.extern.slf4j.Slf4j;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.reflect.*;
import java.time.*;

@@ -58,7 +62,9 @@ import static io.kestra.core.docs.AbstractClassDocumentation.required;
import static io.kestra.core.serializers.JacksonMapper.MAP_TYPE_REFERENCE;
@Singleton
@Slf4j
public class JsonSchemaGenerator {
private static final List<Class<?>> TYPES_RESOLVED_AS_STRING = List.of(Duration.class, LocalTime.class, LocalDate.class, LocalDateTime.class, ZonedDateTime.class, OffsetDateTime.class, OffsetTime.class);
private static final List<Class<?>> SUBTYPE_RESOLUTION_EXCLUSION_FOR_PLUGIN_SCHEMA = List.of(Task.class, AbstractTrigger.class);

@@ -270,8 +276,22 @@ public class JsonSchemaGenerator {
.with(Option.DEFINITIONS_FOR_ALL_OBJECTS)
.with(Option.DEFINITION_FOR_MAIN_SCHEMA)
.with(Option.PLAIN_DEFINITION_KEYS)
.with(Option.ALLOF_CLEANUP_AT_THE_END);;
.with(Option.ALLOF_CLEANUP_AT_THE_END);

// HACK: Registered a custom JsonUnwrappedDefinitionProvider prior to the JacksonModule
// to be able to return an CustomDefinition with an empty node when the ResolvedType can't be found.
builder.forTypesInGeneral().withCustomDefinitionProvider(new JsonUnwrappedDefinitionProvider(){
@Override
public CustomDefinition provideCustomSchemaDefinition(ResolvedType javaType, SchemaGenerationContext context) {
try {
return super.provideCustomSchemaDefinition(javaType, context);
} catch (NoClassDefFoundError e) {
// This error happens when a non-supported plugin type exists in the classpath.
log.debug("Cannot create schema definition for type '{}'. Cause: NoClassDefFoundError", javaType.getTypeName());
return new CustomDefinition(context.getGeneratorConfig().createObjectNode(), true);
}
}
});
if (!draft7) {
builder.with(new JacksonModule(JacksonOption.IGNORE_TYPE_INFO_TRANSFORM));
} else {

@@ -300,6 +320,7 @@ public class JsonSchemaGenerator {
// inline some type
builder.forTypesInGeneral()
.withCustomDefinitionProvider(new CustomDefinitionProviderV2() {
@Override
public CustomDefinition provideCustomSchemaDefinition(ResolvedType javaType, SchemaGenerationContext context) {
if (javaType.isInstanceOf(Map.class) || javaType.isInstanceOf(Enum.class)) {
@@ -0,0 +1,15 @@
package io.kestra.core.exceptions;

public class InvalidTriggerConfigurationException extends KestraRuntimeException {
public InvalidTriggerConfigurationException() {
super();
}

public InvalidTriggerConfigurationException(String message) {
super(message);
}

public InvalidTriggerConfigurationException(String message, Throwable cause) {
super(message, cause);
}
}
@@ -91,11 +91,13 @@ public class HttpConfiguration {
@Deprecated
private final String proxyPassword;

@Schema(title = "The username for HTTP basic authentication.")
@Schema(title = "The username for HTTP basic authentication. " +
"Deprecated, use `auth` property with a `BasicAuthConfiguration` instance instead.")
@Deprecated
private final String basicAuthUser;

@Schema(title = "The password for HTTP basic authentication.")
@Schema(title = "The password for HTTP basic authentication. " +
"Deprecated, use `auth` property with a `BasicAuthConfiguration` instance instead.")
@Deprecated
private final String basicAuthPassword;
@@ -0,0 +1,7 @@
package io.kestra.core.models;

public enum FetchVersion {
LATEST,
OLD,
ALL
}
@@ -100,7 +100,7 @@ public record QueryFilter(
LABELS("labels") {
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS, Op.NOT_EQUALS);
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.IN, Op.NOT_IN, Op.CONTAINS);
}
},
FLOW_ID("flowId") {

@@ -109,6 +109,12 @@ public record QueryFilter(
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.REGEX);
}
},
UPDATED("updated") {
@Override
public List<Op> supportedOp() {
return List.of(Op.GREATER_THAN_OR_EQUAL_TO, Op.GREATER_THAN, Op.LESS_THAN_OR_EQUAL_TO, Op.LESS_THAN, Op.EQUALS, Op.NOT_EQUALS);
}
},
START_DATE("startDate") {
@Override
public List<Op> supportedOp() {

@@ -250,6 +256,25 @@ public record QueryFilter(
Field.START_DATE, Field.END_DATE, Field.TRIGGER_ID
);
}
},
SECRET_METADATA {
@Override
public List<Field> supportedField() {
return List.of(
Field.QUERY,
Field.NAMESPACE
);
}
},
KV_METADATA {
@Override
public List<Field> supportedField() {
return List.of(
Field.QUERY,
Field.NAMESPACE,
Field.UPDATED
);
}
};

public abstract List<Field> supportedField();
@@ -0,0 +1,3 @@
package io.kestra.core.models;

public record TenantAndNamespace(String tenantId, String namespace) {}
@@ -12,6 +12,8 @@ import lombok.experimental.SuperBuilder;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Pattern;

import static io.kestra.core.utils.RegexPatterns.JAVA_IDENTIFIER_REGEX;

@io.kestra.core.models.annotations.Plugin
@SuperBuilder
@Getter

@@ -20,6 +22,6 @@ import jakarta.validation.constraints.Pattern;
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
public abstract class Condition implements Plugin, Rethrow.PredicateChecked<ConditionContext, InternalException> {
@NotNull
@Pattern(regexp = "^[A-Za-z_$][A-Za-z0-9_$]*(\\.[A-Za-z_$][A-Za-z0-9_$]*)*$")
@Pattern(regexp = JAVA_IDENTIFIER_REGEX)
protected String type;
}
@@ -32,6 +32,8 @@ public class Dashboard implements HasUID, DeletedInterface {
private String tenantId;

@Hidden
@NotNull
@NotBlank
private String id;

@NotNull
@@ -20,6 +20,8 @@ import java.util.List;
import java.util.Map;
import java.util.Set;

import static io.kestra.core.utils.RegexPatterns.JAVA_IDENTIFIER_REGEX;

@SuperBuilder(toBuilder = true)
@Getter
@NoArgsConstructor

@@ -28,7 +30,7 @@ import java.util.Set;
public abstract class DataFilter<F extends Enum<F>, C extends ColumnDescriptor<F>> implements io.kestra.core.models.Plugin, IData<F> {
@NotNull
@NotBlank
@Pattern(regexp = "^[A-Za-z_$][A-Za-z0-9_$]*(\\.[A-Za-z_$][A-Za-z0-9_$]*)*$")
@Pattern(regexp = JAVA_IDENTIFIER_REGEX)
private String type;

private Map<String, C> columns;
@@ -19,6 +19,8 @@ import java.util.Collections;
import java.util.List;
import java.util.Set;

import static io.kestra.core.utils.RegexPatterns.JAVA_IDENTIFIER_REGEX;

@SuperBuilder(toBuilder = true)
@Getter
@NoArgsConstructor

@@ -27,7 +29,7 @@ import java.util.Set;
public abstract class DataFilterKPI<F extends Enum<F>, C extends ColumnDescriptor<F>> implements io.kestra.core.models.Plugin, IData<F> {
@NotNull
@NotBlank
@Pattern(regexp = "^[A-Za-z_$][A-Za-z0-9_$]*(\\.[A-Za-z_$][A-Za-z0-9_$]*)*$")
@Pattern(regexp = JAVA_IDENTIFIER_REGEX)
private String type;

private C columns;
@@ -12,6 +12,8 @@ import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.experimental.SuperBuilder;

import static io.kestra.core.utils.RegexPatterns.JAVA_IDENTIFIER_REGEX;

@SuperBuilder(toBuilder = true)
@Getter
@NoArgsConstructor

@@ -26,7 +28,7 @@ public abstract class Chart<P extends ChartOption> implements io.kestra.core.mod
@NotNull
@NotBlank
@Pattern(regexp = "^[A-Za-z_$][A-Za-z0-9_$]*(\\.[A-Za-z_$][A-Za-z0-9_$]*)*$")
@Pattern(regexp = JAVA_IDENTIFIER_REGEX)
protected String type;

@Valid
@@ -941,7 +941,15 @@ public class Execution implements DeletedInterface, TenantInterface {
for (TaskRun current : taskRuns) {
if (!MapUtils.isEmpty(current.getOutputs())) {
if (current.getIteration() != null) {
taskOutputs = MapUtils.merge(taskOutputs, outputs(current, byIds));
Map<String, Object> merged = MapUtils.merge(taskOutputs, outputs(current, byIds));
// If one of two of the map is null in the merge() method, we just return the other
// And if the not null map is a Variables (= read only), we cast it back to a simple
// hashmap to avoid taskOutputs becoming read-only
// i.e this happen in nested loopUntil tasks
if (merged instanceof Variables) {
merged = new HashMap<>(merged);
}
taskOutputs = merged;
} else {
taskOutputs.putAll(outputs(current, byIds));
}
@@ -10,6 +10,7 @@ import io.swagger.v3.oas.annotations.Hidden;
import jakarta.annotation.Nullable;
import lombok.Builder;
import lombok.Value;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.event.Level;
import jakarta.validation.constraints.NotNull;

@@ -120,6 +121,16 @@ public class LogEntry implements DeletedInterface, TenantInterface {
return logEntry.getTimestamp().toString() + " " + logEntry.getLevel() + " " + logEntry.getMessage();
}

public static String toPrettyString(LogEntry logEntry, Integer maxMessageSize) {
String message;
if (maxMessageSize != null && maxMessageSize > 0) {
message = StringUtils.truncate(logEntry.getMessage(), maxMessageSize);
} else {
message = logEntry.getMessage();
}
return logEntry.getTimestamp().toString() + " " + logEntry.getLevel() + " " + message;
}

public Map<String, String> toMap() {
return Stream
.of(
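A small usage sketch of the new overload above (not from the PR; `logEntry` is assumed to be an existing `LogEntry` instance):

```java
// Cap long messages before printing; null or a non-positive size keeps the full message.
String capped = LogEntry.toPrettyString(logEntry, 200);
String full = LogEntry.toPrettyString(logEntry, null);
```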
@@ -4,6 +4,7 @@ import com.fasterxml.jackson.annotation.JsonInclude;
import io.kestra.core.models.DeletedInterface;
import io.kestra.core.models.TenantInterface;
import io.kestra.core.models.executions.metrics.Counter;
import io.kestra.core.models.executions.metrics.Gauge;
import io.kestra.core.models.executions.metrics.Timer;
import io.swagger.v3.oas.annotations.Hidden;
import jakarta.annotation.Nullable;

@@ -82,6 +83,10 @@ public class MetricEntry implements DeletedInterface, TenantInterface {
return counter.getValue();
}

if (metricEntry instanceof Gauge gauge) {
return gauge.getValue();
}

if (metricEntry instanceof Timer timer) {
return (double) timer.getValue().toMillis();
}
@@ -3,7 +3,9 @@ package io.kestra.core.models.executions;
import com.fasterxml.jackson.annotation.JsonInclude;
import io.kestra.core.models.TenantInterface;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.tasks.FlowableTask;
import io.kestra.core.models.tasks.ResolvedTask;
import io.kestra.core.models.tasks.Task;
import io.kestra.core.models.tasks.retrys.AbstractRetry;
import io.kestra.core.utils.IdUtils;
import io.swagger.v3.oas.annotations.Hidden;

@@ -52,6 +54,7 @@ public class TaskRun implements TenantInterface {
@With
@JsonInclude(JsonInclude.Include.ALWAYS)
@Nullable
Variables outputs;

@NotNull

@@ -64,7 +67,6 @@ public class TaskRun implements TenantInterface {
Boolean dynamic;

// Set it to true to force execution even if the execution is killed
@Nullable
@With
Boolean forceExecution;

@@ -193,17 +195,17 @@ public class TaskRun implements TenantInterface {
taskRunBuilder.attempts = new ArrayList<>();

taskRunBuilder.attempts.add(TaskRunAttempt.builder()
.state(new State(this.state, State.Type.KILLED))
.state(new State(this.state, State.Type.RESUBMITTED))
.build()
);
} else {
ArrayList<TaskRunAttempt> taskRunAttempts = new ArrayList<>(taskRunBuilder.attempts);
TaskRunAttempt lastAttempt = taskRunAttempts.get(taskRunBuilder.attempts.size() - 1);
if (!lastAttempt.getState().isTerminated()) {
taskRunAttempts.set(taskRunBuilder.attempts.size() - 1, lastAttempt.withState(State.Type.KILLED));
taskRunAttempts.set(taskRunBuilder.attempts.size() - 1, lastAttempt.withState(State.Type.RESUBMITTED));
} else {
taskRunAttempts.add(TaskRunAttempt.builder()
.state(new State().withState(State.Type.KILLED))
.state(new State().withState(State.Type.RESUBMITTED))
.build()
);
}

@@ -217,7 +219,7 @@ public class TaskRun implements TenantInterface {
public boolean isSame(TaskRun taskRun) {
return this.getId().equals(taskRun.getId()) &&
((this.getValue() == null && taskRun.getValue() == null) || (this.getValue() != null && this.getValue().equals(taskRun.getValue()))) &&
((this.getIteration() == null && taskRun.getIteration() == null) || (this.getIteration() != null && this.getIteration().equals(taskRun.getIteration()))) ;
((this.getIteration() == null && taskRun.getIteration() == null) || (this.getIteration() != null && this.getIteration().equals(taskRun.getIteration())));
}

public String toString(boolean pretty) {

@@ -249,7 +251,7 @@ public class TaskRun implements TenantInterface {
* This method is used when the retry is apply on a task
* but the retry type is NEW_EXECUTION
*
* @param retry Contains the retry configuration
* @param retry Contains the retry configuration
* @param execution Contains the attempt number and original creation date
* @return The next retry date, null if maxAttempt || maxDuration is reached
*/

@@ -270,6 +272,7 @@ public class TaskRun implements TenantInterface {
/**
* This method is used when the Retry definition comes from the flow
*
* @param retry The retry configuration
* @return The next retry date, null if maxAttempt || maxDuration is reached
*/
@@ -0,0 +1,78 @@
package io.kestra.core.models.executions.metrics;

import com.fasterxml.jackson.annotation.JsonInclude;
import jakarta.annotation.Nullable;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.ToString;
import io.kestra.core.metrics.MetricRegistry;
import io.kestra.core.models.executions.AbstractMetricEntry;
import jakarta.validation.constraints.NotNull;
import java.util.Map;

@ToString
@EqualsAndHashCode
@Getter
@NoArgsConstructor
public class Gauge extends AbstractMetricEntry<Double> {
public static final String TYPE = "gauge";

@NotNull
@JsonInclude
private final String type = TYPE;

@NotNull
@EqualsAndHashCode.Exclude
private Double value;

private Gauge(@NotNull String name, @Nullable String description, @NotNull Double value, String... tags) {
super(name, description, tags);
this.value = value;
}

public static Gauge of(@NotNull String name, @NotNull Double value, String... tags) {
return new Gauge(name, null, value, tags);
}

public static Gauge of(@NotNull String name, @Nullable String description, @NotNull Double value, String... tags) {
return new Gauge(name, description, value, tags);
}

public static Gauge of(@NotNull String name, @NotNull Integer value, String... tags) {
return new Gauge(name, null, (double) value, tags);
}

public static Gauge of(@NotNull String name, @Nullable String description, @NotNull Integer value, String... tags) {
return new Gauge(name, description, (double) value, tags);
}

public static Gauge of(@NotNull String name, @NotNull Long value, String... tags) {
return new Gauge(name, null, (double) value, tags);
}

public static Gauge of(@NotNull String name, @Nullable String description, @NotNull Long value, String... tags) {
return new Gauge(name, description, (double) value, tags);
}

public static Gauge of(@NotNull String name, @NotNull Float value, String... tags) {
return new Gauge(name, null, (double) value, tags);
}

public static Gauge of(@NotNull String name, @Nullable String description, @NotNull Float value, String... tags) {
return new Gauge(name, description, (double) value, tags);
}

@Override
public void register(MetricRegistry meterRegistry, String name, String description, Map<String, String> tags) {
meterRegistry
.gauge(this.metricName(name), description, this.value, this.tagsAsArray(tags));
}

@Override
public void increment(Double value) {
this.value = value;
}
}
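A minimal usage sketch of the new metric type (not part of the diff; the metric names and tags below are made up, and the key/value tag pairing is an assumption based on the Counter/Timer metrics that share the same AbstractMetricEntry base):

```java
// Gauges record the last observed value: increment() overwrites rather than adds.
Gauge queueDepth = Gauge.of("queue.depth", 42, "namespace", "company.team");
Gauge cpuLoad = Gauge.of("cpu.load", "1-minute load average", 0.75d, "host", "worker-1");
queueDepth.increment(40.0d); // the gauge now reports 40.0
```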
@@ -77,14 +77,6 @@ public abstract class AbstractFlow implements FlowInterface {
Map<String, Object> variables;

@Schema(
oneOf = {
String.class, // Corresponds to 'type: string' in OpenAPI
Map.class // Corresponds to 'type: object' in OpenAPI
}
)
interface StringOrMapValue {}

@Valid
private WorkerGroup workerGroup;
@@ -49,7 +49,7 @@ import java.util.stream.Stream;
public class Flow extends AbstractFlow implements HasUID {
private static final ObjectMapper NON_DEFAULT_OBJECT_MAPPER = JacksonMapper.ofYaml()
.copy()
.setSerializationInclusion(JsonInclude.Include.NON_DEFAULT);
.setDefaultPropertyInclusion(JsonInclude.Include.NON_DEFAULT);

private static final ObjectMapper WITHOUT_REVISION_OBJECT_MAPPER = NON_DEFAULT_OBJECT_MAPPER.copy()
.configure(SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS, true)
@@ -136,7 +136,7 @@ public interface FlowInterface extends FlowId, DeletedInterface, TenantInterface
class SourceGenerator {
private static final ObjectMapper NON_DEFAULT_OBJECT_MAPPER = JacksonMapper.ofJson()
.copy()
.setSerializationInclusion(JsonInclude.Include.NON_DEFAULT);
.setDefaultPropertyInclusion(JsonInclude.Include.NON_DEFAULT);

static String generate(final FlowInterface flow) {
try {
@@ -5,6 +5,7 @@ import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import io.kestra.core.models.flows.input.*;
import io.kestra.core.models.property.Property;
import io.kestra.core.validations.InputValidation;
import io.micronaut.core.annotation.Introspected;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.ConstraintViolationException;

@@ -44,6 +45,7 @@ import lombok.experimental.SuperBuilder;
@JsonSubTypes.Type(value = YamlInput.class, name = "YAML"),
@JsonSubTypes.Type(value = EmailInput.class, name = "EMAIL"),
})
@InputValidation
public abstract class Input<T> implements Data {
@Schema(
title = "The ID of the input."

@@ -80,7 +82,13 @@ public abstract class Input<T> implements Data {
title = "The default value to use if no value is specified."
)
Property<T> defaults;

@Schema(
title = "The suggested value for the input.",
description = "Optional UI hint for pre-filling the input. Cannot be used together with a default value."
)
Property<T> prefill;

@Schema(
title = "The display name of the input."
)
@@ -24,10 +24,6 @@ public class PluginDefault {
@Schema(
type = "object",
oneOf = {
Map.class,
String.class
},
additionalProperties = Schema.AdditionalPropertiesValue.FALSE
)
private final Map<String, Object> values;
@@ -236,14 +236,15 @@ public class State {
        RETRYING,
        RETRIED,
        SKIPPED,
-        BREAKPOINT;
+        BREAKPOINT,
+        RESUBMITTED;

        public boolean isTerminated() {
-            return this == Type.FAILED || this == Type.WARNING || this == Type.SUCCESS || this == Type.KILLED || this == Type.CANCELLED || this == Type.RETRIED || this == Type.SKIPPED;
+            return this == Type.FAILED || this == Type.WARNING || this == Type.SUCCESS || this == Type.KILLED || this == Type.CANCELLED || this == Type.RETRIED || this == Type.SKIPPED || this == Type.RESUBMITTED;
        }

        public boolean isTerminatedNoFail() {
-            return this == Type.WARNING || this == Type.SUCCESS || this == Type.RETRIED || this == Type.SKIPPED;
+            return this == Type.WARNING || this == Type.SUCCESS || this == Type.RETRIED || this == Type.SKIPPED || this == Type.RESUBMITTED;
        }

        public boolean isCreated() {
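For illustration, a minimal sketch of what the new constant means for callers, assuming the `State.Type` helpers shown in this hunk:

```java
import io.kestra.core.models.flows.State;

// Minimal sketch: with this change, RESUBMITTED is treated as a terminal,
// non-failed state by both helper checks.
public class ResubmittedStateCheck {
    public static void main(String[] args) {
        State.Type type = State.Type.RESUBMITTED;
        System.out.println(type.isTerminated());       // true
        System.out.println(type.isTerminatedNoFail()); // true
    }
}
```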
@@ -1,5 +1,6 @@
package io.kestra.core.models.flows.input;

+import java.util.Set;
import io.kestra.core.models.flows.Input;
import io.kestra.core.validations.FileInputValidation;
import jakarta.validation.ConstraintViolationException;
@@ -22,10 +23,35 @@ public class FileInput extends Input<URI> {

    @Deprecated(since = "0.24", forRemoval = true)
    public String extension;

    /**
     * List of allowed file extensions (e.g., [".csv", ".txt", ".pdf"]).
     * Each extension must start with a dot.
     */
    private List<String> allowedFileExtensions;

    /**
     * Gets the file extension from the URI's path
     */
    private String getFileExtension(URI uri) {
        String path = uri.getPath();
        int lastDotIndex = path.lastIndexOf(".");
        return lastDotIndex >= 0 ? path.substring(lastDotIndex).toLowerCase() : "";
    }

    @Override
    public void validate(URI input) throws ConstraintViolationException {
-        // no validation yet
        if (input == null || allowedFileExtensions == null || allowedFileExtensions.isEmpty()) {
            return;
        }

        String extension = getFileExtension(input);
        if (!allowedFileExtensions.contains(extension.toLowerCase())) {
            throw new ConstraintViolationException(
                "File type not allowed. Accepted extensions: " + String.join(", ", allowedFileExtensions),
                Set.of()
            );
        }
    }

    public static String findFileInputExtension(@NotNull final List<Input<?>> inputs, @NotNull final String fileName) {
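The new check compares the lowercased extension of the uploaded file's URI path against the configured list, so configured entries are expected to be lowercase and dot-prefixed. A standalone sketch of the same logic (class name and sample URIs are illustrative only):

```java
import java.net.URI;
import java.util.List;

// Mirrors FileInput#getFileExtension plus the allowedFileExtensions lookup above.
public class FileExtensionCheck {
    static String fileExtension(URI uri) {
        String path = uri.getPath();
        int lastDot = path.lastIndexOf('.');
        return lastDot >= 0 ? path.substring(lastDot).toLowerCase() : "";
    }

    public static void main(String[] args) {
        List<String> allowed = List.of(".csv", ".txt");
        URI upload = URI.create("kestra:///company/team/flow/executions/abc/inputs/data.CSV");
        System.out.println(allowed.contains(fileExtension(upload))); // true -> validation passes
        System.out.println(allowed.contains(fileExtension(URI.create("file:///tmp/report.pdf")))); // false -> would throw ConstraintViolationException
    }
}
```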
@@ -0,0 +1,79 @@
package io.kestra.core.models.kv;

import io.kestra.core.models.DeletedInterface;
import io.kestra.core.models.HasUID;
import io.kestra.core.models.TenantInterface;
import io.kestra.core.storages.kv.KVEntry;
import io.kestra.core.utils.IdUtils;
import io.swagger.v3.oas.annotations.Hidden;
import jakarta.annotation.Nullable;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Pattern;
import lombok.*;
import lombok.experimental.FieldDefaults;
import lombok.extern.slf4j.Slf4j;

import java.time.Instant;
import java.util.Optional;

@Builder(toBuilder = true)
@Slf4j
@Getter
@FieldDefaults(makeFinal = true, level = AccessLevel.PRIVATE)
@AllArgsConstructor
@ToString
@EqualsAndHashCode
public class PersistedKvMetadata implements DeletedInterface, TenantInterface, HasUID {
    @With
    @Hidden
    @Pattern(regexp = "^[a-z0-9][a-z0-9_-]*")
    private String tenantId;

    @NotNull
    private String namespace;

    @NotNull
    private String name;

    private String description;

    @NotNull
    private Integer version;

    @Builder.Default
    private boolean last = true;

    @Nullable
    private Instant expirationDate;

    @Nullable
    private Instant created;

    @Nullable
    private Instant updated;

    private boolean deleted;

    public static PersistedKvMetadata from(String tenantId, KVEntry kvEntry) {
        return PersistedKvMetadata.builder()
            .tenantId(tenantId)
            .namespace(kvEntry.namespace())
            .name(kvEntry.key())
            .version(kvEntry.version())
            .description(kvEntry.description())
            .created(kvEntry.creationDate())
            .updated(kvEntry.updateDate())
            .expirationDate(kvEntry.expirationDate())
            .build();
    }

    public PersistedKvMetadata asLast() {
        Instant saveDate = Instant.now();
        return this.toBuilder().created(Optional.ofNullable(this.created).orElse(saveDate)).updated(saveDate).last(true).build();
    }

    @Override
    public String uid() {
        return IdUtils.fromParts(getTenantId(), getNamespace(), getName(), getVersion().toString());
    }
}
@@ -8,6 +8,8 @@ import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Pattern;

+import static io.kestra.core.utils.RegexPatterns.JAVA_IDENTIFIER_REGEX;
+
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
public interface TaskInterface extends Plugin, PluginVersioning {
    @NotNull
@@ -17,7 +19,7 @@ public interface TaskInterface extends Plugin, PluginVersioning {

    @NotNull
    @NotBlank
-    @Pattern(regexp = "^[A-Za-z_$][A-Za-z0-9_$]*(\\.[A-Za-z_$][A-Za-z0-9_$]*)*$")
+    @Pattern(regexp = JAVA_IDENTIFIER_REGEX)
    @Schema(title = "The class name of this task.")
    String getType();
}
@@ -11,6 +11,8 @@ import lombok.NoArgsConstructor;
|
||||
import lombok.experimental.SuperBuilder;
|
||||
import reactor.core.publisher.Flux;
|
||||
|
||||
import static io.kestra.core.utils.RegexPatterns.JAVA_IDENTIFIER_REGEX;
|
||||
|
||||
@Plugin
|
||||
@SuperBuilder(toBuilder = true)
|
||||
@Getter
|
||||
@@ -22,7 +24,7 @@ public abstract class LogExporter<T extends Output> implements io.kestra.core.m
|
||||
protected String id;
|
||||
|
||||
@NotBlank
|
||||
@Pattern(regexp = "^[A-Za-z_$][A-Za-z0-9_$]*(\\.[A-Za-z_$][A-Za-z0-9_$]*)*$")
|
||||
@Pattern(regexp = JAVA_IDENTIFIER_REGEX)
|
||||
protected String type;
|
||||
|
||||
public abstract T sendLogs(RunContext runContext, Flux<LogRecord> logRecords) throws Exception;
|
||||
|
||||
@@ -8,12 +8,16 @@ public final class LogRecordMapper {
|
||||
private LogRecordMapper(){}
|
||||
|
||||
public static LogRecord mapToLogRecord(LogEntry log) {
|
||||
return mapToLogRecord(log, null);
|
||||
}
|
||||
|
||||
public static LogRecord mapToLogRecord(LogEntry log, Integer maxMessageSize) {
|
||||
return LogRecord.builder()
|
||||
.resource("Kestra")
|
||||
.timestampEpochNanos(instantInNanos(log.getTimestamp()))
|
||||
.severity(log.getLevel().name())
|
||||
.attributes(log.toLogMap())
|
||||
.bodyValue(LogEntry.toPrettyString(log))
|
||||
.bodyValue(LogEntry.toPrettyString(log, maxMessageSize))
|
||||
.build();
|
||||
}
|
||||
|
||||
|
||||
@@ -22,6 +22,7 @@ import java.util.Map;
@JsonSubTypes({
    @JsonSubTypes.Type(value = CounterMetric.class, name = "counter"),
    @JsonSubTypes.Type(value = TimerMetric.class, name = "timer"),
+    @JsonSubTypes.Type(value = GaugeMetric.class, name = "gauge"),
})
@ToString
@EqualsAndHashCode
@@ -0,0 +1,44 @@
package io.kestra.core.models.tasks.metrics;

import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.models.executions.AbstractMetricEntry;
import io.kestra.core.models.executions.metrics.Gauge;
import io.kestra.core.models.property.Property;
import io.kestra.core.runners.RunContext;
import jakarta.validation.constraints.NotNull;
import lombok.*;
import lombok.experimental.SuperBuilder;

import java.util.Map;
import java.util.stream.Stream;

@ToString
@EqualsAndHashCode
@Getter
@NoArgsConstructor
@AllArgsConstructor
@SuperBuilder
public class GaugeMetric extends AbstractMetric {
    public static final String TYPE = "gauge";

    @NotNull
    @EqualsAndHashCode.Exclude
    private Property<Double> value;

    @Override
    public AbstractMetricEntry<?> toMetric(RunContext runContext) throws IllegalVariableEvaluationException {
        String name = runContext.render(this.name).as(String.class).orElseThrow();
        Double value = runContext.render(this.value).as(Double.class).orElseThrow();
        String description = runContext.render(this.description).as(String.class).orElse(null);
        Map<String, String> tags = runContext.render(this.tags).asMap(String.class, String.class);
        String[] tagsAsStrings = tags.entrySet().stream()
            .flatMap(e -> Stream.of(e.getKey(), e.getValue()))
            .toArray(String[]::new);

        return Gauge.of(name, description, value, tagsAsStrings);
    }

    public String getType() {
        return TYPE;
    }
}
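A hedged sketch of how this metric type could be materialized from a task. The Lombok-generated builder and the `Property.of(...)` factory are assumptions based on the annotations and property types above; `runContext` is whatever `RunContext` the caller already holds:

```java
import io.kestra.core.models.executions.AbstractMetricEntry;
import io.kestra.core.models.property.Property;
import io.kestra.core.models.tasks.metrics.GaugeMetric;
import io.kestra.core.runners.RunContext;

public class GaugeMetricSketch {
    // Renders the gauge's name, description and value, and returns a Gauge metric entry.
    static AbstractMetricEntry<?> queueDepth(RunContext runContext) throws Exception {
        GaugeMetric metric = GaugeMetric.builder()
            .name(Property.of("queue_depth"))                       // assumed Property factory
            .description(Property.of("Depth of the work queue"))
            .value(Property.of(42.0))
            .build();
        return metric.toMetric(runContext); // effectively Gauge.of("queue_depth", "Depth of the work queue", 42.0)
    }
}
```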
@@ -25,6 +25,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
|
||||
import static io.kestra.core.utils.WindowsUtils.windowsToUnixPath;
|
||||
import static io.kestra.core.utils.RegexPatterns.JAVA_IDENTIFIER_REGEX;
|
||||
|
||||
/**
|
||||
* Base class for all task runners.
|
||||
@@ -36,7 +37,7 @@ import static io.kestra.core.utils.WindowsUtils.windowsToUnixPath;
|
||||
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
|
||||
public abstract class TaskRunner<T extends TaskRunnerDetailResult> implements Plugin, PluginVersioning, WorkerJobLifecycle {
|
||||
@NotBlank
|
||||
@Pattern(regexp = "^[A-Za-z_$][A-Za-z0-9_$]*(\\.[A-Za-z_$][A-Za-z0-9_$]*)*$")
|
||||
@Pattern(regexp = JAVA_IDENTIFIER_REGEX)
|
||||
protected String type;
|
||||
|
||||
@PluginProperty(hidden = true, group = PluginProperty.CORE_GROUP)
|
||||
|
||||
@@ -44,7 +44,7 @@ public class Template implements DeletedInterface, TenantInterface, HasUID {
|
||||
return exclusions.contains(m.getName()) || super.hasIgnoreMarker(m);
|
||||
}
|
||||
})
|
||||
.setSerializationInclusion(JsonInclude.Include.NON_DEFAULT);
|
||||
.setDefaultPropertyInclusion(JsonInclude.Include.NON_DEFAULT);
|
||||
|
||||
@Setter
|
||||
@Hidden
|
||||
|
||||
@@ -47,7 +47,6 @@ abstract public class AbstractTrigger implements TriggerInterface {
|
||||
@Valid
|
||||
protected List<@Valid @NotNull Condition> conditions;
|
||||
|
||||
@NotNull
|
||||
@Builder.Default
|
||||
@PluginProperty(hidden = true, group = PluginProperty.CORE_GROUP)
|
||||
@Schema(defaultValue = "false")
|
||||
|
||||
@@ -1,10 +1,12 @@
package io.kestra.core.models.triggers;

+import io.kestra.core.exceptions.InvalidTriggerConfigurationException;
import io.kestra.core.models.annotations.PluginProperty;
import io.kestra.core.models.conditions.ConditionContext;
import io.kestra.core.models.executions.Execution;
import io.swagger.v3.oas.annotations.media.Schema;

+import java.time.DateTimeException;
import java.time.Duration;
import java.time.ZonedDateTime;
import java.util.Optional;
@@ -29,15 +31,29 @@ public interface PollingTriggerInterface extends WorkerTriggerInterface {
     * Compute the next evaluation date of the trigger based on the existing trigger context: by default, it uses the current date and the interval.
     * Schedulable triggers must override this method.
     */
-    default ZonedDateTime nextEvaluationDate(ConditionContext conditionContext, Optional<? extends TriggerContext> last) throws Exception {
-        return ZonedDateTime.now().plus(this.getInterval());
+    default ZonedDateTime nextEvaluationDate(ConditionContext conditionContext, Optional<? extends TriggerContext> last) throws InvalidTriggerConfigurationException {
+        return computeNextEvaluationDate();
    }

    /**
     * Compute the next evaluation date of the trigger: by default, it uses the current date and the interval.
     * Schedulable triggers must override this method as it's used to init them when there is no evaluation date.
     */
-    default ZonedDateTime nextEvaluationDate() {
-        return ZonedDateTime.now().plus(this.getInterval());
+    default ZonedDateTime nextEvaluationDate() throws InvalidTriggerConfigurationException {
+        return computeNextEvaluationDate();
    }

    /**
     * Computes the next evaluation date using the configured interval.
     * Throws InvalidTriggerConfigurationException if the interval causes a date overflow.
     */
    private ZonedDateTime computeNextEvaluationDate() throws InvalidTriggerConfigurationException {
        Duration interval = this.getInterval();

        try {
            return ZonedDateTime.now().plus(interval);
        } catch (DateTimeException | ArithmeticException e) {
            throw new InvalidTriggerConfigurationException("Trigger interval too large", e);
        }
    }
}
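The guard exists because `ZonedDateTime#plus` throws rather than saturating when the configured interval is absurdly large. A standalone illustration of the failure mode the new exception wraps:

```java
import java.time.DateTimeException;
import java.time.Duration;
import java.time.ZonedDateTime;

public class IntervalOverflowDemo {
    public static void main(String[] args) {
        Duration interval = Duration.ofSeconds(Long.MAX_VALUE); // e.g. a mis-configured trigger interval
        try {
            System.out.println(ZonedDateTime.now().plus(interval));
        } catch (DateTimeException | ArithmeticException e) {
            // This is the case computeNextEvaluationDate() now reports as
            // InvalidTriggerConfigurationException("Trigger interval too large", e).
            System.out.println("overflow: " + e);
        }
    }
}
```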
@@ -0,0 +1,40 @@
package io.kestra.core.models.triggers;

import io.kestra.core.models.property.Property;
import io.swagger.v3.oas.annotations.media.Schema;

import java.time.Duration;

public interface StatefulTriggerInterface {
    @Schema(
        title = "Trigger event type",
        description = """
            Defines when the trigger fires.
            - `CREATE`: only for newly discovered entities.
            - `UPDATE`: only when an already-seen entity changes.
            - `CREATE_OR_UPDATE`: fires on either event.
            """
    )
    Property<On> getOn();

    @Schema(
        title = "State key",
        description = """
            JSON-type KV key for persisted state.
            Default: `<namespace>__<flowId>__<triggerId>`
            """
    )
    Property<String> getStateKey();

    @Schema(
        title = "State TTL",
        description = "TTL for persisted state entries (e.g., PT24H, P7D)."
    )
    Property<Duration> getStateTtl();

    enum On {
        CREATE,
        UPDATE,
        CREATE_OR_UPDATE
    }
}
@@ -0,0 +1,91 @@
package io.kestra.core.models.triggers;

import com.fasterxml.jackson.core.type.TypeReference;
import io.kestra.core.runners.RunContext;
import io.kestra.core.serializers.JacksonMapper;
import io.kestra.core.storages.kv.KVMetadata;
import io.kestra.core.storages.kv.KVValueAndMetadata;

import java.time.Duration;
import java.time.Instant;
import java.util.*;
import java.util.stream.Collectors;

public class StatefulTriggerService {
    public record Entry(String uri, String version, Instant modifiedAt, Instant lastSeenAt) {
        public static Entry candidate(String uri, String version, Instant modifiedAt) {
            return new Entry(uri, version, modifiedAt, null);
        }
    }

    public record StateUpdate(boolean fire, boolean isNew) {}

    public static Map<String, Entry> readState(RunContext runContext, String key, Optional<Duration> ttl) {
        try {
            var kv = runContext.namespaceKv(runContext.flowInfo().namespace()).getValue(key);
            if (kv.isEmpty()) {
                return new HashMap<>();
            }

            var entries = JacksonMapper.ofJson().readValue((byte[]) kv.get().value(), new TypeReference<List<Entry>>() {});

            var cutoff = ttl.map(d -> Instant.now().minus(d)).orElse(Instant.MIN);

            return entries.stream()
                .filter(e -> Optional.ofNullable(e.lastSeenAt()).orElse(Instant.now()).isAfter(cutoff))
                .collect(Collectors.toMap(Entry::uri, e -> e));
        } catch (Exception e) {
            runContext.logger().warn("readState failed", e);
            return new HashMap<>();
        }
    }

    public static void writeState(RunContext runContext, String key, Map<String, Entry> state, Optional<Duration> ttl) {
        try {
            var bytes = JacksonMapper.ofJson().writeValueAsBytes(state.values());
            var meta = new KVMetadata("trigger state", ttl.orElse(null));

            runContext.namespaceKv(runContext.flowInfo().namespace()).put(key, new KVValueAndMetadata(meta, bytes));
        } catch (Exception e) {
            runContext.logger().warn("writeState failed", e);
        }
    }

    public static StateUpdate computeAndUpdateState(Map<String, Entry> state, Entry candidate, StatefulTriggerInterface.On on) {
        var prev = state.get(candidate.uri());
        var isNew = prev == null;
        var fire = shouldFire(prev, candidate.version(), on);

        Instant lastSeenAt;

        if (fire || isNew) {
            // newly seen or changed
            lastSeenAt = Instant.now();
        } else if (prev.lastSeenAt() != null) {
            // unchanged but already tracked before
            lastSeenAt = prev.lastSeenAt();
        } else {
            lastSeenAt = Instant.now();
        }

        var newEntry = new Entry(candidate.uri(), candidate.version(), candidate.modifiedAt(), lastSeenAt);

        state.put(candidate.uri(), newEntry);

        return new StatefulTriggerService.StateUpdate(fire, isNew);
    }

    public static boolean shouldFire(Entry prev, String version, StatefulTriggerInterface.On on) {
        if (prev == null) {
            return on == StatefulTriggerInterface.On.CREATE || on == StatefulTriggerInterface.On.CREATE_OR_UPDATE;
        }
        if (!Objects.equals(prev.version(), version)) {
            return on == StatefulTriggerInterface.On.UPDATE || on == StatefulTriggerInterface.On.CREATE_OR_UPDATE;
        }
        return false;
    }

    public static String defaultKey(String ns, String flowId, String triggerId) {
        return String.join("_", ns, flowId, triggerId);
    }
}
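Putting the pieces together, a hedged sketch of the in-memory decision flow a stateful trigger would run on each poll. Persistence via `readState`/`writeState` against the namespace KV store is omitted, and the URIs and versions are illustrative only:

```java
import io.kestra.core.models.triggers.StatefulTriggerInterface.On;
import io.kestra.core.models.triggers.StatefulTriggerService;
import io.kestra.core.models.triggers.StatefulTriggerService.Entry;

import java.time.Instant;
import java.util.HashMap;
import java.util.Map;

// First sighting fires on CREATE_OR_UPDATE, an unchanged version does not, a new version does.
public class StatefulTriggerSketch {
    public static void main(String[] args) {
        Map<String, Entry> state = new HashMap<>();

        var first = StatefulTriggerService.computeAndUpdateState(
            state, Entry.candidate("s3://bucket/report.csv", "etag-1", Instant.now()), On.CREATE_OR_UPDATE);
        System.out.println(first.fire() + " " + first.isNew());   // true true

        var unchanged = StatefulTriggerService.computeAndUpdateState(
            state, Entry.candidate("s3://bucket/report.csv", "etag-1", Instant.now()), On.CREATE_OR_UPDATE);
        System.out.println(unchanged.fire());                     // false

        var updated = StatefulTriggerService.computeAndUpdateState(
            state, Entry.candidate("s3://bucket/report.csv", "etag-2", Instant.now()), On.UPDATE);
        System.out.println(updated.fire());                       // true
    }
}
```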
@@ -1,5 +1,6 @@
package io.kestra.core.models.triggers;

+import io.kestra.core.exceptions.InvalidTriggerConfigurationException;
import io.kestra.core.models.HasUID;
import io.kestra.core.models.conditions.ConditionContext;
import io.kestra.core.models.executions.Execution;
@@ -167,9 +168,14 @@ public class Trigger extends TriggerContext implements HasUID {
    // Used to update trigger in flowListeners
    public static Trigger of(FlowInterface flow, AbstractTrigger abstractTrigger, ConditionContext conditionContext, Optional<Trigger> lastTrigger) throws Exception {
        ZonedDateTime nextDate = null;
+        boolean disabled = lastTrigger.map(TriggerContext::getDisabled).orElse(Boolean.FALSE);

        if (abstractTrigger instanceof PollingTriggerInterface pollingTriggerInterface) {
-            nextDate = pollingTriggerInterface.nextEvaluationDate(conditionContext, Optional.empty());
+            try {
+                nextDate = pollingTriggerInterface.nextEvaluationDate(conditionContext, Optional.empty());
+            } catch (InvalidTriggerConfigurationException e) {
+                disabled = true;
+            }
        }

        return Trigger.builder()
@@ -180,7 +186,7 @@ public class Trigger extends TriggerContext implements HasUID {
            .date(ZonedDateTime.now().truncatedTo(ChronoUnit.SECONDS))
            .nextExecutionDate(nextDate)
            .stopAfter(abstractTrigger.getStopAfter())
-            .disabled(lastTrigger.map(TriggerContext::getDisabled).orElse(Boolean.FALSE))
+            .disabled(disabled)
            .backfill(null)
            .build();
    }
@@ -7,6 +7,7 @@ import jakarta.validation.constraints.NotBlank;
|
||||
import jakarta.validation.constraints.NotNull;
|
||||
import jakarta.validation.constraints.Pattern;
|
||||
|
||||
import static io.kestra.core.utils.RegexPatterns.JAVA_IDENTIFIER_REGEX;
|
||||
|
||||
public interface TriggerInterface extends Plugin, PluginVersioning {
|
||||
@NotNull
|
||||
@@ -17,7 +18,7 @@ public interface TriggerInterface extends Plugin, PluginVersioning {
|
||||
|
||||
@NotNull
|
||||
@NotBlank
|
||||
@Pattern(regexp = "^[A-Za-z_$][A-Za-z0-9_$]*(\\.[A-Za-z_$][A-Za-z0-9_$]*)*$")
|
||||
@Pattern(regexp = JAVA_IDENTIFIER_REGEX)
|
||||
@Schema(title = "The class name for this current trigger.")
|
||||
String getType();
|
||||
|
||||
|
||||
@@ -11,7 +11,6 @@ import io.kestra.core.runners.FlowInputOutput;
|
||||
import io.kestra.core.runners.RunContext;
|
||||
import io.kestra.core.utils.IdUtils;
|
||||
import io.kestra.core.utils.ListUtils;
|
||||
|
||||
import java.time.ZonedDateTime;
|
||||
import java.util.*;
|
||||
|
||||
@@ -25,7 +24,7 @@ public abstract class TriggerService {
|
||||
RunContext runContext = conditionContext.getRunContext();
|
||||
ExecutionTrigger executionTrigger = ExecutionTrigger.of(trigger, variables, runContext.logFileURI());
|
||||
|
||||
return generateExecution(runContext.getTriggerExecutionId(), trigger, context, executionTrigger, conditionContext.getFlow().getRevision());
|
||||
return generateExecution(runContext.getTriggerExecutionId(), trigger, context, executionTrigger, conditionContext);
|
||||
}
|
||||
|
||||
public static Execution generateExecution(
|
||||
@@ -37,7 +36,7 @@ public abstract class TriggerService {
|
||||
RunContext runContext = conditionContext.getRunContext();
|
||||
ExecutionTrigger executionTrigger = ExecutionTrigger.of(trigger, output, runContext.logFileURI());
|
||||
|
||||
return generateExecution(runContext.getTriggerExecutionId(), trigger, context, executionTrigger, conditionContext.getFlow().getRevision());
|
||||
return generateExecution(runContext.getTriggerExecutionId(), trigger, context, executionTrigger, conditionContext);
|
||||
}
|
||||
|
||||
public static Execution generateRealtimeExecution(
|
||||
@@ -49,7 +48,7 @@ public abstract class TriggerService {
|
||||
RunContext runContext = conditionContext.getRunContext();
|
||||
ExecutionTrigger executionTrigger = ExecutionTrigger.of(trigger, output, runContext.logFileURI());
|
||||
|
||||
return generateExecution(IdUtils.create(), trigger, context, executionTrigger, conditionContext.getFlow().getRevision());
|
||||
return generateExecution(IdUtils.create(), trigger, context, executionTrigger, conditionContext);
|
||||
}
|
||||
|
||||
public static Execution generateScheduledExecution(
|
||||
@@ -75,6 +74,7 @@ public abstract class TriggerService {
|
||||
.namespace(context.getNamespace())
|
||||
.flowId(context.getFlowId())
|
||||
.flowRevision(conditionContext.getFlow().getRevision())
|
||||
.variables(conditionContext.getFlow().getVariables())
|
||||
.labels(executionLabels)
|
||||
.state(new State())
|
||||
.trigger(executionTrigger)
|
||||
@@ -108,7 +108,7 @@ public abstract class TriggerService {
|
||||
AbstractTrigger trigger,
|
||||
TriggerContext context,
|
||||
ExecutionTrigger executionTrigger,
|
||||
Integer flowRevision
|
||||
ConditionContext conditionContext
|
||||
) {
|
||||
List<Label> executionLabels = new ArrayList<>(ListUtils.emptyOnNull(trigger.getLabels()));
|
||||
if (executionLabels.stream().noneMatch(label -> Label.CORRELATION_ID.equals(label.key()))) {
|
||||
@@ -120,7 +120,8 @@ public abstract class TriggerService {
|
||||
.namespace(context.getNamespace())
|
||||
.flowId(context.getFlowId())
|
||||
.tenantId(context.getTenantId())
|
||||
.flowRevision(flowRevision)
|
||||
.flowRevision(conditionContext.getFlow().getRevision())
|
||||
.variables(conditionContext.getFlow().getVariables())
|
||||
.state(new State())
|
||||
.trigger(executionTrigger)
|
||||
.labels(executionLabels)
|
||||
|
||||
@@ -8,6 +8,8 @@ import lombok.Getter;
|
||||
import lombok.NoArgsConstructor;
|
||||
import lombok.experimental.SuperBuilder;
|
||||
|
||||
import static io.kestra.core.utils.RegexPatterns.JAVA_IDENTIFIER_REGEX;
|
||||
|
||||
@io.kestra.core.models.annotations.Plugin
|
||||
@SuperBuilder(toBuilder = true)
|
||||
@Getter
|
||||
@@ -15,6 +17,6 @@ import lombok.experimental.SuperBuilder;
|
||||
public abstract class AdditionalPlugin implements Plugin {
|
||||
@NotNull
|
||||
@NotBlank
|
||||
@Pattern(regexp = "^[A-Za-z_$][A-Za-z0-9_$]*(\\.[A-Za-z_$][A-Za-z0-9_$]*)*$")
|
||||
@Pattern(regexp = JAVA_IDENTIFIER_REGEX)
|
||||
protected String type;
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
package io.kestra.core.plugins;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore;
|
||||
import lombok.Builder;
|
||||
|
||||
import java.io.File;
|
||||
@@ -33,7 +34,7 @@ public record PluginArtifact(
|
||||
String version,
|
||||
URI uri
|
||||
) implements Comparable<PluginArtifact> {
|
||||
|
||||
|
||||
private static final Pattern ARTIFACT_PATTERN = Pattern.compile(
|
||||
"([^: ]+):([^: ]+)(:([^: ]*)(:([^: ]+))?)?:([^: ]+)"
|
||||
);
|
||||
@@ -42,7 +43,8 @@ public record PluginArtifact(
|
||||
);
|
||||
|
||||
public static final String JAR_EXTENSION = "jar";
|
||||
|
||||
public static final String KESTRA_GROUP_ID = "io.kestra";
|
||||
|
||||
/**
|
||||
* Static helper method for constructing a new {@link PluginArtifact} from a JAR file.
|
||||
*
|
||||
@@ -135,6 +137,11 @@ public record PluginArtifact(
|
||||
public String toString() {
|
||||
return toCoordinates();
|
||||
}
|
||||
|
||||
@JsonIgnore
|
||||
public boolean isOfficial() {
|
||||
return groupId.startsWith(KESTRA_GROUP_ID);
|
||||
}
|
||||
|
||||
public String toCoordinates() {
|
||||
return Stream.of(groupId, artifactId, extension, classifier, version)
|
||||
|
||||
@@ -1,9 +1,12 @@
|
||||
package io.kestra.core.plugins;
|
||||
|
||||
import io.kestra.core.contexts.KestraContext;
|
||||
import io.kestra.core.utils.ListUtils;
|
||||
import io.micronaut.core.type.Argument;
|
||||
import io.micronaut.http.HttpMethod;
|
||||
import io.micronaut.http.HttpRequest;
|
||||
import io.micronaut.http.HttpResponse;
|
||||
import io.micronaut.http.MutableHttpRequest;
|
||||
import io.micronaut.http.client.HttpClient;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
@@ -15,9 +18,12 @@ import java.util.Base64;
|
||||
import java.util.Comparator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
/**
|
||||
* Services for retrieving available plugin artifacts for Kestra.
|
||||
@@ -58,6 +64,47 @@ public class PluginCatalogService {
|
||||
this.isLoaded.set(true);
|
||||
this.plugins = CompletableFuture.supplyAsync(this::load);
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves the version for the given artifacts.
|
||||
*
|
||||
* @param artifacts The list of artifacts to resolve.
|
||||
* @return The list of results.
|
||||
*/
|
||||
public List<PluginResolutionResult> resolveVersions(List<PluginArtifact> artifacts) {
|
||||
if (ListUtils.isEmpty(artifacts)) {
|
||||
return List.of();
|
||||
}
|
||||
|
||||
final Map<String, ApiPluginArtifact> pluginsByGroupAndArtifactId = getAllCompatiblePlugins().stream()
|
||||
.collect(Collectors.toMap(it -> it.groupId() + ":" + it.artifactId(), Function.identity()));
|
||||
|
||||
return artifacts.stream().map(it -> {
|
||||
// Get all compatible versions for current artifact
|
||||
List<String> versions = Optional
|
||||
.ofNullable(pluginsByGroupAndArtifactId.get(it.groupId() + ":" + it.artifactId()))
|
||||
.map(ApiPluginArtifact::versions)
|
||||
.orElse(List.of());
|
||||
|
||||
// Try to resolve the version
|
||||
String resolvedVersion = null;
|
||||
if (!versions.isEmpty()) {
|
||||
if (it.version().equalsIgnoreCase("LATEST")) {
|
||||
resolvedVersion = versions.getFirst();
|
||||
} else {
|
||||
resolvedVersion = versions.contains(it.version()) ? it.version() : null;
|
||||
}
|
||||
}
|
||||
|
||||
// Build the PluginResolutionResult
|
||||
return new PluginResolutionResult(
|
||||
it,
|
||||
resolvedVersion,
|
||||
versions,
|
||||
resolvedVersion != null
|
||||
);
|
||||
}).toList();
|
||||
}
|
||||
|
||||
public synchronized List<PluginManifest> get() {
|
||||
try {
|
||||
@@ -140,7 +187,26 @@ public class PluginCatalogService {
|
||||
isLoaded.set(false);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private List<ApiPluginArtifact> getAllCompatiblePlugins() {
|
||||
MutableHttpRequest<Object> request = HttpRequest.create(
|
||||
HttpMethod.GET,
|
||||
"/v1/plugins/artifacts/core-compatibility/" + KestraContext.getContext().getVersion()
|
||||
);
|
||||
if (oss) {
|
||||
request.getParameters().add("license", "OPENSOURCE");
|
||||
}
|
||||
try {
|
||||
return httpClient
|
||||
.toBlocking()
|
||||
.exchange(request, Argument.listOf(ApiPluginArtifact.class))
|
||||
.body();
|
||||
} catch (Exception e) {
|
||||
log.debug("Failed to retrieve available plugins from Kestra API. Cause: ", e);
|
||||
return List.of();
|
||||
}
|
||||
}
|
||||
|
||||
public record PluginManifest(
|
||||
String title,
|
||||
String icon,
|
||||
@@ -153,4 +219,11 @@ public class PluginCatalogService {
|
||||
return groupId + ":" + artifactId + ":LATEST";
|
||||
}
|
||||
}
|
||||
|
||||
public record ApiPluginArtifact(
|
||||
String groupId,
|
||||
String artifactId,
|
||||
String license,
|
||||
List<String> versions
|
||||
) {}
|
||||
}
|
||||
|
||||
@@ -14,19 +14,19 @@ import java.time.Instant;
|
||||
@Requires(property = "kestra.server-type")
|
||||
@Slf4j
|
||||
public class ReportableScheduler {
|
||||
|
||||
|
||||
private final ReportableRegistry registry;
|
||||
private final ServerEventSender sender;
|
||||
private final Clock clock;
|
||||
|
||||
|
||||
@Inject
|
||||
public ReportableScheduler(ReportableRegistry registry, ServerEventSender sender) {
|
||||
this.registry = registry;
|
||||
this.sender = sender;
|
||||
this.clock = Clock.systemDefaultZone();
|
||||
}
|
||||
|
||||
@Scheduled(fixedDelay = "5m", initialDelay = "${kestra.anonymous-usage-report.initial-delay}")
|
||||
|
||||
@Scheduled(fixedDelay = "5m", initialDelay = "${kestra.anonymous-usage-report.initial-delay:5m}")
|
||||
public void tick() {
|
||||
Instant now = clock.instant();
|
||||
for (Reportable<?> r : registry.getAll()) {
|
||||
|
||||
@@ -18,6 +18,7 @@ import io.micronaut.http.hateoas.JsonError;
|
||||
import io.micronaut.reactor.http.client.ReactorHttpClient;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.inject.Singleton;
|
||||
import lombok.Setter;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
import java.net.URI;
|
||||
@@ -28,29 +29,30 @@ import java.util.UUID;
|
||||
@Singleton
|
||||
@Slf4j
|
||||
public class ServerEventSender {
|
||||
|
||||
|
||||
private static final String SESSION_UUID = IdUtils.create();
|
||||
private static final ObjectMapper OBJECT_MAPPER = JacksonMapper.ofJson();
|
||||
|
||||
|
||||
@Inject
|
||||
@Client
|
||||
private ReactorHttpClient client;
|
||||
|
||||
|
||||
@Inject
|
||||
private VersionProvider versionProvider;
|
||||
|
||||
|
||||
@Inject
|
||||
private InstanceService instanceService;
|
||||
|
||||
|
||||
private final ServerType serverType;
|
||||
|
||||
@Value("${kestra.anonymous-usage-report.uri}")
|
||||
|
||||
@Setter
|
||||
@Value("${kestra.anonymous-usage-report.uri:'https://api.kestra.io/v1/reports/server-events'}")
|
||||
protected URI url;
|
||||
|
||||
|
||||
public ServerEventSender( ) {
|
||||
this.serverType = KestraContext.getContext().getServerType();
|
||||
}
|
||||
|
||||
|
||||
public void send(final Instant now, final Type type, Object event) {
|
||||
ServerEvent serverEvent = ServerEvent
|
||||
.builder()
|
||||
@@ -65,11 +67,11 @@ public class ServerEventSender {
|
||||
.build();
|
||||
try {
|
||||
MutableHttpRequest<ServerEvent> request = this.request(serverEvent, type);
|
||||
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("Report anonymous usage: '{}'", OBJECT_MAPPER.writeValueAsString(serverEvent));
|
||||
}
|
||||
|
||||
|
||||
this.handleResponse(client.toBlocking().retrieve(request, Argument.of(Result.class), Argument.of(JsonError.class)));
|
||||
} catch (HttpClientResponseException t) {
|
||||
log.trace("Unable to report anonymous usage with body '{}'", t.getResponse().getBody(String.class), t);
|
||||
@@ -77,11 +79,11 @@ public class ServerEventSender {
|
||||
log.trace("Unable to handle anonymous usage", t);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private void handleResponse (Result result){
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
protected MutableHttpRequest<ServerEvent> request(ServerEvent event, Type type) throws Exception {
|
||||
URI baseUri = URI.create(this.url.toString().endsWith("/") ? this.url.toString() : this.url + "/");
|
||||
URI resolvedUri = baseUri.resolve(type.name().toLowerCase());
|
||||
|
||||
@@ -0,0 +1,48 @@
|
||||
package io.kestra.core.repositories;
|
||||
|
||||
import io.kestra.core.models.FetchVersion;
|
||||
import io.kestra.core.models.QueryFilter;
|
||||
import io.kestra.core.models.kv.PersistedKvMetadata;
|
||||
import io.micronaut.data.model.Pageable;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
public interface KvMetadataRepositoryInterface extends SaveRepositoryInterface<PersistedKvMetadata> {
|
||||
Optional<PersistedKvMetadata> findByName(
|
||||
String tenantId,
|
||||
String namespace,
|
||||
String name
|
||||
) throws IOException;
|
||||
|
||||
default ArrayListTotal<PersistedKvMetadata> find(
|
||||
Pageable pageable,
|
||||
String tenantId,
|
||||
List<QueryFilter> filters,
|
||||
boolean allowDeleted,
|
||||
boolean allowExpired
|
||||
) {
|
||||
return this.find(pageable, tenantId, filters, allowDeleted, allowExpired, FetchVersion.LATEST);
|
||||
}
|
||||
|
||||
ArrayListTotal<PersistedKvMetadata> find(
|
||||
Pageable pageable,
|
||||
String tenantId,
|
||||
List<QueryFilter> filters,
|
||||
boolean allowDeleted,
|
||||
boolean allowExpired,
|
||||
FetchVersion fetchBehavior
|
||||
);
|
||||
|
||||
default PersistedKvMetadata delete(PersistedKvMetadata persistedKvMetadata) throws IOException {
|
||||
return this.save(persistedKvMetadata.toBuilder().deleted(true).build());
|
||||
}
|
||||
|
||||
/**
|
||||
* Purge (hard delete) a list of persisted kv metadata. If no version is specified, all versions are purged.
|
||||
* @param persistedKvsMetadata the list of persisted kv metadata to purge
|
||||
* @return the number of purged persisted kv metadata
|
||||
*/
|
||||
Integer purge(List<PersistedKvMetadata> persistedKvsMetadata);
|
||||
}
|
||||
@@ -130,7 +130,7 @@ public class FlowInputOutput {
    private Mono<Map<String, Object>> readData(List<Input<?>> inputs, Execution execution, Publisher<CompletedPart> data, boolean uploadFiles) {
        return Flux.from(data)
            .publishOn(Schedulers.boundedElastic())
-            .<AbstractMap.SimpleEntry<String, String>>handle((input, sink) -> {
+            .<Map.Entry<String, String>>handle((input, sink) -> {
                if (input instanceof CompletedFileUpload fileUpload) {
                    boolean oldStyleInput = false;
                    if ("files".equals(fileUpload.getName())) {
@@ -150,7 +150,7 @@ public class FlowInputOutput {
                            .getContextStorageURI()
                        );
                        fileUpload.discard();
-                        sink.next(new AbstractMap.SimpleEntry<>(inputId, from.toString()));
+                        sink.next(Map.entry(inputId, from.toString()));
                    } else {
                        try {
                            final String fileExtension = FileInput.findFileInputExtension(inputs, fileName);
@@ -165,7 +165,7 @@ public class FlowInputOutput {
                                return;
                            }
                            URI from = storageInterface.from(execution, inputId, fileName, tempFile);
-                            sink.next(new AbstractMap.SimpleEntry<>(inputId, from.toString()));
+                            sink.next(Map.entry(inputId, from.toString()));
                        } finally {
                            if (!tempFile.delete()) {
                                tempFile.deleteOnExit();
@@ -178,13 +178,13 @@ public class FlowInputOutput {
                    }
                } else {
                    try {
-                        sink.next(new AbstractMap.SimpleEntry<>(input.getName(), new String(input.getBytes())));
+                        sink.next(Map.entry(input.getName(), new String(input.getBytes())));
                    } catch (IOException e) {
                        sink.error(e);
                    }
                }
            })
-            .collectMap(AbstractMap.SimpleEntry::getKey, AbstractMap.SimpleEntry::getValue);
+            .collectMap(Map.Entry::getKey, Map.Entry::getValue);
    }

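One behavioral nuance of swapping `AbstractMap.SimpleEntry` for `Map.entry`: the latter rejects null keys and values, which appears safe here since the emitted values are storage URIs and form-field contents. A short illustration of the difference:

```java
import java.util.AbstractMap;
import java.util.Map;

public class EntryNullability {
    public static void main(String[] args) {
        System.out.println(new AbstractMap.SimpleEntry<>("key", (String) null)); // prints key=null, allowed
        System.out.println(Map.entry("key", (String) null));                     // throws NullPointerException
    }
}
```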
/**
|
||||
@@ -287,9 +287,10 @@ public class FlowInputOutput {
|
||||
Input<?> input = resolvable.get().input();
|
||||
|
||||
try {
|
||||
// resolve all input dependencies and check whether input is enabled
|
||||
final Map<String, InputAndValue> dependencies = resolveAllDependentInputs(input, flow, execution, inputs, decryptSecrets);
|
||||
final RunContext runContext = buildRunContextForExecutionAndInputs(flow, execution, dependencies, decryptSecrets);
|
||||
// Resolve all input dependencies and check whether input is enabled
|
||||
// Note: Secrets are always decrypted here because they can be part of expressions used to render inputs such as SELECT & MULTI_SELECT.
|
||||
final Map<String, InputAndValue> dependencies = resolveAllDependentInputs(input, flow, execution, inputs, true);
|
||||
final RunContext runContext = buildRunContextForExecutionAndInputs(flow, execution, dependencies, true);
|
||||
|
||||
boolean isInputEnabled = dependencies.isEmpty() || dependencies.values().stream().allMatch(InputAndValue::enabled);
|
||||
|
||||
@@ -329,7 +330,8 @@ public class FlowInputOutput {
|
||||
|
||||
// resolve default if needed
|
||||
if (value == null && input.getDefaults() != null) {
|
||||
value = resolveDefaultValue(input, runContext);
|
||||
RunContext runContextForDefault = decryptSecrets ? runContext : buildRunContextForExecutionAndInputs(flow, execution, dependencies, false);
|
||||
value = resolveDefaultValue(input, runContextForDefault);
|
||||
resolvable.isDefault(true);
|
||||
}
|
||||
|
||||
|
||||
@@ -176,6 +176,10 @@ public abstract class RunContext implements PropertyContext {
|
||||
*/
|
||||
public abstract KVStore namespaceKv(String namespace);
|
||||
|
||||
/**
|
||||
* @deprecated use #namespaceKv(String) instead
|
||||
*/
|
||||
@Deprecated(since = "1.1.0", forRemoval = true)
|
||||
public StateStore stateStore() {
|
||||
return new StateStore(this, true);
|
||||
}
|
||||
|
||||
@@ -10,7 +10,6 @@ import io.kestra.core.models.flows.FlowInterface;
|
||||
import io.kestra.core.models.flows.Input;
|
||||
import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.models.flows.input.SecretInput;
|
||||
import io.kestra.core.models.property.Property;
|
||||
import io.kestra.core.models.property.PropertyContext;
|
||||
import io.kestra.core.models.tasks.Task;
|
||||
import io.kestra.core.models.triggers.AbstractTrigger;
|
||||
@@ -100,7 +99,7 @@ public final class RunVariables {
|
||||
* @return a new immutable {@link Map}.
|
||||
*/
|
||||
static Map<String, Object> of(final AbstractTrigger trigger) {
|
||||
return ImmutableMap.of(
|
||||
return Map.of(
|
||||
"id", trigger.getId(),
|
||||
"type", trigger.getType()
|
||||
);
|
||||
@@ -282,12 +281,15 @@ public final class RunVariables {
|
||||
}
|
||||
|
||||
if (flow != null && flow.getInputs() != null) {
|
||||
// Create a new PropertyContext with 'flow' variables which are required by some pebble expressions.
|
||||
PropertyContextWithVariables context = new PropertyContextWithVariables(propertyContext, Map.of("flow", RunVariables.of(flow)));
|
||||
|
||||
// we add default inputs value from the flow if not already set, this will be useful for triggers
|
||||
flow.getInputs().stream()
|
||||
.filter(input -> input.getDefaults() != null && !inputs.containsKey(input.getId()))
|
||||
.forEach(input -> {
|
||||
try {
|
||||
inputs.put(input.getId(), FlowInputOutput.resolveDefaultValue(input, propertyContext));
|
||||
inputs.put(input.getId(), FlowInputOutput.resolveDefaultValue(input, context));
|
||||
} catch (IllegalVariableEvaluationException e) {
|
||||
// Silent catch, if an input depends on another input, or a variable that is populated at runtime / input filling time, we can't resolve it here.
|
||||
}
|
||||
@@ -391,4 +393,20 @@ public final class RunVariables {
|
||||
}
|
||||
|
||||
private RunVariables(){}
|
||||
|
||||
private record PropertyContextWithVariables(
|
||||
PropertyContext delegate,
|
||||
Map<String, Object> variables
|
||||
) implements PropertyContext {
|
||||
|
||||
@Override
|
||||
public String render(String inline, Map<String, Object> variables) throws IllegalVariableEvaluationException {
|
||||
return delegate.render(inline, variables.isEmpty() ? this.variables : variables);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, Object> render(Map<String, Object> inline, Map<String, Object> variables) throws IllegalVariableEvaluationException {
|
||||
return delegate.render(inline, variables.isEmpty() ? this.variables : variables);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,14 +1,15 @@
package io.kestra.core.runners.pebble.filters;

-import java.util.List;
-import java.util.Map;
-
import com.google.common.collect.Lists;

import io.pebbletemplates.pebble.error.PebbleException;
import io.pebbletemplates.pebble.extension.Filter;
import io.pebbletemplates.pebble.template.EvaluationContext;
import io.pebbletemplates.pebble.template.PebbleTemplate;

+import java.util.List;
+import java.util.Map;
+
public class ChunkFilter implements Filter {
    @Override
    public List<String> getArgumentNames() {
@@ -30,6 +31,10 @@ public class ChunkFilter implements Filter {
            throw new PebbleException(null, "'chunk' filter can only be applied to List. Actual type was: " + input.getClass().getName(), lineNumber, self.getName());
        }

-        return Lists.partition((List) input, ((Long) args.get("size")).intValue());
+        Object sizeObj = args.get("size");
+        if (!(sizeObj instanceof Number)) {
+            throw new PebbleException(null, "'chunk' filter argument 'size' must be a number. Actual type was: " + sizeObj.getClass().getName(), lineNumber, self.getName());
+        }
+        return Lists.partition((List) input, ((Number) sizeObj).intValue());
    }
}
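For reference, the filter delegates to Guava, which splits the list into consecutive sublists of at most `size` elements; the stricter argument check above surfaces an unexpectedly typed `size` as a clear `PebbleException` rather than a cast failure. A standalone sketch:

```java
import com.google.common.collect.Lists;

import java.util.List;

public class ChunkDemo {
    public static void main(String[] args) {
        List<Integer> items = List.of(1, 2, 3, 4, 5);
        System.out.println(Lists.partition(items, 2)); // [[1, 2], [3, 4], [5]]
    }
}
```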
@@ -17,12 +17,17 @@ import java.util.List;
import java.util.Map;

public class JqFilter implements Filter {
-    private final Scope scope;
+    // Load Scope once as static to avoid repeated initialization
+    // This improves performance by loading builtin functions only once when the class loads
+    private static final Scope SCOPE;
    private final List<String> argumentNames = new ArrayList<>();

+    static {
+        SCOPE = Scope.newEmptyScope();
+        BuiltinFunctionLoader.getInstance().loadFunctions(Versions.JQ_1_6, SCOPE);
+    }
+
    public JqFilter() {
-        scope = Scope.newEmptyScope();
-        BuiltinFunctionLoader.getInstance().loadFunctions(Versions.JQ_1_6, scope);
        this.argumentNames.add("expression");
    }

@@ -43,10 +48,7 @@ public class JqFilter implements Filter {

        String pattern = (String) args.get("expression");

-        Scope rootScope = Scope.newEmptyScope();
-        BuiltinFunctionLoader.getInstance().loadFunctions(Versions.JQ_1_6, rootScope);
        try {

            JsonQuery q = JsonQuery.compile(pattern, Versions.JQ_1_6);

            JsonNode in;
@@ -59,7 +61,7 @@ public class JqFilter implements Filter {
            final List<Object> out = new ArrayList<>();

            try {
-                q.apply(scope, in, v -> {
+                q.apply(Scope.newChildScope(SCOPE), in, v -> {
                    if (v instanceof TextNode) {
                        out.add(v.textValue());
                    } else if (v instanceof NullNode) {
@@ -38,7 +38,7 @@ public class KvFunction implements Function {
|
||||
String key = getKey(args, self, lineNumber);
|
||||
String namespace = (String) args.get(NAMESPACE_ARG);
|
||||
|
||||
Boolean errorOnMissing = Optional.ofNullable((Boolean) args.get(ERROR_ON_MISSING_ARG)).orElse(true);
|
||||
boolean errorOnMissing = Optional.ofNullable((Boolean) args.get(ERROR_ON_MISSING_ARG)).orElse(true);
|
||||
|
||||
Map<String, String> flow = (Map<String, String>) context.getVariable("flow");
|
||||
String flowNamespace = flow.get(NAMESPACE_ARG);
|
||||
@@ -53,11 +53,16 @@ public class KvFunction implements Function {
|
||||
// we didn't check allowedNamespace here as it's checked in the kvStoreService itself
|
||||
value = kvStoreService.get(flowTenantId, namespace, flowNamespace).getValue(key);
|
||||
}
|
||||
} catch (ResourceExpiredException e) {
|
||||
if (errorOnMissing) {
|
||||
throw new PebbleException(e, e.getMessage(), lineNumber, self.getName());
|
||||
}
|
||||
value = Optional.empty();
|
||||
} catch (Exception e) {
|
||||
throw new PebbleException(e, e.getMessage(), lineNumber, self.getName());
|
||||
}
|
||||
|
||||
if (value.isEmpty() && errorOnMissing == Boolean.TRUE) {
|
||||
if (value.isEmpty() && errorOnMissing) {
|
||||
throw new PebbleException(null, "The key '" + key + "' does not exist in the namespace '" + namespace + "'.", lineNumber, self.getName());
|
||||
}
|
||||
|
||||
@@ -85,4 +90,4 @@ public class KvFunction implements Function {
|
||||
|
||||
return (String) args.get(KEY_ARGS);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,21 +1,29 @@
|
||||
package io.kestra.core.secret;
|
||||
|
||||
import io.kestra.core.models.QueryFilter;
|
||||
import io.kestra.core.repositories.ArrayListTotal;
|
||||
import io.micronaut.data.model.Pageable;
|
||||
import jakarta.annotation.PostConstruct;
|
||||
import jakarta.inject.Singleton;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.apache.commons.lang3.Strings;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Base64;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.function.Predicate;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
@Singleton
|
||||
@Slf4j
|
||||
public class SecretService {
|
||||
public class SecretService<META> {
|
||||
private static final String SECRET_PREFIX = "SECRET_";
|
||||
|
||||
private Map<String, String> decodedSecrets;
|
||||
|
||||
|
||||
@PostConstruct
|
||||
private void postConstruct() {
|
||||
this.decode();
|
||||
@@ -46,6 +54,28 @@ public class SecretService {
|
||||
return secret;
|
||||
}
|
||||
|
||||
public ArrayListTotal<META> list(Pageable pageable, String tenantId, List<QueryFilter> filters) throws IOException {
|
||||
final Predicate<String> queryPredicate = filters.stream()
|
||||
.filter(filter -> filter.field().equals(QueryFilter.Field.QUERY) && filter.value() != null)
|
||||
.findFirst()
|
||||
.map(filter -> {
|
||||
if (filter.operation().equals(QueryFilter.Op.EQUALS)) {
|
||||
return (Predicate<String>) s -> Strings.CI.contains(s, (String) filter.value());
|
||||
} else if (filter.operation().equals(QueryFilter.Op.NOT_EQUALS)) {
|
||||
return (Predicate<String>) s -> !Strings.CI.contains(s, (String) filter.value());
|
||||
} else {
|
||||
throw new IllegalArgumentException("Unsupported operation for QUERY filter: " + filter.operation());
|
||||
}
|
||||
})
|
||||
.orElse(s -> true);
|
||||
|
||||
//noinspection unchecked
|
||||
return ArrayListTotal.of(
|
||||
pageable,
|
||||
decodedSecrets.keySet().stream().filter(queryPredicate).map(s -> (META) s).toList()
|
||||
);
|
||||
}
|
||||
|
||||
public Map<String, Set<String>> inheritedSecrets(String tenantId, String namespace) throws IOException {
|
||||
return Map.of(namespace, decodedSecrets.keySet());
|
||||
}
|
||||
|
||||
@@ -140,7 +140,7 @@ public final class JacksonMapper {

        return mapper
            .configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false)
-            .setSerializationInclusion(JsonInclude.Include.NON_NULL)
+            .setDefaultPropertyInclusion(JsonInclude.Include.NON_NULL)
            .registerModule(new JavaTimeModule())
            .registerModule(new Jdk8Module())
            .registerModule(new ParameterNamesModule())
@@ -153,7 +153,7 @@ public final class JacksonMapper {

    private static ObjectMapper createIonObjectMapper() {
        return configure(new IonObjectMapper(new IonFactory(createIonSystem())))
-            .setSerializationInclusion(JsonInclude.Include.ALWAYS)
+            .setDefaultPropertyInclusion(JsonInclude.Include.ALWAYS)
            .registerModule(new IonModule());
    }

@@ -172,7 +172,7 @@ public final class JacksonMapper {

        return Pair.of(patch, revert);
    }

    public static JsonNode applyPatchesOnJsonNode(JsonNode jsonObject, List<JsonNode> patches) {
        for (JsonNode patch : patches) {
            try {
@@ -3,15 +3,21 @@ package io.kestra.core.services;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.queues.QueueException;
|
||||
import io.kestra.core.runners.ConcurrencyLimit;
|
||||
import jakarta.inject.Singleton;
|
||||
|
||||
import java.util.EnumSet;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
|
||||
@Singleton
|
||||
public class ConcurrencyLimitService {
|
||||
/**
|
||||
* Contains methods to manage concurrency limit.
|
||||
* This is designed to be used by the API, the executor use lower level primitives.
|
||||
*/
|
||||
public interface ConcurrencyLimitService {
|
||||
|
||||
protected static final Set<State.Type> VALID_TARGET_STATES =
|
||||
Set<State.Type> VALID_TARGET_STATES =
|
||||
EnumSet.of(State.Type.RUNNING, State.Type.CANCELLED, State.Type.FAILED);
|
||||
|
||||
/**
|
||||
@@ -19,18 +25,20 @@ public class ConcurrencyLimitService {
|
||||
*
|
||||
* @throws IllegalArgumentException in case the execution is not queued.
|
||||
*/
|
||||
public Execution unqueue(Execution execution, State.Type state) throws QueueException {
|
||||
if (execution.getState().getCurrent() != State.Type.QUEUED) {
|
||||
throw new IllegalArgumentException("Only QUEUED execution can be unqueued");
|
||||
}
|
||||
Execution unqueue(Execution execution, State.Type state) throws QueueException;
|
||||
|
||||
state = (state == null) ? State.Type.RUNNING : state;
|
||||
/**
|
||||
* Find concurrency limits.
|
||||
*/
|
||||
List<ConcurrencyLimit> find(String tenantId);
|
||||
|
||||
// Validate the target state, throwing an exception if the state is invalid
|
||||
if (!VALID_TARGET_STATES.contains(state)) {
|
||||
throw new IllegalArgumentException("Invalid target state: " + state + ". Valid states are: " + VALID_TARGET_STATES);
|
||||
}
|
||||
/**
|
||||
* Update a concurrency limit.
|
||||
*/
|
||||
ConcurrencyLimit update(ConcurrencyLimit concurrencyLimit);
|
||||
|
||||
return execution.withState(state);
|
||||
}
|
||||
/**
|
||||
* Find a concurrency limit by its identifier.
|
||||
*/
|
||||
Optional<ConcurrencyLimit> findById(String tenantId, String namespace, String flowId);
|
||||
}
|
||||
|
||||
@@ -14,6 +14,7 @@ import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.models.flows.input.InputAndValue;
|
||||
import io.kestra.core.models.hierarchies.AbstractGraphTask;
|
||||
import io.kestra.core.models.hierarchies.GraphCluster;
|
||||
import io.kestra.core.models.tasks.FlowableTask;
|
||||
import io.kestra.core.models.tasks.ResolvedTask;
|
||||
import io.kestra.core.models.tasks.Task;
|
||||
import io.kestra.core.models.tasks.retrys.AbstractRetry;
|
||||
@@ -121,21 +122,38 @@ public class ExecutionService {
|
||||
* Retry set the given taskRun in created state
|
||||
* and return the execution in running state
|
||||
**/
|
||||
public Execution retryTask(Execution execution, String taskRunId) {
|
||||
List<TaskRun> newTaskRuns = execution
|
||||
.getTaskRunList()
|
||||
.stream()
|
||||
.map(taskRun -> {
|
||||
if (taskRun.getId().equals(taskRunId)) {
|
||||
return taskRun
|
||||
.withState(State.Type.CREATED);
|
||||
public Execution retryTask(Execution execution, Flow flow, String taskRunId) throws InternalException {
|
||||
TaskRun taskRun = execution.findTaskRunByTaskRunId(taskRunId).withState(State.Type.CREATED);
|
||||
List<TaskRun> taskRunList = execution.getTaskRunList();
|
||||
|
||||
if (taskRun.getParentTaskRunId() != null) {
|
||||
// we need to find the parent to remove any errors or finally tasks already executed
|
||||
TaskRun parentTaskRun = execution.findTaskRunByTaskRunId(taskRun.getParentTaskRunId());
|
||||
Task parentTask = flow.findTaskByTaskId(parentTaskRun.getTaskId());
|
||||
if (parentTask instanceof FlowableTask<?> flowableTask) {
|
||||
if (flowableTask.getErrors() != null) {
|
||||
List<Task> allErrors = Stream.concat(flowableTask.getErrors().stream()
|
||||
.filter(task -> task.isFlowable() && ((FlowableTask<?>) task).getErrors() != null)
|
||||
.flatMap(task -> ((FlowableTask<?>) task).getErrors().stream()),
|
||||
flowableTask.getErrors().stream())
|
||||
.toList();
|
||||
allErrors.forEach(error -> taskRunList.removeIf(t -> t.getTaskId().equals(error.getId())));
|
||||
}
|
||||
|
||||
return taskRun;
|
||||
})
|
||||
.toList();
|
||||
if (flowableTask.getFinally() != null) {
|
||||
List<Task> allFinally = Stream.concat(flowableTask.getFinally().stream()
|
||||
.filter(task -> task.isFlowable() && ((FlowableTask<?>) task).getFinally() != null)
|
||||
.flatMap(task -> ((FlowableTask<?>) task).getFinally().stream()),
|
||||
flowableTask.getFinally().stream())
|
||||
.toList();
|
||||
allFinally.forEach(error -> taskRunList.removeIf(t -> t.getTaskId().equals(error.getId())));
|
||||
}
|
||||
}
|
||||
|
||||
return execution.withTaskRunList(newTaskRuns).withState(State.Type.RUNNING);
|
||||
return execution.withTaskRunList(taskRunList).withTaskRun(taskRun).withState(State.Type.RUNNING);
|
||||
}
|
||||
|
||||
return execution.withTaskRun(taskRun).withState(State.Type.RUNNING);
|
||||
}
|
||||
|
||||
public Execution retryWaitFor(Execution execution, String flowableTaskRunId) {
|
||||
@@ -709,7 +727,7 @@ public class ExecutionService {
|
||||
// An edge case can exist where the execution is resumed automatically before we resume it with a killing.
|
||||
try {
|
||||
newExecution = this.resume(execution, flow, State.Type.KILLING, null);
|
||||
newExecution = newExecution.withState(afterKillState.orElse(newExecution.getState().getCurrent()));
|
||||
newExecution = newExecution.withState(killingOrAfterKillState);
|
||||
} catch (Exception e) {
|
||||
// if we cannot resume, we set it anyway to killing, so we don't throw
|
||||
log.warn("Unable to resume a paused execution before killing it", e);
|
||||
@@ -723,6 +741,7 @@ public class ExecutionService {
|
||||
// immediately without publishing a CrudEvent like it's done on pause/resume method.
|
||||
return newExecution;
|
||||
}
|
||||
|
||||
public Execution kill(Execution execution, FlowInterface flow) {
|
||||
return this.kill(execution, flow, Optional.empty());
|
||||
}
|
||||
|
||||
@@ -285,6 +285,10 @@ public class FlowService {
|
||||
if ((subflowId != null && subflowId.matches(regex)) || (namespace != null && namespace.matches(regex))) {
|
||||
return;
|
||||
}
|
||||
if (subflowId == null || namespace == null) {
|
||||
// those fields are mandatory so the mandatory validation will apply
|
||||
return;
|
||||
}
|
||||
Optional<Flow> optional = findById(tenantId, subflow.getNamespace(), subflow.getFlowId());
|
||||
|
||||
if (optional.isEmpty()) {
|
||||
|
||||
@@ -1,9 +1,11 @@
package io.kestra.core.services;

import io.kestra.core.repositories.KvMetadataRepositoryInterface;
import io.kestra.core.storages.StorageInterface;
import io.kestra.core.storages.kv.InternalKVStore;
import io.kestra.core.storages.kv.KVStore;
import io.kestra.core.storages.kv.KVStoreException;
import io.micronaut.data.model.Pageable;
import jakarta.annotation.Nullable;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
@@ -12,6 +14,8 @@ import java.io.IOException;

@Singleton
public class KVStoreService {
@Inject
private KvMetadataRepositoryInterface kvMetadataRepository;

@Inject
private StorageInterface storageInterface;
@@ -46,9 +50,9 @@ public class KVStoreService {
boolean checkIfNamespaceExists = fromNamespace == null || isNotParentNamespace(namespace, fromNamespace);
if (checkIfNamespaceExists && !namespaceService.isNamespaceExists(tenant, namespace)) {
// if it didn't exist, we still check if there are KV as you can add KV without creating a namespace in DB or having flows in it
KVStore kvStore = new InternalKVStore(tenant, namespace, storageInterface);
KVStore kvStore = new InternalKVStore(tenant, namespace, storageInterface, kvMetadataRepository);
try {
if (kvStore.list().isEmpty()) {
if (kvStore.list(Pageable.from(1, 1)).isEmpty()) {
throw new KVStoreException(String.format(
"Cannot access the KV store. The namespace '%s' does not exist.",
namespace
@@ -60,7 +64,7 @@ public class KVStoreService {
return kvStore;
}

return new InternalKVStore(tenant, namespace, storageInterface);
return new InternalKVStore(tenant, namespace, storageInterface, kvMetadataRepository);
}

private static boolean isNotParentNamespace(final String parentNamespace, final String childNamespace) {
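For orientation, a minimal sketch of the namespace probe the service now performs, followed by a read. `tenant`, `namespace`, `storageInterface`, and `kvMetadataRepository` are the injected values from the hunk above; the key name is illustrative and the read uses `getRawValue`, which appears in the InternalKVStore hunk further down.

```java
// Sketch only: the store is asked for a single page of one entry, so an unknown namespace
// is rejected without listing every KV entry it might contain.
KVStore store = new InternalKVStore(tenant, namespace, storageInterface, kvMetadataRepository);
if (store.list(Pageable.from(1, 1)).isEmpty()) {
    throw new KVStoreException(String.format(
        "Cannot access the KV store. The namespace '%s' does not exist.", namespace));
}
Optional<String> raw = store.getRawValue("my_key"); // illustrative key name
```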
@@ -58,10 +58,10 @@ import java.util.stream.Stream;
public class PluginDefaultService {
private static final ObjectMapper NON_DEFAULT_OBJECT_MAPPER = JacksonMapper.ofYaml()
.copy()
.setSerializationInclusion(JsonInclude.Include.NON_DEFAULT);
.setDefaultPropertyInclusion(JsonInclude.Include.NON_DEFAULT);

private static final ObjectMapper OBJECT_MAPPER = JacksonMapper.ofYaml().copy()
.setSerializationInclusion(JsonInclude.Include.NON_NULL);
.setDefaultPropertyInclusion(JsonInclude.Include.NON_NULL);
private static final String PLUGIN_DEFAULTS_FIELD = "pluginDefaults";

private static final TypeReference<List<PluginDefault>> PLUGIN_DEFAULTS_TYPE_REF = new TypeReference<>() {
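The two mappers above move from `setSerializationInclusion` to `setDefaultPropertyInclusion`. A small self-contained sketch of what NON_DEFAULT inclusion does on a plain Jackson YAML mapper; the `Sample` bean and its field values are illustrative only, not Kestra types.

```java
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.yaml.YAMLMapper;

public class InclusionSketch {
    // Hypothetical bean used only to show the inclusion behaviour.
    public static class Sample {
        public String id;
        public int retries;
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper nonDefault = new YAMLMapper()
            .setDefaultPropertyInclusion(JsonInclude.Include.NON_DEFAULT);

        Sample sample = new Sample();
        sample.id = "my-task";
        // `retries` is left at 0 (its default), so NON_DEFAULT drops it from the output;
        // only `id` should be serialized.
        System.out.println(nonDefault.writeValueAsString(sample));
    }
}
```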
@@ -17,6 +17,10 @@ import java.net.URI;
import java.util.Objects;
import java.util.Optional;

/**
* @deprecated use KVStore instead
*/
@Deprecated(since = "1.1.0", forRemoval = true)
public record StateStore(RunContext runContext, boolean hashTaskRunValue) {

public InputStream getState(String stateName, @Nullable String stateSubName, String taskRunValue) throws IOException, ResourceExpiredException {
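StateStore is now deprecated in favour of the KV store. A minimal migration sketch, assuming the caller can obtain the namespace-scoped `KVStore` (the exact accessor is not part of this diff) and that the state was previously kept under a single key; both names below are illustrative.

```java
// Hypothetical migration sketch: read through the KV store instead of StateStore.
// `kvStore` is assumed to be the namespace KVStore available to the caller.
Optional<String> state = kvStore.getRawValue("my_state_key"); // roughly replaces stateStore.getState(...)
boolean present = kvStore.exists("my_state_key");
```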
@@ -4,6 +4,7 @@ import com.google.common.annotations.VisibleForTesting;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowId;
import io.kestra.core.utils.Hashing;
import io.kestra.core.utils.Slugify;
import jakarta.annotation.Nullable;
@@ -61,11 +62,11 @@ public class StorageContext {
taskRun.getValue()
);
}

/**
* Factory method for constructing a new {@link StorageContext} scoped to a given {@link Flow}.
*/
public static StorageContext forFlow(Flow flow) {
public static StorageContext forFlow(FlowId flow) {
return new StorageContext(flow.getTenantId(), flow.getNamespace(), flow.getId());
}
@@ -1,23 +1,32 @@
package io.kestra.core.storages.kv;

import io.kestra.core.exceptions.ResourceExpiredException;
import io.kestra.core.models.FetchVersion;
import io.kestra.core.models.QueryFilter;
import io.kestra.core.models.kv.PersistedKvMetadata;
import io.kestra.core.repositories.ArrayListTotal;
import io.kestra.core.repositories.KvMetadataRepositoryInterface;
import io.kestra.core.serializers.JacksonMapper;
import io.kestra.core.storages.FileAttributes;
import io.kestra.core.storages.StorageInterface;
import io.kestra.core.storages.StorageObject;
import io.kestra.core.utils.ListUtils;
import io.micronaut.data.model.Pageable;
import io.micronaut.data.model.Sort;
import jakarta.annotation.Nullable;
import lombok.extern.slf4j.Slf4j;

import java.io.ByteArrayInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.time.Instant;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.*;
import java.util.function.Function;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import static io.kestra.core.utils.Rethrow.throwFunction;

@@ -25,6 +34,7 @@ import static io.kestra.core.utils.Rethrow.throwFunction;
* The default {@link KVStore} implementation.
*
*/
@Slf4j
public class InternalKVStore implements KVStore {

private static final Pattern DURATION_PATTERN = Pattern.compile("^P(?=[^T]|T.)(?:\\d*D)?(?:T(?=.)(?:\\d*H)?(?:\\d*M)?(?:\\d*S)?)?$");
@@ -32,6 +42,7 @@ public class InternalKVStore implements KVStore {
private final String namespace;
private final String tenant;
private final StorageInterface storage;
private final KvMetadataRepositoryInterface kvMetadataRepository;

/**
* Creates a new {@link InternalKVStore} instance.
@@ -40,10 +51,11 @@ public class InternalKVStore implements KVStore {
* @param tenant The tenant.
* @param storage The storage.
*/
public InternalKVStore(@Nullable final String tenant, final String namespace, final StorageInterface storage) {
this.namespace = Objects.requireNonNull(namespace, "namespace cannot be null");
public InternalKVStore(@Nullable final String tenant, @Nullable final String namespace, final StorageInterface storage, final KvMetadataRepositoryInterface kvMetadataRepository) {
this.namespace = namespace;
this.storage = Objects.requireNonNull(storage, "storage cannot be null");
this.tenant = tenant;
this.kvMetadataRepository = kvMetadataRepository;
}

/**
@@ -66,9 +78,18 @@ public class InternalKVStore implements KVStore {
"Cannot set value for key '%s'. Key already exists and `overwrite` is set to `false`.", key));
}

byte[] serialized = JacksonMapper.ofIon().writeValueAsBytes(value.value());
Object actualValue = value.value();
byte[] serialized = actualValue instanceof Duration ? actualValue.toString().getBytes(StandardCharsets.UTF_8) : JacksonMapper.ofIon().writeValueAsBytes(actualValue);

this.storage.put(this.tenant, this.namespace, this.storageUri(key), new StorageObject(
PersistedKvMetadata saved = this.kvMetadataRepository.save(PersistedKvMetadata.builder()
.tenantId(this.tenant)
.namespace(this.namespace)
.name(key)
.description(Optional.ofNullable(value.metadata()).map(KVMetadata::getDescription).orElse(null))
.expirationDate(Optional.ofNullable(value.metadata()).map(KVMetadata::getExpirationDate).orElse(null))
.deleted(false)
.build());
this.storage.put(this.tenant, this.namespace, this.storageUri(key, saved.getVersion()), new StorageObject(
value.metadataAsMap(),
new ByteArrayInputStream(serialized)
));
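The hunk above also special-cases `java.time.Duration`: instead of Ion-serialising it, the value is stored as its ISO-8601 string. A standalone sketch showing that representation together with the class's duration pattern; the pattern's read-side use is not shown in this diff, so treating it as the recogniser for stored durations is an assumption.

```java
import java.time.Duration;
import java.util.regex.Pattern;

public class DurationKvSketch {
    // Same pattern as InternalKVStore#DURATION_PATTERN, presumably used to recognise
    // ISO-8601 duration strings when a value is read back.
    private static final Pattern DURATION_PATTERN =
        Pattern.compile("^P(?=[^T]|T.)(?:\\d*D)?(?:T(?=.)(?:\\d*H)?(?:\\d*M)?(?:\\d*S)?)?$");

    public static void main(String[] args) {
        String serialized = Duration.ofDays(30).toString();                 // "PT720H"
        System.out.println(serialized);
        System.out.println(DURATION_PATTERN.matcher(serialized).matches()); // true
        System.out.println(Duration.parse(serialized));                     // round-trips
    }
}
```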
@@ -91,20 +112,30 @@ public class InternalKVStore implements KVStore {
public Optional<String> getRawValue(String key) throws IOException, ResourceExpiredException {
KVStore.validateKey(key);

Optional<PersistedKvMetadata> maybeMetadata = this.kvMetadataRepository.findByName(this.tenant, this.namespace, key);

int version = maybeMetadata.map(PersistedKvMetadata::getVersion).orElse(1);
if (maybeMetadata.isPresent()) {
PersistedKvMetadata metadata = maybeMetadata.get();
if (metadata.isDeleted()) {
return Optional.empty();
}

if (Optional.ofNullable(metadata.getExpirationDate()).map(Instant.now()::isAfter).orElse(false)) {
this.delete(key);
throw new ResourceExpiredException("The requested value has expired");
}
}

StorageObject withMetadata;
try {
withMetadata = this.storage.getWithMetadata(this.tenant, this.namespace, this.storageUri(key));
withMetadata = this.storage.getWithMetadata(this.tenant, this.namespace, this.storageUri(key, version));
} catch (FileNotFoundException e) {
return Optional.empty();
}
KVValueAndMetadata kvStoreValueWrapper = KVValueAndMetadata.from(withMetadata);

Instant expirationDate = kvStoreValueWrapper.metadata().getExpirationDate();
if (expirationDate != null && Instant.now().isAfter(expirationDate)) {
this.delete(key);
throw new ResourceExpiredException("The requested value has expired");
}
return Optional.of((String)(kvStoreValueWrapper.value()));
return Optional.of((String) (kvStoreValueWrapper.value()));
}

/**
@@ -113,26 +144,14 @@ public class InternalKVStore implements KVStore {
@Override
public boolean delete(String key) throws IOException {
KVStore.validateKey(key);
URI uri = this.storageUri(key);
boolean deleted = this.storage.delete(this.tenant, this.namespace, uri);
URI metadataURI = URI.create(uri.getPath() + ".metadata");
if (this.storage.exists(this.tenant, this.namespace, metadataURI)){
this.storage.delete(this.tenant, this.namespace, metadataURI);
Optional<PersistedKvMetadata> maybeMetadata = this.kvMetadataRepository.findByName(this.tenant, this.namespace, key);
if (maybeMetadata.map(PersistedKvMetadata::isDeleted).orElse(true)) {
return false;
}
return deleted;

}
this.kvMetadataRepository.delete(maybeMetadata.get());
return true;

/**
* {@inheritDoc}
*/
@Override
public List<KVEntry> list() throws IOException {
List<FileAttributes> list = listAllFromStorage();
return list.stream()
.map(throwFunction(KVEntry::from))
.filter(kvEntry -> Optional.ofNullable(kvEntry.expirationDate()).map(expirationDate -> Instant.now().isBefore(expirationDate)).orElse(true))
.toList();
}

/**
@@ -140,18 +159,26 @@ public class InternalKVStore implements KVStore {
*/
@Override
public List<KVEntry> listAll() throws IOException {
List<FileAttributes> list = listAllFromStorage();
return list.stream()
.map(throwFunction(KVEntry::from))
.toList();
return this.list(Pageable.UNPAGED, Collections.emptyList(), true, true, FetchVersion.ALL);
}

private List<FileAttributes> listAllFromStorage() throws IOException {
try {
return this.storage.list(this.tenant, this.namespace, this.storageUri(null));
} catch (FileNotFoundException e) {
return Collections.emptyList();
@Override
public ArrayListTotal<KVEntry> list(Pageable pageable, List<QueryFilter> filters, boolean allowDeleted, boolean allowExpired, FetchVersion fetchBehavior) throws IOException {
if (this.namespace != null) {
filters = Stream.concat(
filters.stream(),
Stream.of(QueryFilter.builder().field(QueryFilter.Field.NAMESPACE).operation(QueryFilter.Op.EQUALS).value(this.namespace).build())
).toList();
}

return this.kvMetadataRepository.find(
pageable,
this.tenant,
filters,
allowDeleted,
allowExpired,
fetchBehavior
).map(throwFunction(KVEntry::from));
}

/**
@@ -161,15 +188,39 @@ public class InternalKVStore implements KVStore {
public Optional<KVEntry> get(final String key) throws IOException {
KVStore.validateKey(key);

try {
KVEntry entry = KVEntry.from(this.storage.getAttributes(this.tenant, this.namespace, this.storageUri(key)));
if (entry.expirationDate() != null && Instant.now().isAfter(entry.expirationDate())) {
this.delete(key);
return Optional.empty();
}
return Optional.of(entry);
} catch (FileNotFoundException e) {
Optional<PersistedKvMetadata> maybeMetadata = this.kvMetadataRepository.findByName(this.tenant, this.namespace, key);
if (maybeMetadata.isEmpty() || maybeMetadata.get().isDeleted()) {
return Optional.empty();
}

return Optional.of(KVEntry.from(maybeMetadata.get()));
}

/**
* {@inheritDoc}
*/
@Override
public Integer purge(List<KVEntry> kvEntries) throws IOException {
Integer purgedMetadataCount = this.kvMetadataRepository.purge(kvEntries.stream().map(kv -> PersistedKvMetadata.from(tenant, kv)).toList());

long actualDeletedEntries = kvEntries.stream()
.map(KVEntry::key)
.map(this::storageUri)
.map(throwFunction(uri -> {
boolean deleted = this.storage.delete(tenant, namespace, uri);
URI metadataURI = URI.create(uri.getPath() + ".metadata");
if (this.storage.exists(this.tenant, this.namespace, metadataURI)) {
this.storage.delete(this.tenant, this.namespace, metadataURI);
}

return deleted;
})).filter(Boolean::booleanValue)
.count();

if (actualDeletedEntries != purgedMetadataCount) {
log.warn("KV Metadata purge reported {} deleted entries, but {} values were actually deleted from storage", purgedMetadataCount, actualDeletedEntries);
}

return purgedMetadataCount;
}
}
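Listing is now backed by the metadata repository and takes paging, query filters, and flags for deleted and expired entries plus a version-fetch behavior. A hedged fragment of how a caller might page through the latest versions, assuming `store` is an `InternalKVStore` wired as above; all types and enum values are taken from the hunks in this diff.

```java
// Sketch only: first page of 20 entries, one entry per key (latest version),
// excluding deleted and expired entries. The store adds its own namespace filter
// when it was constructed with a non-null namespace.
ArrayListTotal<KVEntry> page = store.list(
    Pageable.from(1, 20),
    Collections.emptyList(),
    false,               // allowDeleted
    false,               // allowExpired
    FetchVersion.LATEST
);
page.forEach(entry -> System.out.println(entry.key() + " v" + entry.version()));
```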
@@ -1,5 +1,6 @@
package io.kestra.core.storages.kv;

import io.kestra.core.models.kv.PersistedKvMetadata;
import io.kestra.core.storages.FileAttributes;
import jakarta.annotation.Nullable;

@@ -7,12 +8,23 @@ import java.io.IOException;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.Optional;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public record KVEntry(String key, @Nullable String description, Instant creationDate, Instant updateDate, @Nullable Instant expirationDate) {
public static KVEntry from(FileAttributes fileAttributes) throws IOException {
public record KVEntry(String namespace, String key, Integer version, @Nullable String description, Instant creationDate, Instant updateDate, @Nullable Instant expirationDate) {
private static final Pattern captureKeyAndVersion = Pattern.compile("(.*)\\.ion(?:\\.v(\\d+))?$");

public static KVEntry from(String namespace, FileAttributes fileAttributes) throws IOException {
Optional<KVMetadata> kvMetadata = Optional.ofNullable(fileAttributes.getMetadata()).map(KVMetadata::new);
String fileName = fileAttributes.getFileName();
Matcher matcher = captureKeyAndVersion.matcher(fileName);
if (!matcher.matches()) {
throw new IOException("Invalid KV file name format: " + fileName);
}
return new KVEntry(
fileAttributes.getFileName().replace(".ion", ""),
namespace,
matcher.group(1),
Optional.ofNullable(matcher.group(2)).map(Integer::parseInt).orElse(1),
kvMetadata.map(KVMetadata::getDescription).orElse(null),
Instant.ofEpochMilli(fileAttributes.getCreationTime()),
Instant.ofEpochMilli(fileAttributes.getLastModifiedTime()),
@@ -21,4 +33,8 @@ public record KVEntry(String key, @Nullable String description, Instant creation
.orElse(null)
);
}

public static KVEntry from(PersistedKvMetadata persistedKvMetadata) throws IOException {
return new KVEntry(persistedKvMetadata.getNamespace(), persistedKvMetadata.getName(), persistedKvMetadata.getVersion(), persistedKvMetadata.getDescription(), persistedKvMetadata.getCreated(), persistedKvMetadata.getUpdated(), persistedKvMetadata.getExpirationDate());
}
}
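The `captureKeyAndVersion` pattern above derives the key and the optional version from the stored file name; a plain `.ion` file is treated as version 1. A standalone sketch of that parsing, with illustrative file names.

```java
import java.util.Optional;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class KvFileNameSketch {
    // Same capture pattern as KVEntry#captureKeyAndVersion: group 1 is the key, group 2 the optional version.
    private static final Pattern CAPTURE = Pattern.compile("(.*)\\.ion(?:\\.v(\\d+))?$");

    public static void main(String[] args) {
        for (String fileName : new String[]{"my_key.ion", "my_key.ion.v3"}) {
            Matcher matcher = CAPTURE.matcher(fileName);
            if (matcher.matches()) {
                String key = matcher.group(1);
                int version = Optional.ofNullable(matcher.group(2)).map(Integer::parseInt).orElse(1);
                System.out.println(key + " -> version " + version); // my_key -> version 1, then version 3
            }
        }
    }
}
```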
@@ -1,12 +1,16 @@
package io.kestra.core.storages.kv;

import io.kestra.core.exceptions.ResourceExpiredException;
import io.kestra.core.models.FetchVersion;
import io.kestra.core.models.QueryFilter;
import io.kestra.core.repositories.ArrayListTotal;
import io.kestra.core.storages.StorageContext;
import io.micronaut.data.model.Pageable;

import java.io.IOException;
import java.io.UncheckedIOException;
import java.net.URI;
import java.time.Duration;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.regex.Pattern;
@@ -25,11 +29,15 @@ public interface KVStore {
String namespace();

default URI storageUri(String key) {
return this.storageUri(key, namespace());
return this.storageUri(key, 1);
}

default URI storageUri(String key, String namespace) {
String filePath = key == null ? "" : ("/" + key + ".ion");
default URI storageUri(String key, int version) {
return this.storageUri(key, namespace(), version);
}

default URI storageUri(String key, String namespace, int version) {
String filePath = key == null ? "" : ("/" + key + ".ion") + (version > 1 ? (".v" + version) : "");
return URI.create(StorageContext.KESTRA_PROTOCOL + StorageContext.kvPrefix(namespace) + filePath);
}

@@ -72,13 +80,30 @@ public interface KVStore {
*/
boolean delete(String key) throws IOException;

/**
* Purge the provided KV entries.
*/
Integer purge(List<KVEntry> kvToDelete) throws IOException;

default ArrayListTotal<KVEntry> list() throws IOException {
return this.list(Pageable.UNPAGED);
}

default ArrayListTotal<KVEntry> list(Pageable pageable) throws IOException {
return this.list(pageable, Collections.emptyList());
}

default ArrayListTotal<KVEntry> list(Pageable pageable, List<QueryFilter> queryFilters) throws IOException {
return this.list(pageable, queryFilters, false, false, FetchVersion.LATEST);
}

/**
* Lists all the K/V store entries.
*
* @return The list of {@link KVEntry}.
* @throws IOException if an error occurred while executing the operation on the K/V store.
*/
List<KVEntry> list() throws IOException;
ArrayListTotal<KVEntry> list(Pageable pageable, List<QueryFilter> queryFilters, boolean allowDeleted, boolean allowExpired, FetchVersion fetchBehavior) throws IOException;

/**
* Lists all the K/V store entries, expired or not.
@@ -97,22 +122,22 @@ public interface KVStore {
Optional<KVEntry> get(String key) throws IOException;

/**
* Checks whether a K/V entry exists for teh given key.
* Checks whether a K/V entry exists for the given key.
*
* @param key The entry key.
* @return {@code true} of an entry exists.
* @throws IOException if an error occurred while executing the operation on the K/V store.
*/
default boolean exists(String key) throws IOException {
return list().stream().anyMatch(kvEntry -> kvEntry.key().equals(key));
return get(key).isPresent();
}

/**
* Finds a KV entry with associated metadata for a given key.
*
* @param key the KV entry key.
* @return an optional of {@link KVValueAndMetadata}.
*
*
* @throws UncheckedIOException if an error occurred while executing the operation on the K/V store.
*/
default Optional<KVValueAndMetadata> findMetadataAndValue(final String key) throws UncheckedIOException {
@@ -132,7 +157,7 @@ public interface KVStore {
throw new UncheckedIOException(e);
}
}

Pattern KEY_VALIDATOR_PATTERN = Pattern.compile("[a-zA-Z0-9][a-zA-Z0-9._-]*");

/**
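The interface now builds version-suffixed object URIs: version 1 keeps the plain `.ion` name, later versions get a `.v<n>` suffix. A standalone sketch of that path logic, copying the `filePath` expression from the default method above; the `kestra://` protocol and the `_kv` directory layout come from `StorageContext` and are assumptions for illustration only.

```java
public class KvUriSketch {
    // Mirrors the filePath expression from KVStore#storageUri(key, namespace, version).
    // The prefix below is an assumed stand-in for StorageContext.KESTRA_PROTOCOL + kvPrefix(namespace).
    static String storagePath(String namespace, String key, int version) {
        String filePath = key == null ? "" : ("/" + key + ".ion") + (version > 1 ? (".v" + version) : "");
        return "kestra:///" + namespace.replace(".", "/") + "/_kv" + filePath;
    }

    public static void main(String[] args) {
        System.out.println(storagePath("company.team", "my_key", 1)); // .../_kv/my_key.ion
        System.out.println(storagePath("company.team", "my_key", 3)); // .../_kv/my_key.ion.v3
    }
}
```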
@@ -0,0 +1,5 @@
|
||||
package io.kestra.core.utils;
|
||||
|
||||
public class RegexPatterns {
|
||||
public static final String JAVA_IDENTIFIER_REGEX = "^[A-Za-z_$][A-Za-z0-9_$]*(\\.[A-Za-z_$][A-Za-z0-9_$]*)*$";
|
||||
}
|
||||
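This new constant centralises the dotted-type-identifier check that was previously inlined in `MarkdownSource` (see its hunk further down). A standalone sketch of what it accepts and rejects, with illustrative type names.

```java
public class JavaIdentifierRegexSketch {
    private static final String JAVA_IDENTIFIER_REGEX =
        "^[A-Za-z_$][A-Za-z0-9_$]*(\\.[A-Za-z_$][A-Za-z0-9_$]*)*$";

    public static void main(String[] args) {
        System.out.println("io.kestra.plugin.core.log.Log".matches(JAVA_IDENTIFIER_REGEX)); // true
        System.out.println("io..kestra".matches(JAVA_IDENTIFIER_REGEX));                     // false: empty segment
        System.out.println("1badType".matches(JAVA_IDENTIFIER_REGEX));                       // false: starts with a digit
    }
}
```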
@@ -0,0 +1,16 @@
package io.kestra.core.validations;

import io.kestra.core.validations.validator.InputValidator;
import jakarta.validation.Constraint;
import jakarta.validation.Payload;

import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;

@Retention(RetentionPolicy.RUNTIME)
@Constraint(validatedBy = InputValidator.class)
public @interface InputValidation {
String message() default "invalid input";
Class<?>[] groups() default {};
Class<? extends Payload>[] payload() default {};
}
@@ -0,0 +1,16 @@
package io.kestra.core.validations;

import io.kestra.core.validations.validator.KvVersionBehaviorValidator;

import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import jakarta.validation.Constraint;
import jakarta.validation.Payload;

@Retention(RetentionPolicy.RUNTIME)
@Constraint(validatedBy = KvVersionBehaviorValidator.class)
public @interface KvVersionBehaviorValidation {
String message() default "invalid `version` behavior configuration";
Class<?>[] groups() default {};
Class<? extends Payload>[] payload() default {};
}
@@ -0,0 +1,46 @@
package io.kestra.core.validations.validator;

import io.kestra.core.models.flows.Input;
import io.kestra.core.runners.VariableRenderer;
import io.kestra.core.validations.InputValidation;
import io.micronaut.core.annotation.AnnotationValue;
import io.micronaut.core.annotation.Introspected;
import io.micronaut.core.annotation.NonNull;
import io.micronaut.core.annotation.Nullable;
import io.micronaut.validation.validator.constraints.ConstraintValidator;
import io.micronaut.validation.validator.constraints.ConstraintValidatorContext;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;

@Singleton
@Introspected
public class InputValidator implements ConstraintValidator<InputValidation, Input<?>> {

@Inject
VariableRenderer variableRenderer;

@Override
public boolean isValid(@Nullable Input<?> value, @NonNull AnnotationValue<InputValidation> annotationMetadata, @NonNull ConstraintValidatorContext context) {
if (value == null) {
return true; // nulls are allowed according to spec
}

if (value.getDefaults() != null && Boolean.FALSE.equals(value.getRequired())) {
context.disableDefaultConstraintViolation();
context
.buildConstraintViolationWithTemplate("Inputs with a default value must be required, since the default is always applied.")
.addConstraintViolation();
return false;
}

if (value.getDefaults() != null && value.getPrefill() != null) {
context.disableDefaultConstraintViolation();
context
.buildConstraintViolationWithTemplate("Inputs with a default value cannot also have a prefill.")
.addConstraintViolation();
return false;
}

return true;
}
}
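The validator enforces two rules on flow inputs: an input that declares a `defaults` value may not be marked `required: false`, and it may not also define a `prefill`. A standalone sketch of the same decision logic against a hypothetical stand-in record (the real `Input` type has many more fields; only the three consulted here are modelled).

```java
import java.util.Optional;

public class InputRulesSketch {
    // Hypothetical stand-in for io.kestra.core.models.flows.Input, used only to exercise the rules.
    record InputSpec(Object defaults, Boolean required, Object prefill) {}

    static Optional<String> validate(InputSpec input) {
        if (input.defaults() != null && Boolean.FALSE.equals(input.required())) {
            return Optional.of("Inputs with a default value must be required, since the default is always applied.");
        }
        if (input.defaults() != null && input.prefill() != null) {
            return Optional.of("Inputs with a default value cannot also have a prefill.");
        }
        return Optional.empty();
    }

    public static void main(String[] args) {
        System.out.println(validate(new InputSpec("dev", Boolean.FALSE, null))); // violation
        System.out.println(validate(new InputSpec("dev", null, "prod")));        // violation
        System.out.println(validate(new InputSpec("dev", null, null)));          // Optional.empty (valid)
    }
}
```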
@@ -0,0 +1,34 @@
package io.kestra.core.validations.validator;

import io.kestra.core.validations.KvVersionBehaviorValidation;
import io.kestra.plugin.core.kv.Version;
import io.micronaut.core.annotation.AnnotationValue;
import io.micronaut.core.annotation.Introspected;
import io.micronaut.core.annotation.NonNull;
import io.micronaut.core.annotation.Nullable;
import io.micronaut.validation.validator.constraints.ConstraintValidator;
import io.micronaut.validation.validator.constraints.ConstraintValidatorContext;
import jakarta.inject.Singleton;

@Singleton
@Introspected
public class KvVersionBehaviorValidator implements ConstraintValidator<KvVersionBehaviorValidation, Version> {
@Override
public boolean isValid(
@Nullable Version value,
@NonNull AnnotationValue<KvVersionBehaviorValidation> annotationMetadata,
@NonNull ConstraintValidatorContext context) {
if (value == null) {
return true;
}

if (value.getBefore() != null && value.getKeepAmount() != null) {
context.disableDefaultConstraintViolation();
context.buildConstraintViolationWithTemplate("Cannot set both 'before' and 'keepAmount' properties")
.addConstraintViolation();
return false;
}

return true;
}
}
@@ -40,14 +40,14 @@ import jakarta.validation.constraints.NotNull;
- id: hello
type: io.kestra.plugin.core.log.Log
message: Average value has gone below 10


triggers:
- id: expression_trigger
type: io.kestra.plugin.core.trigger.Schedule
cron: "*/1 * * * *"
conditions:
- type: io.kestra.plugin.core.condition.Expression
expression: "{{ kv('average_value') < 10 }}"
expression: "{{ kv('average_value') < 10 }}"
"""
)
},
@@ -16,6 +16,7 @@ import lombok.*;
import lombok.experimental.SuperBuilder;

import java.time.LocalDate;
import java.util.Map;

@SuperBuilder
@ToString
@@ -82,7 +83,7 @@ public class PublicHoliday extends Condition implements ScheduleCondition {
)
@NotNull
@Builder.Default
private Property<String> date = Property.ofExpression("{{ trigger.date }}");
private Property<String> date = Property.ofExpression("{{ trigger.date}}");

@Schema(
title = "[ISO 3166-1 alpha-2](https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2) country code. If not set, it uses the country code from the default locale.",
@@ -98,11 +99,12 @@ public class PublicHoliday extends Condition implements ScheduleCondition {

@Override
public boolean test(ConditionContext conditionContext) throws InternalException {
Map<String, Object> variables=conditionContext.getVariables();
var renderedCountry = conditionContext.getRunContext().render(this.country).as(String.class).orElse(null);
var renderedSubDivision = conditionContext.getRunContext().render(this.subDivision).as(String.class).orElse(null);

HolidayManager holidayManager = renderedCountry != null ? HolidayManager.getInstance(ManagerParameters.create(renderedCountry)) : HolidayManager.getInstance();
LocalDate currentDate = DateUtils.parseLocalDate(conditionContext.getRunContext().render(date).as(String.class).orElseThrow());
LocalDate currentDate = DateUtils.parseLocalDate(conditionContext.getRunContext().render(date).as(String.class,variables).orElseThrow());
return renderedSubDivision == null ? holidayManager.isHoliday(currentDate) : holidayManager.isHoliday(currentDate, renderedSubDivision);
}
}
@@ -35,7 +35,7 @@ import lombok.experimental.SuperBuilder;
content: |
## Execution Success Rate
This chart displays the percentage of successful executions over time.

- A **higher success rate** indicates stable and reliable workflows.

- Sudden **drops** may signal issues in task execution or external dependencies.

@@ -57,7 +57,7 @@ import lombok.experimental.SuperBuilder;
field: DURATION
agg: SUM
graphStyle: LINES
"""
"""
}
)
}
@@ -7,6 +7,8 @@ import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Pattern;
import lombok.Getter;

import static io.kestra.core.utils.RegexPatterns.JAVA_IDENTIFIER_REGEX;

@Getter
@JsonTypeInfo(
use = JsonTypeInfo.Id.NAME,
@@ -20,6 +22,6 @@ import lombok.Getter;
public class MarkdownSource {
@NotNull
@NotBlank
@Pattern(regexp = "^[A-Za-z_$][A-Za-z0-9_$]*(\\.[A-Za-z_$][A-Za-z0-9_$]*)*$")
@Pattern(regexp = JAVA_IDENTIFIER_REGEX)
private String type;
}
@@ -51,7 +51,7 @@ import lombok.experimental.SuperBuilder;
displayName: Executions
agg: COUNT
"""
}
}
)
}
)
@@ -24,8 +24,10 @@ import java.util.Optional;
@NoArgsConstructor
@Schema(
title = "Return a value for debugging purposes.",
description = "This task is mostly useful for troubleshooting.\n\n" +
"It allows you to return some templated functions, inputs or outputs. In some cases you might want to trim all white spaces from the rendered values so downstream tasks can use them properly"
description = """
This task is mostly useful for troubleshooting.

It allows you to return some templated functions, inputs or outputs. In some cases you might want to trim all white spaces from the rendered values so downstream tasks can use them properly."""
)
@Plugin(
examples = {
@@ -102,7 +102,7 @@ public class Switch extends Task implements FlowableTask<Switch.Output> {
@Schema(
title = "The map of keys and a list of tasks to be executed if the conditional `value` matches the key"
)
@PluginProperty
@PluginProperty(additionalProperties = Task[].class)
private Map<String, List<Task>> cases;

@Valid
@@ -16,6 +16,7 @@ import lombok.*;
import lombok.experimental.SuperBuilder;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.IOUtils;
import org.apache.hc.core5.net.URIBuilder;

import java.io.File;
import java.io.FileOutputStream;
@@ -29,6 +30,7 @@ import java.nio.charset.Charset;
import java.nio.file.Files;
import java.util.AbstractMap;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

@@ -51,6 +53,8 @@ public abstract class AbstractHttp extends Task implements HttpInterface {

protected Property<Map<String, Object>> formData;

protected Property<Map<String, Object>> params;

@Builder.Default
protected Property<String> contentType = Property.ofValue("application/json");

@@ -100,9 +104,29 @@ public abstract class AbstractHttp extends Task implements HttpInterface {
protected HttpRequest request(RunContext runContext) throws IllegalVariableEvaluationException, URISyntaxException, IOException {
// ideally we should URLEncode the path of the UI, but as we cannot URLEncode everything, we handle the common case of space in the URI.
String renderedUri = runContext.render(this.uri).as(String.class).map(s -> s.replace(" ", "%20")).orElseThrow();

URIBuilder uriBuilder = new URIBuilder(renderedUri);

if (this.params != null) {
runContext
.render(this.params)
.asMap(String.class, Object.class)
.forEach((s, o) -> {
if (o instanceof List<?> oList) {
oList.stream().map(Object::toString).forEach(s1 -> {
uriBuilder.addParameter(s, s1);
});
} else if (o instanceof String oString) {
uriBuilder.addParameter(s, oString);
} else {
throw new IllegalArgumentException("Unsupported param type: " + o.getClass());
}
});
}

HttpRequest.HttpRequestBuilder request = HttpRequest.builder()
.method(runContext.render(this.method).as(String.class).orElse(null))
.uri(new URI(renderedUri));
.uri(uriBuilder.build());

var renderedFormData = runContext.render(this.formData).asMap(String.class, Object.class);
if (!renderedFormData.isEmpty()) {
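The new `params` property is rendered into query parameters on the request URI: list values become repeated parameters, string values a single one, and anything else is rejected. A standalone sketch of that behaviour using the same Apache `URIBuilder`; the URL and parameter values are illustrative only.

```java
import org.apache.hc.core5.net.URIBuilder;

import java.net.URI;
import java.util.List;
import java.util.Map;

public class HttpParamsSketch {
    public static void main(String[] args) throws Exception {
        // Illustrative parameters: a list becomes repeated query parameters, a string a single one.
        Map<String, Object> params = Map.of("tag", List.of("a", "b"), "limit", "10");

        URIBuilder uriBuilder = new URIBuilder("https://example.com/search");
        params.forEach((name, value) -> {
            if (value instanceof List<?> list) {
                list.stream().map(Object::toString).forEach(v -> uriBuilder.addParameter(name, v));
            } else if (value instanceof String s) {
                uriBuilder.addParameter(name, s);
            } else {
                throw new IllegalArgumentException("Unsupported param type: " + value.getClass());
            }
        });

        URI uri = uriBuilder.build(); // e.g. https://example.com/search?tag=a&tag=b&limit=10 (order may vary)
        System.out.println(uri);
    }
}
```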