Compare commits


1 Commit

| Author | SHA1 | Message | Date |
| --- | --- | --- | --- |
| Roman Acevedo | 01af20ad6d | fix(executions): make state_duration generated on queries - fixes https://github.com/kestra-io/kestra/issues/11593 | 2025-10-06 10:11:44 +02:00 |
747 changed files with 19387 additions and 34967 deletions

View File

@@ -32,6 +32,11 @@ In the meantime, you can move onto the next step...
### Development:
- (Optional) By default, your dev server will target `localhost:8080`. If your backend is running elsewhere, you can create `.env.development.local` under `ui` folder with this content:
```
VITE_APP_API_URL={myApiUrl}
```
- Navigate into the `ui` folder and run `npm install` to install the dependencies for the frontend project.
- Now go to the `cli/src/main/resources` folder and create an `application-override.yml` file.
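  For illustration only (this sketch is not part of the diff above), a minimal `application-override.yml` could look like the following, assuming the in-memory repository/queue and the local storage backend:
  ```
  # Illustrative sketch only; values are assumptions, not taken from this diff.
  kestra:
    repository:
      type: memory      # keep flows/executions in memory for local dev
    queue:
      type: memory      # in-memory queue, no external broker needed
    storage:
      type: local
      local:
        basePath: /tmp/kestra/storage   # any writable directory
  ```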

View File

@@ -32,7 +32,7 @@ Watch out for duplicates! If you are creating a new issue, please check existing
#### Requirements
The following dependencies are required to build Kestra locally:
- Java 21+
- Node 22+ and npm 10+
- Node 18+ and npm
- Python 3, pip and python venv
- Docker & Docker Compose
- an IDE (Intellij IDEA, Eclipse or VS Code)

View File

@@ -4,7 +4,7 @@ body:
- type: markdown
attributes:
value: |
Thanks for reporting an issue! Please provide a [Minimal Reproducible Example](https://stackoverflow.com/help/minimal-reproducible-example) and share any additional information that may help reproduce, troubleshoot, and hopefully fix the issue, including screenshots, error traceback, and your Kestra server logs. For quick questions, you can contact us directly on [Slack](https://kestra.io/slack). Don't forget to give us a star! ⭐
Thanks for reporting an issue! Please provide a [Minimal Reproducible Example](https://stackoverflow.com/help/minimal-reproducible-example) and share any additional information that may help reproduce, troubleshoot, and hopefully fix the issue, including screenshots, error traceback, and your Kestra server logs. For quick questions, you can contact us directly on [Slack](https://kestra.io/slack).
- type: textarea
attributes:
label: Describe the issue

View File

@@ -4,7 +4,7 @@ body:
- type: textarea
attributes:
label: Feature description
placeholder: Tell us more about your feature request. Don't forget to give us a star! ⭐
placeholder: Tell us more about your feature request
validations:
required: true
labels:

View File

@@ -26,10 +26,6 @@ updates:
open-pull-requests-limit: 50
labels:
- "dependency-upgrade"
ignore:
- dependency-name: "com.google.protobuf:*"
# Ignore versions of Protobuf that are equal to or greater than 4.0.0 as Orc still uses 3
versions: [ "[4,)" ]
# Maintain dependencies for NPM modules
- package-ecosystem: "npm"

View File

@@ -35,4 +35,4 @@ Remove this section if this change applies to all flows or to the documentation
If there are no setup requirements, you can remove this section.
Thank you for your contribution. ❤️ Don't forget to give us a star! ⭐ -->
Thank you for your contribution. ❤️ -->

View File

@@ -2,7 +2,7 @@ name: Auto-Translate UI keys and create PR
on:
schedule:
- cron: "0 9-21/3 * * 1-5" # Every 3 hours from 9 AM to 9 PM, Monday to Friday
- cron: "0 9-21/3 * * *" # Every 3 hours from 9 AM to 9 PM
workflow_dispatch:
inputs:
retranslate_modified_keys:
@@ -39,7 +39,7 @@ jobs:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
- name: Set up Node
uses: actions/setup-node@v6
uses: actions/setup-node@v5
with:
node-version: "20.x"

View File

@@ -40,7 +40,7 @@ jobs:
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v4
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@@ -58,7 +58,7 @@ jobs:
- name: Setup gradle
if: ${{ matrix.language == 'java' }}
uses: gradle/actions/setup-gradle@v5
uses: gradle/actions/setup-gradle@v4
- name: Build with Gradle
if: ${{ matrix.language == 'java' }}
@@ -68,7 +68,7 @@ jobs:
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
if: ${{ matrix.language != 'java' }}
uses: github/codeql-action/autobuild@v4
uses: github/codeql-action/autobuild@v3
# Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
@@ -82,4 +82,4 @@ jobs:
# make release
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v4
uses: github/codeql-action/analyze@v3

View File

@@ -67,24 +67,20 @@ jobs:
end:
runs-on: ubuntu-latest
needs: [backend-tests, frontend-tests, publish-develop-docker, publish-develop-maven]
if: "always() && github.repository == 'kestra-io/kestra'"
needs: [publish-develop-docker, publish-develop-maven]
if: always()
steps:
- run: echo "end CI of failed or success"
- name: Trigger EE Workflow
uses: peter-evans/repository-dispatch@5fc4efd1a4797ddb68ffd0714a238564e4cc0e6f # v4
if: "!contains(needs.*.result, 'failure') && github.ref == 'refs/heads/develop'"
uses: peter-evans/repository-dispatch@v3
if: github.ref == 'refs/heads/develop' && needs.release.result == 'success'
with:
token: ${{ secrets.GH_PERSONAL_TOKEN }}
repository: kestra-io/kestra-ee
event-type: "oss-updated"
# Slack
- run: echo "mark job as failure to forward error to Slack action" && exit 1
if: ${{ contains(needs.*.result, 'failure') }}
- name: Slack - Notification
if: ${{ always() && contains(needs.*.result, 'failure') }}
if: ${{ failure() && env.SLACK_WEBHOOK_URL != 0 && (github.ref == 'refs/heads/master' || github.ref == 'refs/heads/main' || github.ref == 'refs/heads/develop') }}
uses: kestra-io/actions/composite/slack-status@main
with:
webhook-url: ${{ secrets.SLACK_WEBHOOK_URL }}
channel: 'C09FF36GKE1'

View File

@@ -5,15 +5,6 @@ on:
tags:
- 'v*'
workflow_dispatch:
inputs:
skip-test:
description: 'Skip test'
type: choice
required: true
default: 'false'
options:
- "true"
- "false"
jobs:
build-artifacts:
@@ -23,7 +14,6 @@ jobs:
backend-tests:
name: Backend tests
uses: kestra-io/actions/.github/workflows/kestra-oss-backend-tests.yml@main
if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
secrets:
GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
@@ -33,7 +23,6 @@ jobs:
frontend-tests:
name: Frontend tests
uses: kestra-io/actions/.github/workflows/kestra-oss-frontend-tests.yml@main
if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
secrets:
GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -22,11 +22,12 @@ jobs:
fetch-depth: 0
# Setup build
- uses: kestra-io/actions/composite/setup-build@main
- uses: ./actions/.github/actions/setup-build
id: build
with:
java-enabled: true
node-enabled: true
caches-enabled: true
# Npm
- name: Npm - Install
@@ -43,7 +44,7 @@ jobs:
# Upload dependency check report
- name: Upload dependency check report
uses: actions/upload-artifact@v5
uses: actions/upload-artifact@v4
if: ${{ always() }}
with:
name: dependency-check-report
@@ -68,10 +69,11 @@ jobs:
with:
java-enabled: false
node-enabled: false
caches-enabled: true
# Run Trivy image scan for Docker vulnerabilities, see https://github.com/aquasecurity/trivy-action
- name: Docker Vulnerabilities Check
uses: aquasecurity/trivy-action@b6643a29fecd7f34b3597bc6acb0a98b03d33ff8 # 0.33.1
uses: aquasecurity/trivy-action@0.33.1
with:
image-ref: kestra/kestra:develop
format: 'template'
@@ -81,7 +83,7 @@ jobs:
skip-dirs: /app/plugins
- name: Upload Trivy scan results to GitHub Security tab
uses: github/codeql-action/upload-sarif@v4
uses: github/codeql-action/upload-sarif@v3
with:
sarif_file: 'trivy-results.sarif'
category: docker-
@@ -108,7 +110,7 @@ jobs:
# Run Trivy image scan for Docker vulnerabilities, see https://github.com/aquasecurity/trivy-action
- name: Docker Vulnerabilities Check
uses: aquasecurity/trivy-action@b6643a29fecd7f34b3597bc6acb0a98b03d33ff8 # 0.33.1
uses: aquasecurity/trivy-action@0.33.1
with:
image-ref: kestra/kestra:latest
format: table
@@ -118,7 +120,6 @@ jobs:
output: 'trivy-results.sarif'
- name: Upload Trivy scan results to GitHub Security tab
uses: github/codeql-action/upload-sarif@v4
uses: github/codeql-action/upload-sarif@v3
with:
sarif_file: 'trivy-results.sarif'
category: docker-
sarif_file: 'trivy-results.sarif'

View File

@@ -66,7 +66,6 @@
#plugin-jdbc:io.kestra.plugin:plugin-jdbc-sybase:LATEST
#plugin-jenkins:io.kestra.plugin:plugin-jenkins:LATEST
#plugin-jira:io.kestra.plugin:plugin-jira:LATEST
#plugin-jms:io.kestra.plugin:plugin-jms:LATEST
#plugin-kafka:io.kestra.plugin:plugin-kafka:LATEST
#plugin-kestra:io.kestra.plugin:plugin-kestra:LATEST
#plugin-kubernetes:io.kestra.plugin:plugin-kubernetes:LATEST

View File

@@ -1,5 +1,4 @@
ARG KESTRA_DOCKER_BASE_VERSION=develop
FROM kestra/kestra:$KESTRA_DOCKER_BASE_VERSION
FROM kestra/kestra:develop
USER root

View File

@@ -68,12 +68,6 @@ Kestra is an open-source, event-driven orchestration platform that makes both **
## 🚀 Quick Start
### Launch on AWS (CloudFormation)
Deploy Kestra on AWS using our CloudFormation template:
[![Launch Stack](https://cdn.rawgit.com/buildkite/cloudformation-launch-stack-button-svg/master/launch-stack.svg)](https://console.aws.amazon.com/cloudformation/home#/stacks/create/review?templateURL=https://kestra-deployment-templates.s3.eu-west-3.amazonaws.com/aws/cloudformation/ec2-rds-s3/kestra-oss.yaml&stackName=kestra-oss)
### Get Started Locally in 5 Minutes
#### Launch Kestra in Docker
@@ -104,7 +98,7 @@ If you're on Windows and use WSL (Linux-based environment in Windows):
```bash
docker run --pull=always --rm -it -p 8080:8080 --user=root \
-v "/var/run/docker.sock:/var/run/docker.sock" \
-v "/mnt/c/Temp:/tmp" kestra/kestra:latest server local
-v "C:/Temp:/tmp" kestra/kestra:latest server local
```
Check our [Installation Guide](https://kestra.io/docs/installation) for other deployment options (Docker Compose, Podman, Kubernetes, AWS, GCP, Azure, and more).

View File

@@ -21,7 +21,7 @@ plugins {
// test
id "com.adarshr.test-logger" version "4.0.0"
id "org.sonarqube" version "7.0.1.6134"
id "org.sonarqube" version "6.3.1.5724"
id 'jacoco-report-aggregation'
// helper
@@ -37,7 +37,7 @@ plugins {
id "com.vanniktech.maven.publish" version "0.34.0"
// OWASP dependency check
id "org.owasp.dependencycheck" version "12.1.8" apply false
id "org.owasp.dependencycheck" version "12.1.5" apply false
}
idea {
@@ -206,69 +206,41 @@ subprojects {subProj ->
testImplementation 'org.assertj:assertj-core'
}
def commonTestConfig = { Test t ->
test {
useJUnitPlatform()
reports {
junitXml.required = true
junitXml.outputPerTestCase = true
junitXml.mergeReruns = true
junitXml.includeSystemErrLog = true;
junitXml.outputLocation = layout.buildDirectory.dir("test-results/test")
}
// set Xmx for test workers
t.maxHeapSize = '4g'
maxHeapSize = '4g'
// configure en_US default locale for tests
t.systemProperty 'user.language', 'en'
t.systemProperty 'user.country', 'US'
systemProperty 'user.language', 'en'
systemProperty 'user.country', 'US'
t.environment 'SECRET_MY_SECRET', "{\"secretKey\":\"secretValue\"}".bytes.encodeBase64().toString()
t.environment 'SECRET_NEW_LINE', "cGFzc3dvcmR2ZXJ5dmVyeXZleXJsb25ncGFzc3dvcmR2ZXJ5dmVyeXZleXJsb25ncGFzc3dvcmR2\nZXJ5dmVyeXZleXJsb25ncGFzc3dvcmR2ZXJ5dmVyeXZleXJsb25ncGFzc3dvcmR2ZXJ5dmVyeXZl\neXJsb25n"
t.environment 'SECRET_WEBHOOK_KEY', "secretKey".bytes.encodeBase64().toString()
t.environment 'SECRET_NON_B64_SECRET', "some secret value"
t.environment 'SECRET_PASSWORD', "cGFzc3dvcmQ="
t.environment 'ENV_TEST1', "true"
t.environment 'ENV_TEST2', "Pass by env"
environment 'SECRET_MY_SECRET', "{\"secretKey\":\"secretValue\"}".bytes.encodeBase64().toString()
environment 'SECRET_NEW_LINE', "cGFzc3dvcmR2ZXJ5dmVyeXZleXJsb25ncGFzc3dvcmR2ZXJ5dmVyeXZleXJsb25ncGFzc3dvcmR2\nZXJ5dmVyeXZleXJsb25ncGFzc3dvcmR2ZXJ5dmVyeXZleXJsb25ncGFzc3dvcmR2ZXJ5dmVyeXZl\neXJsb25n"
environment 'SECRET_WEBHOOK_KEY', "secretKey".bytes.encodeBase64().toString()
environment 'SECRET_NON_B64_SECRET', "some secret value"
environment 'SECRET_PASSWORD', "cGFzc3dvcmQ="
environment 'ENV_TEST1', "true"
environment 'ENV_TEST2', "Pass by env"
if (subProj.name == 'core' || subProj.name == 'jdbc-h2' || subProj.name == 'jdbc-mysql' || subProj.name == 'jdbc-postgres') {
// JUnit 5 parallel settings
t.systemProperty 'junit.jupiter.execution.parallel.enabled', 'true'
t.systemProperty 'junit.jupiter.execution.parallel.mode.default', 'concurrent'
t.systemProperty 'junit.jupiter.execution.parallel.mode.classes.default', 'same_thread'
t.systemProperty 'junit.jupiter.execution.parallel.config.strategy', 'dynamic'
systemProperty 'junit.jupiter.execution.parallel.enabled', 'true'
systemProperty 'junit.jupiter.execution.parallel.mode.default', 'concurrent'
systemProperty 'junit.jupiter.execution.parallel.mode.classes.default', 'same_thread'
systemProperty 'junit.jupiter.execution.parallel.config.strategy', 'dynamic'
}
}
tasks.register('flakyTest', Test) { Test t ->
group = 'verification'
description = 'Runs tests tagged @Flaky but does not fail the build.'
useJUnitPlatform {
includeTags 'flaky'
}
ignoreFailures = true
reports {
junitXml.required = true
junitXml.outputPerTestCase = true
junitXml.mergeReruns = true
junitXml.includeSystemErrLog = true
junitXml.outputLocation = layout.buildDirectory.dir("test-results/flakyTest")
}
commonTestConfig(t)
}
test {
useJUnitPlatform {
excludeTags 'flaky'
}
reports {
junitXml.required = true
junitXml.outputPerTestCase = true
junitXml.mergeReruns = true
junitXml.includeSystemErrLog = true
junitXml.outputLocation = layout.buildDirectory.dir("test-results/test")
}
commonTestConfig(it)
finalizedBy(tasks.named('flakyTest'))
}
testlogger {
theme = 'mocha-parallel'
showExceptions = true
@@ -372,7 +344,7 @@ tasks.named('testCodeCoverageReport') {
subprojects {
sonar {
properties {
property "sonar.coverage.jacoco.xmlReportPaths", "$projectDir.parentFile.path/build/reports/jacoco/testCodeCoverageReport/testCodeCoverageReport.xml,$projectDir.parentFile.path/build/reports/jacoco/test/testCodeCoverageReport.xml"
property "sonar.coverage.jacoco.xmlReportPaths", "$projectDir.parentFile.path/build/reports/jacoco/testCodeCoverageReport/testCodeCoverageReport.xml"
}
}
}

View File

@@ -117,7 +117,7 @@ public abstract class AbstractValidateCommand extends AbstractApiCommand {
try(DefaultHttpClient client = client()) {
MutableHttpRequest<String> request = HttpRequest
.POST(apiUri("/flows/validate", tenantService.getTenantIdAndAllowEETenants(tenantId)), body).contentType(MediaType.APPLICATION_YAML);
.POST(apiUri("/flows/validate", tenantService.getTenantId(tenantId)), body).contentType(MediaType.APPLICATION_YAML);
List<ValidateConstraintViolation> validations = client.toBlocking().retrieve(
this.requestOptions(request),

View File

@@ -43,7 +43,7 @@ import java.util.concurrent.Callable;
SysCommand.class,
ConfigCommand.class,
NamespaceCommand.class,
MigrationCommand.class
MigrationCommand.class,
}
)
@Introspected

View File

@@ -24,8 +24,7 @@ public class FlowValidateCommand extends AbstractValidateCommand {
private FlowService flowService;
@Inject
private TenantIdSelectorService tenantIdSelectorService;
private TenantIdSelectorService tenantService;
@Override
public Integer call() throws Exception {
@@ -40,7 +39,7 @@ public class FlowValidateCommand extends AbstractValidateCommand {
FlowWithSource flow = (FlowWithSource) object;
List<String> warnings = new ArrayList<>();
warnings.addAll(flowService.deprecationPaths(flow).stream().map(deprecation -> deprecation + " is deprecated").toList());
warnings.addAll(flowService.warnings(flow, tenantIdSelectorService.getTenantIdAndAllowEETenants(tenantId)));
warnings.addAll(flowService.warnings(flow, tenantService.getTenantId(tenantId)));
return warnings;
},
(Object object) -> {

View File

@@ -64,7 +64,7 @@ public class FlowNamespaceUpdateCommand extends AbstractServiceNamespaceUpdateCo
}
try(DefaultHttpClient client = client()) {
MutableHttpRequest<String> request = HttpRequest
.POST(apiUri("/flows/", tenantService.getTenantIdAndAllowEETenants(tenantId)) + namespace + "?delete=" + delete, body).contentType(MediaType.APPLICATION_YAML);
.POST(apiUri("/flows/", tenantService.getTenantId(tenantId)) + namespace + "?delete=" + delete, body).contentType(MediaType.APPLICATION_YAML);
List<UpdateResult> updated = client.toBlocking().retrieve(
this.requestOptions(request),

View File

@@ -1,26 +0,0 @@
package io.kestra.cli.commands.migrations;
import io.kestra.cli.AbstractCommand;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
@CommandLine.Command(
name = "metadata",
description = "populate metadata for entities"
)
@Slf4j
public class MetadataMigrationCommand extends AbstractCommand {
@Inject
private MetadataMigrationService metadataMigrationService;
@Override
public Integer call() throws Exception {
super.call();
int returnCode = metadataMigrationService.migrateMetadata();
if (returnCode == 0) {
System.out.println("✅ Metadata migration complete.");
}
return returnCode;
}
}

View File

@@ -1,92 +0,0 @@
package io.kestra.cli.commands.migrations;
import io.kestra.core.models.kv.PersistedKvMetadata;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.repositories.KvMetadataRepositoryInterface;
import io.kestra.core.storages.FileAttributes;
import io.kestra.core.storages.StorageContext;
import io.kestra.core.storages.StorageInterface;
import io.kestra.core.storages.kv.InternalKVStore;
import io.kestra.core.storages.kv.KVEntry;
import io.kestra.core.tenant.TenantService;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;
import java.time.Instant;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
import static io.kestra.core.utils.Rethrow.throwFunction;
@Singleton
public class MetadataMigrationService {
@Inject
private TenantService tenantService;
@Inject
private FlowRepositoryInterface flowRepository;
@Inject
private KvMetadataRepositoryInterface kvMetadataRepository;
@Inject
private StorageInterface storageInterface;
protected Map<String, List<String>> namespacesPerTenant() {
String tenantId = tenantService.resolveTenant();
return Map.of(tenantId, flowRepository.findDistinctNamespace(tenantId));
}
public int migrateMetadata() {
try {
kvMigration();
} catch (IOException e) {
System.err.println("❌ KV metadata migration failed: " + e.getMessage());
e.printStackTrace();
return 1;
}
return 0;
}
private void kvMigration() throws IOException {
this.namespacesPerTenant().entrySet().stream()
.flatMap(namespacesForTenant -> namespacesForTenant.getValue().stream().map(namespace -> Map.entry(namespacesForTenant.getKey(), namespace)))
.flatMap(throwFunction(namespaceForTenant -> {
InternalKVStore kvStore = new InternalKVStore(namespaceForTenant.getKey(), namespaceForTenant.getValue(), storageInterface, kvMetadataRepository);
List<FileAttributes> list = listAllFromStorage(storageInterface, namespaceForTenant.getKey(), namespaceForTenant.getValue());
Map<Boolean, List<KVEntry>> entriesByIsExpired = list.stream()
.map(throwFunction(fileAttributes -> KVEntry.from(namespaceForTenant.getValue(), fileAttributes)))
.collect(Collectors.partitioningBy(kvEntry -> Optional.ofNullable(kvEntry.expirationDate()).map(expirationDate -> Instant.now().isAfter(expirationDate)).orElse(false)));
entriesByIsExpired.get(true).forEach(kvEntry -> {
try {
storageInterface.delete(
namespaceForTenant.getKey(),
namespaceForTenant.getValue(),
kvStore.storageUri(kvEntry.key())
);
} catch (IOException e) {
throw new RuntimeException(e);
}
});
return entriesByIsExpired.get(false).stream().map(kvEntry -> PersistedKvMetadata.from(namespaceForTenant.getKey(), kvEntry));
}))
.forEach(kvMetadataRepository::save);
}
private static List<FileAttributes> listAllFromStorage(StorageInterface storage, String tenant, String namespace) throws IOException {
try {
return storage.list(tenant, namespace, URI.create(StorageContext.KESTRA_PROTOCOL + StorageContext.kvPrefix(namespace)));
} catch (FileNotFoundException e) {
return Collections.emptyList();
}
}
}

View File

@@ -13,7 +13,6 @@ import picocli.CommandLine;
mixinStandardHelpOptions = true,
subcommands = {
TenantMigrationCommand.class,
MetadataMigrationCommand.class
}
)
@Slf4j

View File

@@ -49,7 +49,7 @@ public class NamespaceFilesUpdateCommand extends AbstractApiCommand {
try (var files = Files.walk(from); DefaultHttpClient client = client()) {
if (delete) {
client.toBlocking().exchange(this.requestOptions(HttpRequest.DELETE(apiUri("/namespaces/", tenantService.getTenantIdAndAllowEETenants(tenantId)) + namespace + "/files?path=" + to, null)));
client.toBlocking().exchange(this.requestOptions(HttpRequest.DELETE(apiUri("/namespaces/", tenantService.getTenantId(tenantId)) + namespace + "/files?path=" + to, null)));
}
KestraIgnore kestraIgnore = new KestraIgnore(from);
@@ -67,7 +67,7 @@ public class NamespaceFilesUpdateCommand extends AbstractApiCommand {
client.toBlocking().exchange(
this.requestOptions(
HttpRequest.POST(
apiUri("/namespaces/", tenantService.getTenantIdAndAllowEETenants(tenantId)) + namespace + "/files?path=" + destination,
apiUri("/namespaces/", tenantService.getTenantId(tenantId)) + namespace + "/files?path=" + destination,
body
).contentType(MediaType.MULTIPART_FORM_DATA)
)

View File

@@ -6,7 +6,6 @@ import io.kestra.core.models.flows.State;
import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface;
import io.kestra.core.runners.ExecutionQueued;
import io.kestra.core.services.ConcurrencyLimitService;
import io.kestra.jdbc.runner.AbstractJdbcExecutionQueuedStorage;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject;
@@ -16,6 +15,8 @@ import picocli.CommandLine;
import java.util.Optional;
import static io.kestra.core.utils.Rethrow.throwConsumer;
@CommandLine.Command(
name = "submit-queued-execution",
description = {"Submit all queued execution to the executor",
@@ -48,11 +49,9 @@ public class SubmitQueuedCommand extends AbstractCommand {
}
else if (queueType.get().equals("postgres") || queueType.get().equals("mysql") || queueType.get().equals("h2")) {
var executionQueuedStorage = applicationContext.getBean(AbstractJdbcExecutionQueuedStorage.class);
var concurrencyLimitService = applicationContext.getBean(ConcurrencyLimitService.class);
for (ExecutionQueued queued : executionQueuedStorage.getAllForAllTenants()) {
Execution restart = concurrencyLimitService.unqueue(queued.getExecution(), State.Type.RUNNING);
executionQueue.emit(restart);
executionQueuedStorage.pop(queued.getTenantId(), queued.getNamespace(), queued.getFlowId(), throwConsumer(execution -> executionQueue.emit(execution.withState(State.Type.CREATED))));
cpt++;
}
}

View File

@@ -49,7 +49,7 @@ public class TemplateNamespaceUpdateCommand extends AbstractServiceNamespaceUpda
try (DefaultHttpClient client = client()) {
MutableHttpRequest<List<Template>> request = HttpRequest
.POST(apiUri("/templates/", tenantService.getTenantIdAndAllowEETenants(tenantId)) + namespace + "?delete=" + delete, templates);
.POST(apiUri("/templates/", tenantService.getTenantId(tenantId)) + namespace + "?delete=" + delete, templates);
List<UpdateResult> updated = client.toBlocking().retrieve(
this.requestOptions(request),

View File

@@ -1,69 +0,0 @@
package io.kestra.cli.listeners;
import io.kestra.core.server.LocalServiceState;
import io.kestra.core.server.Service;
import io.kestra.core.server.ServiceRegistry;
import io.micronaut.context.annotation.Requires;
import io.micronaut.context.event.ApplicationEventListener;
import io.micronaut.context.event.ShutdownEvent;
import io.micronaut.core.annotation.Order;
import io.micronaut.core.order.Ordered;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import lombok.extern.slf4j.Slf4j;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ForkJoinPool;
/**
* Global application shutdown handler.
* This handler gets effectively invoked before {@link jakarta.annotation.PreDestroy} does.
*/
@Singleton
@Slf4j
@Order(Ordered.LOWEST_PRECEDENCE)
@Requires(property = "kestra.server-type")
public class GracefulEmbeddedServiceShutdownListener implements ApplicationEventListener<ShutdownEvent> {
@Inject
ServiceRegistry serviceRegistry;
/**
* {@inheritDoc}
**/
@Override
public boolean supports(ShutdownEvent event) {
return ApplicationEventListener.super.supports(event);
}
/**
* Wait for services' close actions
*
* @param event the event to respond to
*/
@Override
public void onApplicationEvent(ShutdownEvent event) {
List<LocalServiceState> states = serviceRegistry.all();
if (states.isEmpty()) {
return;
}
log.debug("Shutdown event received");
List<CompletableFuture<Void>> futures = states.stream()
.map(state -> CompletableFuture.runAsync(() -> closeService(state), ForkJoinPool.commonPool()))
.toList();
// Wait for all services to close, before shutting down the embedded server
CompletableFuture.allOf(futures.toArray(new CompletableFuture[0])).join();
}
private void closeService(LocalServiceState state) {
final Service service = state.service();
try {
service.unwrap().close();
} catch (Exception e) {
log.error("[Service id={}, type={}] Unexpected error on close", service.getId(), service.getType(), e);
}
}
}

View File

@@ -16,11 +16,4 @@ public class TenantIdSelectorService {
}
return MAIN_TENANT;
}
public String getTenantIdAndAllowEETenants(String tenantId) {
if (StringUtils.isNotBlank(tenantId)){
return tenantId;
}
return MAIN_TENANT;
}
}

View File

@@ -49,8 +49,6 @@ micronaut:
- /ui/.+
- /health
- /health/.+
- /metrics
- /metrics/.+
- /prometheus
http-version: HTTP_1_1
caches:

View File

@@ -1,76 +0,0 @@
package io.kestra.cli.commands.configs.sys;
import io.kestra.cli.commands.flows.FlowCreateCommand;
import io.kestra.cli.commands.namespaces.kv.KvCommand;
import io.micronaut.configuration.picocli.PicocliRunner;
import io.micronaut.context.ApplicationContext;
import io.micronaut.runtime.server.EmbeddedServer;
import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Objects;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Verifies CLI behavior without repository configuration:
* - Repo-independent commands succeed (e.g. KV with no params).
* - Repo-dependent commands fail with a clear error.
*/
class NoConfigCommandTest {
@Test
void shouldSucceedWithNamespaceKVCommandWithoutParamsAndConfig() {
ByteArrayOutputStream out = new ByteArrayOutputStream();
System.setOut(new PrintStream(out));
try (ApplicationContext ctx = ApplicationContext.builder().deduceEnvironment(false).start()) {
String[] args = {};
Integer call = PicocliRunner.call(KvCommand.class, ctx, args);
assertThat(call).isZero();
assertThat(out.toString()).contains("Usage: kestra namespace kv");
}
}
@Test
void shouldFailWithCreateFlowCommandWithoutConfig() throws URISyntaxException {
URL flowUrl = NoConfigCommandTest.class.getClassLoader().getResource("crudFlow/date.yml");
Objects.requireNonNull(flowUrl, "Test flow resource not found");
Path flowPath = Paths.get(flowUrl.toURI());
ByteArrayOutputStream out = new ByteArrayOutputStream();
ByteArrayOutputStream err=new ByteArrayOutputStream();
System.setOut(new PrintStream(out));
System.setErr(new PrintStream(err));
try (ApplicationContext ctx = ApplicationContext.builder()
.deduceEnvironment(false)
.start()) {
EmbeddedServer embeddedServer = ctx.getBean(EmbeddedServer.class);
embeddedServer.start();
String[] createArgs = {
"--server",
embeddedServer.getURL().toString(),
"--user",
"myuser:pass:word",
flowPath.toString(),
};
Integer exitCode = PicocliRunner.call(FlowCreateCommand.class, ctx, createArgs);
assertThat(exitCode).isNotZero();
assertThat(out.toString()).isEmpty();
assertThat(err.toString()).contains("No bean of type [io.kestra.core.repositories.FlowRepositoryInterface] exists");
}
}
}

View File

@@ -27,26 +27,6 @@ class FlowValidateCommandTest {
}
}
@Test
// github action kestra-io/validate-action requires being able to validate Flows from OSS CLI against a remote EE instance
void runForEEInstance() {
ByteArrayOutputStream out = new ByteArrayOutputStream();
System.setOut(new PrintStream(out));
try (ApplicationContext ctx = ApplicationContext.builder().deduceEnvironment(false).start()) {
String[] args = {
"--tenant",
"some-ee-tenant",
"--local",
"src/test/resources/helper/include.yaml"
};
Integer call = PicocliRunner.call(FlowValidateCommand.class, ctx, args);
assertThat(call).isZero();
assertThat(out.toString()).contains("✓ - io.kestra.cli / include");
}
}
@Test
void warning() {
ByteArrayOutputStream out = new ByteArrayOutputStream();

View File

@@ -1,125 +0,0 @@
package io.kestra.cli.commands.migrations;
import io.kestra.cli.App;
import io.kestra.core.exceptions.ResourceExpiredException;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.models.kv.PersistedKvMetadata;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.repositories.KvMetadataRepositoryInterface;
import io.kestra.core.serializers.JacksonMapper;
import io.kestra.core.storages.StorageContext;
import io.kestra.core.storages.StorageInterface;
import io.kestra.core.storages.StorageObject;
import io.kestra.core.storages.kv.*;
import io.kestra.core.tenant.TenantService;
import io.kestra.core.utils.TestsUtils;
import io.kestra.plugin.core.log.Log;
import io.micronaut.configuration.picocli.PicocliRunner;
import io.micronaut.context.ApplicationContext;
import io.micronaut.context.env.Environment;
import io.micronaut.core.annotation.NonNull;
import org.junit.jupiter.api.Test;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.net.URI;
import java.time.Duration;
import java.time.Instant;
import java.util.List;
import java.util.Optional;
import static org.assertj.core.api.Assertions.assertThat;
public class MetadataMigrationCommandTest {
@Test
void run() throws IOException, ResourceExpiredException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
System.setOut(new PrintStream(out));
ByteArrayOutputStream err = new ByteArrayOutputStream();
System.setErr(new PrintStream(err));
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
String namespace = TestsUtils.randomNamespace();
String key = "myKey";
StorageInterface storage = ctx.getBean(StorageInterface.class);
String description = "Some description";
String value = "someValue";
putOldKv(storage, namespace, key, description, value);
String anotherNamespace = TestsUtils.randomNamespace();
String anotherKey = "anotherKey";
String anotherDescription = "another description";
putOldKv(storage, anotherNamespace, anotherKey, anotherDescription, "anotherValue");
String tenantId = TenantService.MAIN_TENANT;
// Expired KV should not be migrated + should be purged from the storage
String expiredKey = "expiredKey";
putOldKv(storage, namespace, expiredKey, Instant.now().minus(Duration.ofMinutes(5)), "some expired description", "expiredValue");
assertThat(storage.exists(tenantId, null, getKvStorageUri(namespace, expiredKey))).isTrue();
KvMetadataRepositoryInterface kvMetadataRepository = ctx.getBean(KvMetadataRepositoryInterface.class);
assertThat(kvMetadataRepository.findByName(tenantId, namespace, key).isPresent()).isFalse();
String[] kvMetadataMigrationCommand = {
"migrate", "metadata"
};
PicocliRunner.call(App.class, ctx, kvMetadataMigrationCommand);
assertThat(out.toString()).contains("✅ Metadata migration complete.");
// Still it's not in the metadata repository because no flow exist to find that kv
assertThat(kvMetadataRepository.findByName(tenantId, namespace, key).isPresent()).isFalse();
assertThat(kvMetadataRepository.findByName(tenantId, anotherNamespace, anotherKey).isPresent()).isFalse();
FlowRepositoryInterface flowRepository = ctx.getBean(FlowRepositoryInterface.class);
flowRepository.create(GenericFlow.of(Flow.builder()
.tenantId(tenantId)
.id("a-flow")
.namespace(namespace)
.tasks(List.of(Log.builder().id("log").type(Log.class.getName()).message("logging").build()))
.build()));
out.reset();
PicocliRunner.call(App.class, ctx, kvMetadataMigrationCommand);
assertThat(out.toString()).contains("✅ Metadata migration complete.");
Optional<PersistedKvMetadata> foundKv = kvMetadataRepository.findByName(tenantId, namespace, key);
assertThat(foundKv.isPresent()).isTrue();
assertThat(foundKv.get().getDescription()).isEqualTo(description);
assertThat(kvMetadataRepository.findByName(tenantId, anotherNamespace, anotherKey).isPresent()).isFalse();
KVStore kvStore = new InternalKVStore(tenantId, namespace, storage, kvMetadataRepository);
Optional<KVEntry> actualKv = kvStore.get(key);
assertThat(actualKv.isPresent()).isTrue();
assertThat(actualKv.get().description()).isEqualTo(description);
Optional<KVValue> actualValue = kvStore.getValue(key);
assertThat(actualValue.isPresent()).isTrue();
assertThat(actualValue.get().value()).isEqualTo(value);
assertThat(kvMetadataRepository.findByName(tenantId, namespace, expiredKey).isPresent()).isFalse();
assertThat(storage.exists(tenantId, null, getKvStorageUri(namespace, expiredKey))).isFalse();
}
}
private static void putOldKv(StorageInterface storage, String namespace, String key, String description, String value) throws IOException {
putOldKv(storage, namespace, key, Instant.now().plus(Duration.ofMinutes(5)), description, value);
}
private static void putOldKv(StorageInterface storage, String namespace, String key, Instant expirationDate, String description, String value) throws IOException {
URI kvStorageUri = getKvStorageUri(namespace, key);
KVValueAndMetadata kvValueAndMetadata = new KVValueAndMetadata(new KVMetadata(description, expirationDate), value);
storage.put(TenantService.MAIN_TENANT, namespace, kvStorageUri, new StorageObject(
kvValueAndMetadata.metadataAsMap(),
new ByteArrayInputStream(JacksonMapper.ofIon().writeValueAsBytes(kvValueAndMetadata.value()))
));
}
private static @NonNull URI getKvStorageUri(String namespace, String key) {
return URI.create(StorageContext.KESTRA_PROTOCOL + StorageContext.kvPrefix(namespace) + "/" + key + ".ion");
}
}

View File

@@ -7,8 +7,6 @@ import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Pattern;
import static io.kestra.core.utils.RegexPatterns.JAVA_IDENTIFIER_REGEX;
/**
* Top-level marker interface for Kestra's plugin of type App.
*/
@@ -20,6 +18,6 @@ public interface AppBlockInterface extends io.kestra.core.models.Plugin {
)
@NotNull
@NotBlank
@Pattern(regexp = JAVA_IDENTIFIER_REGEX)
@Pattern(regexp="\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*(\\.\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*)*")
String getType();
}

View File

@@ -7,8 +7,6 @@ import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Pattern;
import static io.kestra.core.utils.RegexPatterns.JAVA_IDENTIFIER_REGEX;
/**
* Top-level marker interface for Kestra's plugin of type App.
*/
@@ -20,6 +18,6 @@ public interface AppPluginInterface extends io.kestra.core.models.Plugin {
)
@NotNull
@NotBlank
@Pattern(regexp = JAVA_IDENTIFIER_REGEX)
@Pattern(regexp="\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*(\\.\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*)*")
String getType();
}

View File

@@ -15,7 +15,6 @@ import com.github.victools.jsonschema.generator.impl.DefinitionKey;
import com.github.victools.jsonschema.generator.naming.DefaultSchemaDefinitionNamingStrategy;
import com.github.victools.jsonschema.module.jackson.JacksonModule;
import com.github.victools.jsonschema.module.jackson.JacksonOption;
import com.github.victools.jsonschema.module.jackson.JsonUnwrappedDefinitionProvider;
import com.github.victools.jsonschema.module.jakarta.validation.JakartaValidationModule;
import com.github.victools.jsonschema.module.jakarta.validation.JakartaValidationOption;
import com.github.victools.jsonschema.module.swagger2.Swagger2Module;
@@ -46,9 +45,6 @@ import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import lombok.extern.slf4j.Slf4j;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.reflect.*;
import java.time.*;
@@ -62,9 +58,7 @@ import static io.kestra.core.docs.AbstractClassDocumentation.required;
import static io.kestra.core.serializers.JacksonMapper.MAP_TYPE_REFERENCE;
@Singleton
@Slf4j
public class JsonSchemaGenerator {
private static final List<Class<?>> TYPES_RESOLVED_AS_STRING = List.of(Duration.class, LocalTime.class, LocalDate.class, LocalDateTime.class, ZonedDateTime.class, OffsetDateTime.class, OffsetTime.class);
private static final List<Class<?>> SUBTYPE_RESOLUTION_EXCLUSION_FOR_PLUGIN_SCHEMA = List.of(Task.class, AbstractTrigger.class);
@@ -276,22 +270,8 @@ public class JsonSchemaGenerator {
.with(Option.DEFINITIONS_FOR_ALL_OBJECTS)
.with(Option.DEFINITION_FOR_MAIN_SCHEMA)
.with(Option.PLAIN_DEFINITION_KEYS)
.with(Option.ALLOF_CLEANUP_AT_THE_END);
// HACK: Registered a custom JsonUnwrappedDefinitionProvider prior to the JacksonModule
// to be able to return an CustomDefinition with an empty node when the ResolvedType can't be found.
builder.forTypesInGeneral().withCustomDefinitionProvider(new JsonUnwrappedDefinitionProvider(){
@Override
public CustomDefinition provideCustomSchemaDefinition(ResolvedType javaType, SchemaGenerationContext context) {
try {
return super.provideCustomSchemaDefinition(javaType, context);
} catch (NoClassDefFoundError e) {
// This error happens when a non-supported plugin type exists in the classpath.
log.debug("Cannot create schema definition for type '{}'. Cause: NoClassDefFoundError", javaType.getTypeName());
return new CustomDefinition(context.getGeneratorConfig().createObjectNode(), true);
}
}
});
.with(Option.ALLOF_CLEANUP_AT_THE_END);;
if (!draft7) {
builder.with(new JacksonModule(JacksonOption.IGNORE_TYPE_INFO_TRANSFORM));
} else {
@@ -320,7 +300,6 @@ public class JsonSchemaGenerator {
// inline some type
builder.forTypesInGeneral()
.withCustomDefinitionProvider(new CustomDefinitionProviderV2() {
@Override
public CustomDefinition provideCustomSchemaDefinition(ResolvedType javaType, SchemaGenerationContext context) {
if (javaType.isInstanceOf(Map.class) || javaType.isInstanceOf(Enum.class)) {

View File

@@ -1,15 +0,0 @@
package io.kestra.core.exceptions;
public class InvalidTriggerConfigurationException extends KestraRuntimeException {
public InvalidTriggerConfigurationException() {
super();
}
public InvalidTriggerConfigurationException(String message) {
super(message);
}
public InvalidTriggerConfigurationException(String message, Throwable cause) {
super(message, cause);
}
}

View File

@@ -91,13 +91,11 @@ public class HttpConfiguration {
@Deprecated
private final String proxyPassword;
@Schema(title = "The username for HTTP basic authentication. " +
"Deprecated, use `auth` property with a `BasicAuthConfiguration` instance instead.")
@Schema(title = "The username for HTTP basic authentication.")
@Deprecated
private final String basicAuthUser;
@Schema(title = "The password for HTTP basic authentication. " +
"Deprecated, use `auth` property with a `BasicAuthConfiguration` instance instead.")
@Schema(title = "The password for HTTP basic authentication.")
@Deprecated
private final String basicAuthPassword;

View File

@@ -1,7 +0,0 @@
package io.kestra.core.models;
public enum FetchVersion {
LATEST,
OLD,
ALL
}

View File

@@ -91,16 +91,10 @@ public record QueryFilter(
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.REGEX, Op.IN, Op.NOT_IN, Op.PREFIX);
}
},
KIND("kind") {
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS,Op.NOT_EQUALS);
}
},
LABELS("labels") {
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.IN, Op.NOT_IN, Op.CONTAINS);
return List.of(Op.EQUALS, Op.NOT_EQUALS);
}
},
FLOW_ID("flowId") {
@@ -109,12 +103,6 @@ public record QueryFilter(
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.REGEX);
}
},
UPDATED("updated") {
@Override
public List<Op> supportedOp() {
return List.of(Op.GREATER_THAN_OR_EQUAL_TO, Op.GREATER_THAN, Op.LESS_THAN_OR_EQUAL_TO, Op.LESS_THAN, Op.EQUALS, Op.NOT_EQUALS);
}
},
START_DATE("startDate") {
@Override
public List<Op> supportedOp() {
@@ -223,7 +211,7 @@ public record QueryFilter(
return List.of(
Field.QUERY, Field.SCOPE, Field.FLOW_ID, Field.START_DATE, Field.END_DATE,
Field.STATE, Field.LABELS, Field.TRIGGER_EXECUTION_ID, Field.CHILD_FILTER,
Field.NAMESPACE,Field.KIND
Field.NAMESPACE
);
}
},
@@ -256,25 +244,6 @@ public record QueryFilter(
Field.START_DATE, Field.END_DATE, Field.TRIGGER_ID
);
}
},
SECRET_METADATA {
@Override
public List<Field> supportedField() {
return List.of(
Field.QUERY,
Field.NAMESPACE
);
}
},
KV_METADATA {
@Override
public List<Field> supportedField() {
return List.of(
Field.QUERY,
Field.NAMESPACE,
Field.UPDATED
);
}
};
public abstract List<Field> supportedField();
@@ -285,7 +254,7 @@ public record QueryFilter(
*
* @return List of {@code ResourceField} with resource names, fields, and operations.
*/
private static FieldOp toFieldInfo(Field field) {
List<Operation> operations = field.supportedOp().stream()
.map(Resource::toOperation)

View File

@@ -1,3 +0,0 @@
package io.kestra.core.models;
public record TenantAndNamespace(String tenantId, String namespace) {}

View File

@@ -12,8 +12,6 @@ import lombok.experimental.SuperBuilder;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Pattern;
import static io.kestra.core.utils.RegexPatterns.JAVA_IDENTIFIER_REGEX;
@io.kestra.core.models.annotations.Plugin
@SuperBuilder
@Getter
@@ -22,6 +20,6 @@ import static io.kestra.core.utils.RegexPatterns.JAVA_IDENTIFIER_REGEX;
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
public abstract class Condition implements Plugin, Rethrow.PredicateChecked<ConditionContext, InternalException> {
@NotNull
@Pattern(regexp = JAVA_IDENTIFIER_REGEX)
@Pattern(regexp="\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*(\\.\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*)*")
protected String type;
}

View File

@@ -32,8 +32,6 @@ public class Dashboard implements HasUID, DeletedInterface {
private String tenantId;
@Hidden
@NotNull
@NotBlank
private String id;
@NotNull

View File

@@ -20,8 +20,6 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import static io.kestra.core.utils.RegexPatterns.JAVA_IDENTIFIER_REGEX;
@SuperBuilder(toBuilder = true)
@Getter
@NoArgsConstructor
@@ -30,7 +28,7 @@ import static io.kestra.core.utils.RegexPatterns.JAVA_IDENTIFIER_REGEX;
public abstract class DataFilter<F extends Enum<F>, C extends ColumnDescriptor<F>> implements io.kestra.core.models.Plugin, IData<F> {
@NotNull
@NotBlank
@Pattern(regexp = JAVA_IDENTIFIER_REGEX)
@Pattern(regexp = "\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*(\\.\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*)*")
private String type;
private Map<String, C> columns;

View File

@@ -19,8 +19,6 @@ import java.util.Collections;
import java.util.List;
import java.util.Set;
import static io.kestra.core.utils.RegexPatterns.JAVA_IDENTIFIER_REGEX;
@SuperBuilder(toBuilder = true)
@Getter
@NoArgsConstructor
@@ -29,7 +27,7 @@ import static io.kestra.core.utils.RegexPatterns.JAVA_IDENTIFIER_REGEX;
public abstract class DataFilterKPI<F extends Enum<F>, C extends ColumnDescriptor<F>> implements io.kestra.core.models.Plugin, IData<F> {
@NotNull
@NotBlank
@Pattern(regexp = JAVA_IDENTIFIER_REGEX)
@Pattern(regexp = "\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*(\\.\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*)*")
private String type;
private C columns;

View File

@@ -12,8 +12,6 @@ import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.experimental.SuperBuilder;
import static io.kestra.core.utils.RegexPatterns.JAVA_IDENTIFIER_REGEX;
@SuperBuilder(toBuilder = true)
@Getter
@NoArgsConstructor
@@ -28,7 +26,7 @@ public abstract class Chart<P extends ChartOption> implements io.kestra.core.mod
@NotNull
@NotBlank
@Pattern(regexp = JAVA_IDENTIFIER_REGEX)
@Pattern(regexp = "\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*(\\.\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*)*")
protected String type;
@Valid

View File

@@ -496,7 +496,7 @@ public class Execution implements DeletedInterface, TenantInterface {
}
if (resolvedFinally != null && (
this.isTerminated(resolvedTasks, parentTaskRun) || this.hasFailedNoRetry(resolvedTasks, parentTaskRun
this.isTerminated(resolvedTasks, parentTaskRun) || this.hasFailed(resolvedTasks, parentTaskRun
))) {
return resolvedFinally;
}
@@ -584,13 +584,6 @@ public class Execution implements DeletedInterface, TenantInterface {
);
}
public Optional<TaskRun> findLastSubmitted(List<TaskRun> taskRuns) {
return Streams.findLast(taskRuns
.stream()
.filter(t -> t.getState().getCurrent() == State.Type.SUBMITTED)
);
}
public Optional<TaskRun> findLastRunning(List<TaskRun> taskRuns) {
return Streams.findLast(taskRuns
.stream()
@@ -941,15 +934,7 @@ public class Execution implements DeletedInterface, TenantInterface {
for (TaskRun current : taskRuns) {
if (!MapUtils.isEmpty(current.getOutputs())) {
if (current.getIteration() != null) {
Map<String, Object> merged = MapUtils.merge(taskOutputs, outputs(current, byIds));
// If one of two of the map is null in the merge() method, we just return the other
// And if the not null map is a Variables (= read only), we cast it back to a simple
// hashmap to avoid taskOutputs becoming read-only
// i.e this happen in nested loopUntil tasks
if (merged instanceof Variables) {
merged = new HashMap<>(merged);
}
taskOutputs = merged;
taskOutputs = MapUtils.merge(taskOutputs, outputs(current, byIds));
} else {
taskOutputs.putAll(outputs(current, byIds));
}

View File

@@ -10,7 +10,6 @@ import io.swagger.v3.oas.annotations.Hidden;
import jakarta.annotation.Nullable;
import lombok.Builder;
import lombok.Value;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.event.Level;
import jakarta.validation.constraints.NotNull;
@@ -121,16 +120,6 @@ public class LogEntry implements DeletedInterface, TenantInterface {
return logEntry.getTimestamp().toString() + " " + logEntry.getLevel() + " " + logEntry.getMessage();
}
public static String toPrettyString(LogEntry logEntry, Integer maxMessageSize) {
String message;
if (maxMessageSize != null && maxMessageSize > 0) {
message = StringUtils.truncate(logEntry.getMessage(), maxMessageSize);
} else {
message = logEntry.getMessage();
}
return logEntry.getTimestamp().toString() + " " + logEntry.getLevel() + " " + message;
}
public Map<String, String> toMap() {
return Stream
.of(

View File

@@ -4,7 +4,6 @@ import com.fasterxml.jackson.annotation.JsonInclude;
import io.kestra.core.models.DeletedInterface;
import io.kestra.core.models.TenantInterface;
import io.kestra.core.models.executions.metrics.Counter;
import io.kestra.core.models.executions.metrics.Gauge;
import io.kestra.core.models.executions.metrics.Timer;
import io.swagger.v3.oas.annotations.Hidden;
import jakarta.annotation.Nullable;
@@ -83,10 +82,6 @@ public class MetricEntry implements DeletedInterface, TenantInterface {
return counter.getValue();
}
if (metricEntry instanceof Gauge gauge) {
return gauge.getValue();
}
if (metricEntry instanceof Timer timer) {
return (double) timer.getValue().toMillis();
}

View File

@@ -3,9 +3,7 @@ package io.kestra.core.models.executions;
import com.fasterxml.jackson.annotation.JsonInclude;
import io.kestra.core.models.TenantInterface;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.tasks.FlowableTask;
import io.kestra.core.models.tasks.ResolvedTask;
import io.kestra.core.models.tasks.Task;
import io.kestra.core.models.tasks.retrys.AbstractRetry;
import io.kestra.core.utils.IdUtils;
import io.swagger.v3.oas.annotations.Hidden;
@@ -54,7 +52,6 @@ public class TaskRun implements TenantInterface {
@With
@JsonInclude(JsonInclude.Include.ALWAYS)
@Nullable
Variables outputs;
@NotNull
@@ -67,6 +64,7 @@ public class TaskRun implements TenantInterface {
Boolean dynamic;
// Set it to true to force execution even if the execution is killed
@Nullable
@With
Boolean forceExecution;
@@ -195,17 +193,17 @@ public class TaskRun implements TenantInterface {
taskRunBuilder.attempts = new ArrayList<>();
taskRunBuilder.attempts.add(TaskRunAttempt.builder()
.state(new State(this.state, State.Type.RESUBMITTED))
.state(new State(this.state, State.Type.KILLED))
.build()
);
} else {
ArrayList<TaskRunAttempt> taskRunAttempts = new ArrayList<>(taskRunBuilder.attempts);
TaskRunAttempt lastAttempt = taskRunAttempts.get(taskRunBuilder.attempts.size() - 1);
if (!lastAttempt.getState().isTerminated()) {
taskRunAttempts.set(taskRunBuilder.attempts.size() - 1, lastAttempt.withState(State.Type.RESUBMITTED));
taskRunAttempts.set(taskRunBuilder.attempts.size() - 1, lastAttempt.withState(State.Type.KILLED));
} else {
taskRunAttempts.add(TaskRunAttempt.builder()
.state(new State().withState(State.Type.RESUBMITTED))
.state(new State().withState(State.Type.KILLED))
.build()
);
}
@@ -219,7 +217,7 @@ public class TaskRun implements TenantInterface {
public boolean isSame(TaskRun taskRun) {
return this.getId().equals(taskRun.getId()) &&
((this.getValue() == null && taskRun.getValue() == null) || (this.getValue() != null && this.getValue().equals(taskRun.getValue()))) &&
((this.getIteration() == null && taskRun.getIteration() == null) || (this.getIteration() != null && this.getIteration().equals(taskRun.getIteration())));
((this.getIteration() == null && taskRun.getIteration() == null) || (this.getIteration() != null && this.getIteration().equals(taskRun.getIteration()))) ;
}
public String toString(boolean pretty) {
@@ -251,7 +249,7 @@ public class TaskRun implements TenantInterface {
* This method is used when the retry is apply on a task
* but the retry type is NEW_EXECUTION
*
* @param retry Contains the retry configuration
* @param retry Contains the retry configuration
* @param execution Contains the attempt number and original creation date
* @return The next retry date, null if maxAttempt || maxDuration is reached
*/
@@ -272,7 +270,6 @@ public class TaskRun implements TenantInterface {
/**
* This method is used when the Retry definition comes from the flow
*
* @param retry The retry configuration
* @return The next retry date, null if maxAttempt || maxDuration is reached
*/

View File

@@ -1,78 +0,0 @@
package io.kestra.core.models.executions.metrics;
import com.fasterxml.jackson.annotation.JsonInclude;
import jakarta.annotation.Nullable;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.ToString;
import io.kestra.core.metrics.MetricRegistry;
import io.kestra.core.models.executions.AbstractMetricEntry;
import jakarta.validation.constraints.NotNull;
import java.util.Map;
@ToString
@EqualsAndHashCode
@Getter
@NoArgsConstructor
public class Gauge extends AbstractMetricEntry<Double> {
public static final String TYPE = "gauge";
@NotNull
@JsonInclude
private final String type = TYPE;
@NotNull
@EqualsAndHashCode.Exclude
private Double value;
private Gauge(@NotNull String name, @Nullable String description, @NotNull Double value, String... tags) {
super(name, description, tags);
this.value = value;
}
public static Gauge of(@NotNull String name, @NotNull Double value, String... tags) {
return new Gauge(name, null, value, tags);
}
public static Gauge of(@NotNull String name, @Nullable String description, @NotNull Double value, String... tags) {
return new Gauge(name, description, value, tags);
}
public static Gauge of(@NotNull String name, @NotNull Integer value, String... tags) {
return new Gauge(name, null, (double) value, tags);
}
public static Gauge of(@NotNull String name, @Nullable String description, @NotNull Integer value, String... tags) {
return new Gauge(name, description, (double) value, tags);
}
public static Gauge of(@NotNull String name, @NotNull Long value, String... tags) {
return new Gauge(name, null, (double) value, tags);
}
public static Gauge of(@NotNull String name, @Nullable String description, @NotNull Long value, String... tags) {
return new Gauge(name, description, (double) value, tags);
}
public static Gauge of(@NotNull String name, @NotNull Float value, String... tags) {
return new Gauge(name, null, (double) value, tags);
}
public static Gauge of(@NotNull String name, @Nullable String description, @NotNull Float value, String... tags) {
return new Gauge(name, description, (double) value, tags);
}
@Override
public void register(MetricRegistry meterRegistry, String name, String description, Map<String, String> tags) {
meterRegistry
.gauge(this.metricName(name), description, this.value, this.tagsAsArray(tags));
}
@Override
public void increment(Double value) {
this.value = value;
}
}

View File

@@ -61,22 +61,18 @@ public abstract class AbstractFlow implements FlowInterface {
@JsonSerialize(using = ListOrMapOfLabelSerializer.class)
@JsonDeserialize(using = ListOrMapOfLabelDeserializer.class)
@Schema(
description = "Labels as a list of Label (key/value pairs) or as a map of string to string.",
oneOf = {
Label[].class,
Map.class
}
description = "Labels as a list of Label (key/value pairs) or as a map of string to string.",
oneOf = {
Label[].class,
Map.class
}
)
@Valid
List<Label> labels;
@Schema(
type = "object",
additionalProperties = Schema.AdditionalPropertiesValue.FALSE
)
@Schema(additionalProperties = Schema.AdditionalPropertiesValue.TRUE)
Map<String, Object> variables;
@Valid
private WorkerGroup workerGroup;

View File

@@ -49,7 +49,7 @@ import java.util.stream.Stream;
public class Flow extends AbstractFlow implements HasUID {
private static final ObjectMapper NON_DEFAULT_OBJECT_MAPPER = JacksonMapper.ofYaml()
.copy()
.setDefaultPropertyInclusion(JsonInclude.Include.NON_DEFAULT);
.setSerializationInclusion(JsonInclude.Include.NON_DEFAULT);
private static final ObjectMapper WITHOUT_REVISION_OBJECT_MAPPER = NON_DEFAULT_OBJECT_MAPPER.copy()
.configure(SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS, true)
@@ -61,11 +61,6 @@ public class Flow extends AbstractFlow implements HasUID {
}
});
@Schema(
type = "object",
additionalProperties = Schema.AdditionalPropertiesValue.FALSE
)
Map<String, Object> variables;
@Valid

View File

@@ -136,7 +136,7 @@ public interface FlowInterface extends FlowId, DeletedInterface, TenantInterface
class SourceGenerator {
private static final ObjectMapper NON_DEFAULT_OBJECT_MAPPER = JacksonMapper.ofJson()
.copy()
.setDefaultPropertyInclusion(JsonInclude.Include.NON_DEFAULT);
.setSerializationInclusion(JsonInclude.Include.NON_DEFAULT);
static String generate(final FlowInterface flow) {
try {

View File

@@ -5,7 +5,7 @@ import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import io.kestra.core.models.flows.input.*;
import io.kestra.core.models.property.Property;
import io.kestra.core.validations.InputValidation;
import io.kestra.core.runners.RunContext;
import io.micronaut.core.annotation.Introspected;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.ConstraintViolationException;
@@ -18,6 +18,8 @@ import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.experimental.SuperBuilder;
import java.util.function.Function;
@SuppressWarnings("deprecation")
@SuperBuilder
@Getter
@@ -45,7 +47,6 @@ import lombok.experimental.SuperBuilder;
@JsonSubTypes.Type(value = YamlInput.class, name = "YAML"),
@JsonSubTypes.Type(value = EmailInput.class, name = "EMAIL"),
})
@InputValidation
public abstract class Input<T> implements Data {
@Schema(
title = "The ID of the input."
@@ -82,13 +83,7 @@ public abstract class Input<T> implements Data {
title = "The default value to use if no value is specified."
)
Property<T> defaults;
@Schema(
title = "The suggested value for the input.",
description = "Optional UI hint for pre-filling the input. Cannot be used together with a default value."
)
Property<T> prefill;
@Schema(
title = "The display name of the input."
)

View File

@@ -1,7 +1,6 @@
package io.kestra.core.models.flows;
import io.micronaut.core.annotation.Introspected;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.Valid;
import lombok.Getter;
import lombok.NoArgsConstructor;
@@ -34,12 +33,6 @@ public class Output implements Data {
* The output value. Can be a dynamic expression.
*/
@NotNull
@Schema(
oneOf = {
Object.class,
String.class
}
)
Object value;
/**

View File

@@ -2,7 +2,6 @@ package io.kestra.core.models.flows;
import io.kestra.core.validations.PluginDefaultValidation;
import io.micronaut.core.annotation.Introspected;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.constraints.NotNull;
import lombok.AllArgsConstructor;
import lombok.Builder;
@@ -22,10 +21,6 @@ public class PluginDefault {
@Builder.Default
private final boolean forced = false;
@Schema(
type = "object",
additionalProperties = Schema.AdditionalPropertiesValue.FALSE
)
private final Map<String, Object> values;
}

View File

@@ -86,10 +86,11 @@ public class State {
@JsonProperty(access = JsonProperty.Access.READ_ONLY)
public Duration getDuration() {
return Duration.between(
this.histories.getFirst().getDate(),
this.histories.size() > 1 ? this.histories.get(this.histories.size() - 1).getDate() : Instant.now()
);
if(this.getEndDate().isPresent()){
return Duration.between(this.getStartDate(), this.getEndDate().get());
} else {
return Duration.between(this.getStartDate(), Instant.now());
}
}
@JsonProperty(access = JsonProperty.Access.READ_ONLY)
@@ -222,7 +223,6 @@ public class State {
@Introspected
public enum Type {
CREATED,
SUBMITTED,
RUNNING,
PAUSED,
RESTARTED,
@@ -236,15 +236,14 @@ public class State {
RETRYING,
RETRIED,
SKIPPED,
BREAKPOINT,
RESUBMITTED;
BREAKPOINT;
public boolean isTerminated() {
return this == Type.FAILED || this == Type.WARNING || this == Type.SUCCESS || this == Type.KILLED || this == Type.CANCELLED || this == Type.RETRIED || this == Type.SKIPPED || this == Type.RESUBMITTED;
return this == Type.FAILED || this == Type.WARNING || this == Type.SUCCESS || this == Type.KILLED || this == Type.CANCELLED || this == Type.RETRIED || this == Type.SKIPPED;
}
public boolean isTerminatedNoFail() {
return this == Type.WARNING || this == Type.SUCCESS || this == Type.RETRIED || this == Type.SKIPPED || this == Type.RESUBMITTED;
return this == Type.WARNING || this == Type.SUCCESS || this == Type.RETRIED || this == Type.SKIPPED;
}
public boolean isCreated() {
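
For context on the `getDuration()` change above, here is a minimal, self-contained sketch (plain Java, not Kestra's actual `State` class) of the two behaviours involved: deriving the duration from the first and last history entries, versus measuring against an optional end date and falling back to `Instant.now()` while the execution is still running. The `History` record is a hypothetical stand-in for illustration.

```java
import java.time.Duration;
import java.time.Instant;
import java.util.List;
import java.util.Optional;

public class DurationSketch {
    // Hypothetical stand-in for a state history entry, for illustration only.
    record History(String state, Instant date) {}

    // Duration from the first history entry to the last one, or to "now" when only one entry exists.
    static Duration fromHistories(List<History> histories) {
        Instant start = histories.getFirst().date();
        Instant end = histories.size() > 1 ? histories.getLast().date() : Instant.now();
        return Duration.between(start, end);
    }

    // Equivalent formulation based on a start date and an optional end date.
    static Duration fromStartAndEnd(Instant start, Optional<Instant> end) {
        return Duration.between(start, end.orElseGet(Instant::now));
    }

    public static void main(String[] args) {
        Instant created = Instant.parse("2025-10-06T08:00:00Z");
        Instant success = Instant.parse("2025-10-06T08:00:42Z");
        System.out.println(fromHistories(List.of(new History("CREATED", created), new History("SUCCESS", success)))); // PT42S
        System.out.println(fromStartAndEnd(created, Optional.empty())); // elapsed time so far
    }
}
```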

View File

@@ -1,6 +1,5 @@
package io.kestra.core.models.flows.input;
import java.util.Set;
import io.kestra.core.models.flows.Input;
import io.kestra.core.validations.FileInputValidation;
import jakarta.validation.ConstraintViolationException;
@@ -23,35 +22,10 @@ public class FileInput extends Input<URI> {
@Deprecated(since = "0.24", forRemoval = true)
public String extension;
/**
* List of allowed file extensions (e.g., [".csv", ".txt", ".pdf"]).
* Each extension must start with a dot.
*/
private List<String> allowedFileExtensions;
/**
* Gets the file extension from the URI's path
*/
private String getFileExtension(URI uri) {
String path = uri.getPath();
int lastDotIndex = path.lastIndexOf(".");
return lastDotIndex >= 0 ? path.substring(lastDotIndex).toLowerCase() : "";
}
@Override
public void validate(URI input) throws ConstraintViolationException {
if (input == null || allowedFileExtensions == null || allowedFileExtensions.isEmpty()) {
return;
}
String extension = getFileExtension(input);
if (!allowedFileExtensions.contains(extension.toLowerCase())) {
throw new ConstraintViolationException(
"File type not allowed. Accepted extensions: " + String.join(", ", allowedFileExtensions),
Set.of()
);
}
// no validation yet
}
public static String findFileInputExtension(@NotNull final List<Input<?>> inputs, @NotNull final String fileName) {
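
As a reference for the extension allow-list logic removed in the hunk above, a minimal stand-alone sketch of the same check; the class and method names here are illustrative, not Kestra's.

```java
import java.net.URI;
import java.util.List;

public class ExtensionCheckSketch {
    // Extracts the lowercase extension (including the dot) from a URI path, or "" when absent.
    static String fileExtension(URI uri) {
        String path = uri.getPath();
        int lastDot = path.lastIndexOf('.');
        return lastDot >= 0 ? path.substring(lastDot).toLowerCase() : "";
    }

    static boolean isAllowed(URI uri, List<String> allowedExtensions) {
        return allowedExtensions.contains(fileExtension(uri));
    }

    public static void main(String[] args) {
        List<String> allowed = List.of(".csv", ".txt");
        System.out.println(isAllowed(URI.create("kestra:///data/orders.CSV"), allowed)); // true
        System.out.println(isAllowed(URI.create("kestra:///data/report.pdf"), allowed)); // false
    }
}
```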

View File

@@ -1,79 +0,0 @@
package io.kestra.core.models.kv;
import io.kestra.core.models.DeletedInterface;
import io.kestra.core.models.HasUID;
import io.kestra.core.models.TenantInterface;
import io.kestra.core.storages.kv.KVEntry;
import io.kestra.core.utils.IdUtils;
import io.swagger.v3.oas.annotations.Hidden;
import jakarta.annotation.Nullable;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Pattern;
import lombok.*;
import lombok.experimental.FieldDefaults;
import lombok.extern.slf4j.Slf4j;
import java.time.Instant;
import java.util.Optional;
@Builder(toBuilder = true)
@Slf4j
@Getter
@FieldDefaults(makeFinal = true, level = AccessLevel.PRIVATE)
@AllArgsConstructor
@ToString
@EqualsAndHashCode
public class PersistedKvMetadata implements DeletedInterface, TenantInterface, HasUID {
@With
@Hidden
@Pattern(regexp = "^[a-z0-9][a-z0-9_-]*")
private String tenantId;
@NotNull
private String namespace;
@NotNull
private String name;
private String description;
@NotNull
private Integer version;
@Builder.Default
private boolean last = true;
@Nullable
private Instant expirationDate;
@Nullable
private Instant created;
@Nullable
private Instant updated;
private boolean deleted;
public static PersistedKvMetadata from(String tenantId, KVEntry kvEntry) {
return PersistedKvMetadata.builder()
.tenantId(tenantId)
.namespace(kvEntry.namespace())
.name(kvEntry.key())
.version(kvEntry.version())
.description(kvEntry.description())
.created(kvEntry.creationDate())
.updated(kvEntry.updateDate())
.expirationDate(kvEntry.expirationDate())
.build();
}
public PersistedKvMetadata asLast() {
Instant saveDate = Instant.now();
return this.toBuilder().created(Optional.ofNullable(this.created).orElse(saveDate)).updated(saveDate).last(true).build();
}
@Override
public String uid() {
return IdUtils.fromParts(getTenantId(), getNamespace(), getName(), getVersion().toString());
}
}

View File

@@ -12,7 +12,6 @@ import com.google.common.annotations.VisibleForTesting;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.runners.RunContext;
import io.kestra.core.serializers.JacksonMapper;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.constraints.NotNull;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
@@ -37,12 +36,6 @@ import static io.kestra.core.utils.Rethrow.throwFunction;
@Builder
@NoArgsConstructor
@AllArgsConstructor(access = AccessLevel.PACKAGE)
@Schema(
oneOf = {
Object.class,
String.class
}
)
public class Property<T> {
// By default, durations are stored as numbers.
// We cannot change that globally, as in JDBC/Elastic 'execution.state.duration' must be a number to be able to aggregate them.
@@ -75,7 +68,7 @@ public class Property<T> {
String getExpression() {
return expression;
}
/**
* Returns a new {@link Property} with no cached rendered value,
* so that the next render will evaluate its original Pebble expression.
@@ -91,9 +84,9 @@ public class Property<T> {
/**
* Build a new Property object with a value already set.<br>
* <p>
*
* A property built with this method will always return the value passed at build time; no rendering will be done.
* <p>
*
* Use {@link #ofExpression(String)} to build a property with a Pebble expression instead.
*/
public static <V> Property<V> ofValue(V value) {
@@ -133,12 +126,12 @@ public class Property<T> {
/**
* Build a new Property object with a Pebble expression.<br>
* <p>
*
* Use {@link #ofValue(Object)} to build a property with a value instead.
*/
public static <V> Property<V> ofExpression(@NotNull String expression) {
Objects.requireNonNull(expression, "'expression' is required");
if (!expression.contains("{")) {
if(!expression.contains("{")) {
throw new IllegalArgumentException("'expression' must be a valid Pebble expression");
}
@@ -147,7 +140,7 @@ public class Property<T> {
/**
* Render a property then convert it to its target type.<br>
* <p>
*
* This method is designed to be used only by the {@link io.kestra.core.runners.RunContextProperty}.
*
* @see io.kestra.core.runners.RunContextProperty#as(Class)
@@ -158,14 +151,14 @@ public class Property<T> {
/**
* Render a property with additional variables, then convert it to its target type.<br>
* <p>
*
* This method is designed to be used only by the {@link io.kestra.core.runners.RunContextProperty}.
*
* @see io.kestra.core.runners.RunContextProperty#as(Class, Map)
*/
public static <T> T as(Property<T> property, PropertyContext context, Class<T> clazz, Map<String, Object> variables) throws IllegalVariableEvaluationException {
if (property.value == null) {
String rendered = context.render(property.expression, variables);
String rendered = context.render(property.expression, variables);
property.value = MAPPER.convertValue(rendered, clazz);
}
@@ -174,7 +167,7 @@ public class Property<T> {
/**
* Render a property then convert it as a list of target type.<br>
* <p>
*
* This method is designed to be used only by the {@link io.kestra.core.runners.RunContextProperty}.
*
* @see io.kestra.core.runners.RunContextProperty#asList(Class)
@@ -185,7 +178,7 @@ public class Property<T> {
/**
* Render a property with additional variables, then convert it as a list of target type.<br>
* <p>
*
* This method is designed to be used only by the {@link io.kestra.core.runners.RunContextProperty}.
*
* @see io.kestra.core.runners.RunContextProperty#asList(Class, Map)
@@ -225,25 +218,25 @@ public class Property<T> {
/**
* Render a property then convert it as a map of target types.<br>
* <p>
*
* This method is designed to be used only by the {@link io.kestra.core.runners.RunContextProperty}.
*
* @see io.kestra.core.runners.RunContextProperty#asMap(Class, Class)
*/
public static <T, K, V> T asMap(Property<T> property, RunContext runContext, Class<K> keyClass, Class<V> valueClass) throws IllegalVariableEvaluationException {
public static <T, K,V> T asMap(Property<T> property, RunContext runContext, Class<K> keyClass, Class<V> valueClass) throws IllegalVariableEvaluationException {
return asMap(property, runContext, keyClass, valueClass, Map.of());
}
/**
* Render a property with additional variables, then convert it as a map of target types.<br>
* <p>
*
* This method is safe to be used as many times as you want as the rendering and conversion will be cached.
* Warning, due to the caching mechanism, this method is not thread-safe.
*
* @see io.kestra.core.runners.RunContextProperty#asMap(Class, Class, Map)
*/
@SuppressWarnings({"rawtypes", "unchecked"})
public static <T, K, V> T asMap(Property<T> property, RunContext runContext, Class<K> keyClass, Class<V> valueClass, Map<String, Object> variables) throws IllegalVariableEvaluationException {
public static <T, K,V> T asMap(Property<T> property, RunContext runContext, Class<K> keyClass, Class<V> valueClass, Map<String, Object> variables) throws IllegalVariableEvaluationException {
if (property.value == null) {
JavaType targetMapType = MAPPER.getTypeFactory().constructMapType(Map.class, keyClass, valueClass);
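
A short usage sketch of the two factory methods documented above, `Property.ofValue` and `Property.ofExpression`, assuming the Kestra core module is on the classpath; the input name used in the Pebble expression is only an example.

```java
import io.kestra.core.models.property.Property;

public class PropertySketch {
    public static void main(String[] args) {
        // Fixed value: every render returns 30, no Pebble evaluation happens.
        Property<Integer> timeout = Property.ofValue(30);

        // Pebble expression: evaluated later, when rendered through a RunContext.
        // The guard shown above rejects strings without a Pebble delimiter.
        Property<String> bucket = Property.ofExpression("{{ inputs.bucket }}");

        System.out.println(timeout + " / " + bucket);

        try {
            Property.ofExpression("not-an-expression");
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage()); // 'expression' must be a valid Pebble expression
        }
    }
}
```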

View File

@@ -8,8 +8,6 @@ import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Pattern;
import static io.kestra.core.utils.RegexPatterns.JAVA_IDENTIFIER_REGEX;
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
public interface TaskInterface extends Plugin, PluginVersioning {
@NotNull
@@ -19,7 +17,7 @@ public interface TaskInterface extends Plugin, PluginVersioning {
@NotNull
@NotBlank
@Pattern(regexp = JAVA_IDENTIFIER_REGEX)
@Pattern(regexp="\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*(\\.\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*)*")
@Schema(title = "The class name of this task.")
String getType();
}

View File

@@ -11,8 +11,6 @@ import lombok.NoArgsConstructor;
import lombok.experimental.SuperBuilder;
import reactor.core.publisher.Flux;
import static io.kestra.core.utils.RegexPatterns.JAVA_IDENTIFIER_REGEX;
@Plugin
@SuperBuilder(toBuilder = true)
@Getter
@@ -24,7 +22,7 @@ public abstract class LogExporter<T extends Output> implements io.kestra.core.m
protected String id;
@NotBlank
@Pattern(regexp = JAVA_IDENTIFIER_REGEX)
@Pattern(regexp="\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*(\\.\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*)*")
protected String type;
public abstract T sendLogs(RunContext runContext, Flux<LogRecord> logRecords) throws Exception;

View File

@@ -8,16 +8,12 @@ public final class LogRecordMapper {
private LogRecordMapper(){}
public static LogRecord mapToLogRecord(LogEntry log) {
return mapToLogRecord(log, null);
}
public static LogRecord mapToLogRecord(LogEntry log, Integer maxMessageSize) {
return LogRecord.builder()
.resource("Kestra")
.timestampEpochNanos(instantInNanos(log.getTimestamp()))
.severity(log.getLevel().name())
.attributes(log.toLogMap())
.bodyValue(LogEntry.toPrettyString(log, maxMessageSize))
.bodyValue(LogEntry.toPrettyString(log))
.build();
}

View File

@@ -22,7 +22,6 @@ import java.util.Map;
@JsonSubTypes({
@JsonSubTypes.Type(value = CounterMetric.class, name = "counter"),
@JsonSubTypes.Type(value = TimerMetric.class, name = "timer"),
@JsonSubTypes.Type(value = GaugeMetric.class, name = "gauge"),
})
@ToString
@EqualsAndHashCode

View File

@@ -1,44 +0,0 @@
package io.kestra.core.models.tasks.metrics;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.models.executions.AbstractMetricEntry;
import io.kestra.core.models.executions.metrics.Gauge;
import io.kestra.core.models.property.Property;
import io.kestra.core.runners.RunContext;
import jakarta.validation.constraints.NotNull;
import lombok.*;
import lombok.experimental.SuperBuilder;
import java.util.Map;
import java.util.stream.Stream;
@ToString
@EqualsAndHashCode
@Getter
@NoArgsConstructor
@AllArgsConstructor
@SuperBuilder
public class GaugeMetric extends AbstractMetric {
public static final String TYPE = "gauge";
@NotNull
@EqualsAndHashCode.Exclude
private Property<Double> value;
@Override
public AbstractMetricEntry<?> toMetric(RunContext runContext) throws IllegalVariableEvaluationException {
String name = runContext.render(this.name).as(String.class).orElseThrow();
Double value = runContext.render(this.value).as(Double.class).orElseThrow();
String description = runContext.render(this.description).as(String.class).orElse(null);
Map<String, String> tags = runContext.render(this.tags).asMap(String.class, String.class);
String[] tagsAsStrings = tags.entrySet().stream()
.flatMap(e -> Stream.of(e.getKey(), e.getValue()))
.toArray(String[]::new);
return Gauge.of(name, description, value, tagsAsStrings);
}
public String getType() {
return TYPE;
}
}
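
The removed `toMetric` above flattens the rendered tag map into an alternating key/value array; here is a minimal sketch of just that flattening step, in plain Java with illustrative names.

```java
import java.util.Map;
import java.util.stream.Stream;

public class TagFlattenSketch {
    // Flattens {"env":"prod"} into ["env", "prod"], the alternating key/value
    // form expected by the metric factory used in the hunk above.
    static String[] flatten(Map<String, String> tags) {
        return tags.entrySet().stream()
            .flatMap(e -> Stream.of(e.getKey(), e.getValue()))
            .toArray(String[]::new);
    }

    public static void main(String[] args) {
        System.out.println(String.join(",", flatten(Map.of("env", "prod")))); // env,prod
    }
}
```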

View File

@@ -1,11 +1,3 @@
package io.kestra.core.models.tasks.runners;
import io.kestra.core.models.property.Property;
import io.swagger.v3.oas.annotations.media.Schema;
public interface RemoteRunnerInterface {
@Schema(
title = "Whether to synchronize working directory from remote runner back to local one after run."
)
Property<Boolean> getSyncWorkingDirectory();
}
public interface RemoteRunnerInterface {}

View File

@@ -30,10 +30,6 @@ public interface TaskCommands {
Map<String, Object> getAdditionalVars();
default String outputDirectoryName() {
return this.getWorkingDirectory().relativize(this.getOutputDirectory()).toString();
}
Path getWorkingDirectory();
Path getOutputDirectory();

View File

@@ -7,6 +7,7 @@ import io.kestra.core.models.Plugin;
import io.kestra.core.models.PluginVersioning;
import io.kestra.core.models.WorkerJobLifecycle;
import io.kestra.core.models.annotations.PluginProperty;
import io.kestra.core.models.property.Property;
import io.kestra.core.runners.RunContext;
import io.kestra.plugin.core.runner.Process;
import jakarta.validation.constraints.NotBlank;
@@ -18,14 +19,13 @@ import lombok.NoArgsConstructor;
import lombok.experimental.SuperBuilder;
import org.apache.commons.lang3.SystemUtils;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Stream;
import static io.kestra.core.utils.WindowsUtils.windowsToUnixPath;
import static io.kestra.core.utils.RegexPatterns.JAVA_IDENTIFIER_REGEX;
/**
* Base class for all task runners.
@@ -37,7 +37,7 @@ import static io.kestra.core.utils.RegexPatterns.JAVA_IDENTIFIER_REGEX;
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
public abstract class TaskRunner<T extends TaskRunnerDetailResult> implements Plugin, PluginVersioning, WorkerJobLifecycle {
@NotBlank
@Pattern(regexp = JAVA_IDENTIFIER_REGEX)
@Pattern(regexp="\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*(\\.\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*)*")
protected String type;
@PluginProperty(hidden = true, group = PluginProperty.CORE_GROUP)

View File

@@ -44,7 +44,7 @@ public class Template implements DeletedInterface, TenantInterface, HasUID {
return exclusions.contains(m.getName()) || super.hasIgnoreMarker(m);
}
})
.setDefaultPropertyInclusion(JsonInclude.Include.NON_DEFAULT);
.setSerializationInclusion(JsonInclude.Include.NON_DEFAULT);
@Setter
@Hidden

View File

@@ -47,9 +47,9 @@ abstract public class AbstractTrigger implements TriggerInterface {
@Valid
protected List<@Valid @NotNull Condition> conditions;
@NotNull
@Builder.Default
@PluginProperty(hidden = true, group = PluginProperty.CORE_GROUP)
@Schema(defaultValue = "false")
private boolean disabled = false;
@Valid

View File

@@ -1,12 +1,10 @@
package io.kestra.core.models.triggers;
import io.kestra.core.exceptions.InvalidTriggerConfigurationException;
import io.kestra.core.models.annotations.PluginProperty;
import io.kestra.core.models.conditions.ConditionContext;
import io.kestra.core.models.executions.Execution;
import io.swagger.v3.oas.annotations.media.Schema;
import java.time.DateTimeException;
import java.time.Duration;
import java.time.ZonedDateTime;
import java.util.Optional;
@@ -31,29 +29,15 @@ public interface PollingTriggerInterface extends WorkerTriggerInterface {
* Compute the next evaluation date of the trigger based on the existing trigger context: by default, it uses the current date and the interval.
* Schedulable triggers must override this method.
*/
default ZonedDateTime nextEvaluationDate(ConditionContext conditionContext, Optional<? extends TriggerContext> last) throws InvalidTriggerConfigurationException {
return computeNextEvaluationDate();
default ZonedDateTime nextEvaluationDate(ConditionContext conditionContext, Optional<? extends TriggerContext> last) throws Exception {
return ZonedDateTime.now().plus(this.getInterval());
}
/**
* Compute the next evaluation date of the trigger: by default, it uses the current date and the interval.
* Schedulable triggers must override this method as it's used to init them when there is no evaluation date.
*/
default ZonedDateTime nextEvaluationDate() throws InvalidTriggerConfigurationException {
return computeNextEvaluationDate();
}
/**
* Computes the next evaluation date using the configured interval.
* Throws InvalidTriggerConfigurationException if the interval causes a date overflow.
*/
private ZonedDateTime computeNextEvaluationDate() throws InvalidTriggerConfigurationException {
Duration interval = this.getInterval();
try {
return ZonedDateTime.now().plus(interval);
} catch (DateTimeException | ArithmeticException e) {
throw new InvalidTriggerConfigurationException("Trigger interval too large", e);
}
default ZonedDateTime nextEvaluationDate() {
return ZonedDateTime.now().plus(this.getInterval());
}
}
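
To illustrate the overflow case the removed `computeNextEvaluationDate()` guarded against, a tiny self-contained sketch: adding an absurdly large interval to the current date raises `DateTimeException` or `ArithmeticException`, which the removed code translated into `InvalidTriggerConfigurationException`.

```java
import java.time.DateTimeException;
import java.time.Duration;
import java.time.ZonedDateTime;

public class IntervalOverflowSketch {
    public static void main(String[] args) {
        Duration huge = Duration.ofSeconds(Long.MAX_VALUE);
        try {
            System.out.println(ZonedDateTime.now().plus(huge));
        } catch (DateTimeException | ArithmeticException e) {
            // This is the overflow the removed guard reported as "Trigger interval too large".
            System.out.println("Interval too large: " + e.getMessage());
        }
    }
}
```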

View File

@@ -1,40 +0,0 @@
package io.kestra.core.models.triggers;
import io.kestra.core.models.property.Property;
import io.swagger.v3.oas.annotations.media.Schema;
import java.time.Duration;
public interface StatefulTriggerInterface {
@Schema(
title = "Trigger event type",
description = """
Defines when the trigger fires.
- `CREATE`: only for newly discovered entities.
- `UPDATE`: only when an already-seen entity changes.
- `CREATE_OR_UPDATE`: fires on either event.
"""
)
Property<On> getOn();
@Schema(
title = "State key",
description = """
JSON-type KV key for persisted state.
Default: `<namespace>__<flowId>__<triggerId>`
"""
)
Property<String> getStateKey();
@Schema(
title = "State TTL",
description = "TTL for persisted state entries (e.g., PT24H, P7D)."
)
Property<Duration> getStateTtl();
enum On {
CREATE,
UPDATE,
CREATE_OR_UPDATE
}
}

View File

@@ -1,91 +0,0 @@
package io.kestra.core.models.triggers;
import com.fasterxml.jackson.core.type.TypeReference;
import io.kestra.core.runners.RunContext;
import io.kestra.core.serializers.JacksonMapper;
import io.kestra.core.storages.kv.KVMetadata;
import io.kestra.core.storages.kv.KVValueAndMetadata;
import java.time.Duration;
import java.time.Instant;
import java.util.*;
import java.util.stream.Collectors;
public class StatefulTriggerService {
public record Entry(String uri, String version, Instant modifiedAt, Instant lastSeenAt) {
public static Entry candidate(String uri, String version, Instant modifiedAt) {
return new Entry(uri, version, modifiedAt, null);
}
}
public record StateUpdate(boolean fire, boolean isNew) {}
public static Map<String, Entry> readState(RunContext runContext, String key, Optional<Duration> ttl) {
try {
var kv = runContext.namespaceKv(runContext.flowInfo().namespace()).getValue(key);
if (kv.isEmpty()) {
return new HashMap<>();
}
var entries = JacksonMapper.ofJson().readValue((byte[]) kv.get().value(), new TypeReference<List<Entry>>() {});
var cutoff = ttl.map(d -> Instant.now().minus(d)).orElse(Instant.MIN);
return entries.stream()
.filter(e -> Optional.ofNullable(e.lastSeenAt()).orElse(Instant.now()).isAfter(cutoff))
.collect(Collectors.toMap(Entry::uri, e -> e));
} catch (Exception e) {
runContext.logger().warn("readState failed", e);
return new HashMap<>();
}
}
public static void writeState(RunContext runContext, String key, Map<String, Entry> state, Optional<Duration> ttl) {
try {
var bytes = JacksonMapper.ofJson().writeValueAsBytes(state.values());
var meta = new KVMetadata("trigger state", ttl.orElse(null));
runContext.namespaceKv(runContext.flowInfo().namespace()).put(key, new KVValueAndMetadata(meta, bytes));
} catch (Exception e) {
runContext.logger().warn("writeState failed", e);
}
}
public static StateUpdate computeAndUpdateState(Map<String, Entry> state, Entry candidate, StatefulTriggerInterface.On on) {
var prev = state.get(candidate.uri());
var isNew = prev == null;
var fire = shouldFire(prev, candidate.version(), on);
Instant lastSeenAt;
if (fire || isNew) {
// it is newly seen or changed
lastSeenAt = Instant.now();
} else if (prev.lastSeenAt() != null) {
// it is unchanged but already tracked before
lastSeenAt = prev.lastSeenAt();
} else {
lastSeenAt = Instant.now();
}
var newEntry = new Entry(candidate.uri(), candidate.version(), candidate.modifiedAt(), lastSeenAt);
state.put(candidate.uri(), newEntry);
return new StatefulTriggerService.StateUpdate(fire, isNew);
}
public static boolean shouldFire(Entry prev, String version, StatefulTriggerInterface.On on) {
if (prev == null) {
return on == StatefulTriggerInterface.On.CREATE || on == StatefulTriggerInterface.On.CREATE_OR_UPDATE;
}
if (!Objects.equals(prev.version(), version)) {
return on == StatefulTriggerInterface.On.UPDATE || on == StatefulTriggerInterface.On.CREATE_OR_UPDATE;
}
return false;
}
public static String defaultKey(String ns, String flowId, String triggerId) {
return String.join("_", ns, flowId, triggerId);
}
}
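
A minimal, self-contained sketch of the decision table implemented by the removed `shouldFire()`; the enum and method here are local stand-ins for illustration, not the Kestra types.

```java
import java.util.Objects;

public class ShouldFireSketch {
    // Local stand-in for the trigger's On enum.
    enum On { CREATE, UPDATE, CREATE_OR_UPDATE }

    // New entities fire on CREATE, version changes fire on UPDATE,
    // and CREATE_OR_UPDATE fires in both cases; unchanged entities never fire.
    static boolean shouldFire(String previousVersion, String currentVersion, On on) {
        if (previousVersion == null) {
            return on == On.CREATE || on == On.CREATE_OR_UPDATE;
        }
        if (!Objects.equals(previousVersion, currentVersion)) {
            return on == On.UPDATE || on == On.CREATE_OR_UPDATE;
        }
        return false;
    }

    public static void main(String[] args) {
        System.out.println(shouldFire(null, "etag-1", On.CREATE));               // true  (newly discovered)
        System.out.println(shouldFire("etag-1", "etag-2", On.UPDATE));           // true  (changed)
        System.out.println(shouldFire("etag-1", "etag-1", On.CREATE_OR_UPDATE)); // false (unchanged)
    }
}
```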

View File

@@ -1,6 +1,5 @@
package io.kestra.core.models.triggers;
import io.kestra.core.exceptions.InvalidTriggerConfigurationException;
import io.kestra.core.models.HasUID;
import io.kestra.core.models.conditions.ConditionContext;
import io.kestra.core.models.executions.Execution;
@@ -168,14 +167,9 @@ public class Trigger extends TriggerContext implements HasUID {
// Used to update trigger in flowListeners
public static Trigger of(FlowInterface flow, AbstractTrigger abstractTrigger, ConditionContext conditionContext, Optional<Trigger> lastTrigger) throws Exception {
ZonedDateTime nextDate = null;
boolean disabled = lastTrigger.map(TriggerContext::getDisabled).orElse(Boolean.FALSE);
if (abstractTrigger instanceof PollingTriggerInterface pollingTriggerInterface) {
try {
nextDate = pollingTriggerInterface.nextEvaluationDate(conditionContext, Optional.empty());
} catch (InvalidTriggerConfigurationException e) {
disabled = true;
}
nextDate = pollingTriggerInterface.nextEvaluationDate(conditionContext, Optional.empty());
}
return Trigger.builder()
@@ -186,7 +180,7 @@ public class Trigger extends TriggerContext implements HasUID {
.date(ZonedDateTime.now().truncatedTo(ChronoUnit.SECONDS))
.nextExecutionDate(nextDate)
.stopAfter(abstractTrigger.getStopAfter())
.disabled(disabled)
.disabled(lastTrigger.map(TriggerContext::getDisabled).orElse(Boolean.FALSE))
.backfill(null)
.build();
}

View File

@@ -4,7 +4,6 @@ import io.kestra.core.models.flows.State;
import io.kestra.core.utils.IdUtils;
import io.micronaut.core.annotation.Introspected;
import io.micronaut.core.annotation.Nullable;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Pattern;
import lombok.Getter;
@@ -47,7 +46,6 @@ public class TriggerContext {
@Nullable
private List<State.Type> stopAfter;
@Schema(defaultValue = "false")
private Boolean disabled = Boolean.FALSE;
protected TriggerContext(TriggerContextBuilder<?, ?> b) {

View File

@@ -7,7 +7,6 @@ import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Pattern;
import static io.kestra.core.utils.RegexPatterns.JAVA_IDENTIFIER_REGEX;
public interface TriggerInterface extends Plugin, PluginVersioning {
@NotNull
@@ -18,7 +17,7 @@ public interface TriggerInterface extends Plugin, PluginVersioning {
@NotNull
@NotBlank
@Pattern(regexp = JAVA_IDENTIFIER_REGEX)
@Pattern(regexp="\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*(\\.\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*)*")
@Schema(title = "The class name for this current trigger.")
String getType();

View File

@@ -11,6 +11,7 @@ import io.kestra.core.runners.FlowInputOutput;
import io.kestra.core.runners.RunContext;
import io.kestra.core.utils.IdUtils;
import io.kestra.core.utils.ListUtils;
import java.time.ZonedDateTime;
import java.util.*;
@@ -24,7 +25,7 @@ public abstract class TriggerService {
RunContext runContext = conditionContext.getRunContext();
ExecutionTrigger executionTrigger = ExecutionTrigger.of(trigger, variables, runContext.logFileURI());
return generateExecution(runContext.getTriggerExecutionId(), trigger, context, executionTrigger, conditionContext);
return generateExecution(runContext.getTriggerExecutionId(), trigger, context, executionTrigger, conditionContext.getFlow().getRevision());
}
public static Execution generateExecution(
@@ -36,7 +37,7 @@ public abstract class TriggerService {
RunContext runContext = conditionContext.getRunContext();
ExecutionTrigger executionTrigger = ExecutionTrigger.of(trigger, output, runContext.logFileURI());
return generateExecution(runContext.getTriggerExecutionId(), trigger, context, executionTrigger, conditionContext);
return generateExecution(runContext.getTriggerExecutionId(), trigger, context, executionTrigger, conditionContext.getFlow().getRevision());
}
public static Execution generateRealtimeExecution(
@@ -48,7 +49,7 @@ public abstract class TriggerService {
RunContext runContext = conditionContext.getRunContext();
ExecutionTrigger executionTrigger = ExecutionTrigger.of(trigger, output, runContext.logFileURI());
return generateExecution(IdUtils.create(), trigger, context, executionTrigger, conditionContext);
return generateExecution(IdUtils.create(), trigger, context, executionTrigger, conditionContext.getFlow().getRevision());
}
public static Execution generateScheduledExecution(
@@ -74,7 +75,6 @@ public abstract class TriggerService {
.namespace(context.getNamespace())
.flowId(context.getFlowId())
.flowRevision(conditionContext.getFlow().getRevision())
.variables(conditionContext.getFlow().getVariables())
.labels(executionLabels)
.state(new State())
.trigger(executionTrigger)
@@ -108,7 +108,7 @@ public abstract class TriggerService {
AbstractTrigger trigger,
TriggerContext context,
ExecutionTrigger executionTrigger,
ConditionContext conditionContext
Integer flowRevision
) {
List<Label> executionLabels = new ArrayList<>(ListUtils.emptyOnNull(trigger.getLabels()));
if (executionLabels.stream().noneMatch(label -> Label.CORRELATION_ID.equals(label.key()))) {
@@ -120,8 +120,7 @@ public abstract class TriggerService {
.namespace(context.getNamespace())
.flowId(context.getFlowId())
.tenantId(context.getTenantId())
.flowRevision(conditionContext.getFlow().getRevision())
.variables(conditionContext.getFlow().getVariables())
.flowRevision(flowRevision)
.state(new State())
.trigger(executionTrigger)
.labels(executionLabels)

View File

@@ -8,8 +8,6 @@ import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.experimental.SuperBuilder;
import static io.kestra.core.utils.RegexPatterns.JAVA_IDENTIFIER_REGEX;
@io.kestra.core.models.annotations.Plugin
@SuperBuilder(toBuilder = true)
@Getter
@@ -17,6 +15,6 @@ import static io.kestra.core.utils.RegexPatterns.JAVA_IDENTIFIER_REGEX;
public abstract class AdditionalPlugin implements Plugin {
@NotNull
@NotBlank
@Pattern(regexp = JAVA_IDENTIFIER_REGEX)
@Pattern(regexp="\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*(\\.\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*)*")
protected String type;
}

View File

@@ -1,6 +1,5 @@
package io.kestra.core.plugins;
import com.fasterxml.jackson.annotation.JsonIgnore;
import lombok.Builder;
import java.io.File;
@@ -34,7 +33,7 @@ public record PluginArtifact(
String version,
URI uri
) implements Comparable<PluginArtifact> {
private static final Pattern ARTIFACT_PATTERN = Pattern.compile(
"([^: ]+):([^: ]+)(:([^: ]*)(:([^: ]+))?)?:([^: ]+)"
);
@@ -43,8 +42,7 @@ public record PluginArtifact(
);
public static final String JAR_EXTENSION = "jar";
public static final String KESTRA_GROUP_ID = "io.kestra";
/**
* Static helper method for constructing a new {@link PluginArtifact} from a JAR file.
*
@@ -137,11 +135,6 @@ public record PluginArtifact(
public String toString() {
return toCoordinates();
}
@JsonIgnore
public boolean isOfficial() {
return groupId.startsWith(KESTRA_GROUP_ID);
}
public String toCoordinates() {
return Stream.of(groupId, artifactId, extension, classifier, version)

View File

@@ -1,12 +1,9 @@
package io.kestra.core.plugins;
import io.kestra.core.contexts.KestraContext;
import io.kestra.core.utils.ListUtils;
import io.micronaut.core.type.Argument;
import io.micronaut.http.HttpMethod;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.HttpResponse;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.HttpClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -18,12 +15,9 @@ import java.util.Base64;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
* Services for retrieving available plugin artifacts for Kestra.
@@ -64,47 +58,6 @@ public class PluginCatalogService {
this.isLoaded.set(true);
this.plugins = CompletableFuture.supplyAsync(this::load);
}
/**
* Resolves the version for the given artifacts.
*
* @param artifacts The list of artifacts to resolve.
* @return The list of results.
*/
public List<PluginResolutionResult> resolveVersions(List<PluginArtifact> artifacts) {
if (ListUtils.isEmpty(artifacts)) {
return List.of();
}
final Map<String, ApiPluginArtifact> pluginsByGroupAndArtifactId = getAllCompatiblePlugins().stream()
.collect(Collectors.toMap(it -> it.groupId() + ":" + it.artifactId(), Function.identity()));
return artifacts.stream().map(it -> {
// Get all compatible versions for current artifact
List<String> versions = Optional
.ofNullable(pluginsByGroupAndArtifactId.get(it.groupId() + ":" + it.artifactId()))
.map(ApiPluginArtifact::versions)
.orElse(List.of());
// Try to resolve the version
String resolvedVersion = null;
if (!versions.isEmpty()) {
if (it.version().equalsIgnoreCase("LATEST")) {
resolvedVersion = versions.getFirst();
} else {
resolvedVersion = versions.contains(it.version()) ? it.version() : null;
}
}
// Build the PluginResolutionResult
return new PluginResolutionResult(
it,
resolvedVersion,
versions,
resolvedVersion != null
);
}).toList();
}
public synchronized List<PluginManifest> get() {
try {
@@ -187,26 +140,7 @@ public class PluginCatalogService {
isLoaded.set(false);
}
}
private List<ApiPluginArtifact> getAllCompatiblePlugins() {
MutableHttpRequest<Object> request = HttpRequest.create(
HttpMethod.GET,
"/v1/plugins/artifacts/core-compatibility/" + KestraContext.getContext().getVersion()
);
if (oss) {
request.getParameters().add("license", "OPENSOURCE");
}
try {
return httpClient
.toBlocking()
.exchange(request, Argument.listOf(ApiPluginArtifact.class))
.body();
} catch (Exception e) {
log.debug("Failed to retrieve available plugins from Kestra API. Cause: ", e);
return List.of();
}
}
public record PluginManifest(
String title,
String icon,
@@ -219,11 +153,4 @@ public class PluginCatalogService {
return groupId + ":" + artifactId + ":LATEST";
}
}
public record ApiPluginArtifact(
String groupId,
String artifactId,
String license,
List<String> versions
) {}
}
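
A small sketch of the version-resolution rule used by the removed `resolveVersions()`: `LATEST` (case-insensitive) picks the first compatible version, while a pinned version must appear in the compatibility list. It assumes, as the removed code appears to, that the list is ordered newest first; names are illustrative.

```java
import java.util.List;
import java.util.Optional;

public class VersionResolutionSketch {
    static Optional<String> resolve(String requested, List<String> compatibleVersions) {
        if (compatibleVersions.isEmpty()) {
            return Optional.empty();
        }
        if ("LATEST".equalsIgnoreCase(requested)) {
            return Optional.of(compatibleVersions.getFirst());
        }
        return compatibleVersions.contains(requested) ? Optional.of(requested) : Optional.empty();
    }

    public static void main(String[] args) {
        List<String> versions = List.of("1.2.0", "1.1.0", "1.0.0");
        System.out.println(resolve("LATEST", versions)); // Optional[1.2.0]
        System.out.println(resolve("1.1.0", versions));  // Optional[1.1.0]
        System.out.println(resolve("0.9.0", versions));  // Optional.empty
    }
}
```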

View File

@@ -26,6 +26,7 @@ public interface QueueFactoryInterface {
String SUBFLOWEXECUTIONRESULT_NAMED = "subflowExecutionResultQueue";
String CLUSTER_EVENT_NAMED = "clusterEventQueue";
String SUBFLOWEXECUTIONEND_NAMED = "subflowExecutionEndQueue";
String EXECUTION_RUNNING_NAMED = "executionRunningQueue";
String MULTIPLE_CONDITION_EVENT_NAMED = "multipleConditionEventQueue";
QueueInterface<Execution> execution();
@@ -58,5 +59,7 @@ public interface QueueFactoryInterface {
QueueInterface<SubflowExecutionEnd> subflowExecutionEnd();
QueueInterface<ExecutionRunning> executionRunning();
QueueInterface<MultipleConditionEvent> multipleConditionEvent();
}

View File

@@ -14,19 +14,19 @@ import java.time.Instant;
@Requires(property = "kestra.server-type")
@Slf4j
public class ReportableScheduler {
private final ReportableRegistry registry;
private final ServerEventSender sender;
private final Clock clock;
@Inject
public ReportableScheduler(ReportableRegistry registry, ServerEventSender sender) {
this.registry = registry;
this.sender = sender;
this.clock = Clock.systemDefaultZone();
}
@Scheduled(fixedDelay = "5m", initialDelay = "${kestra.anonymous-usage-report.initial-delay:5m}")
@Scheduled(fixedDelay = "5m", initialDelay = "${kestra.anonymous-usage-report.initial-delay}")
public void tick() {
Instant now = clock.instant();
for (Reportable<?> r : registry.getAll()) {

View File

@@ -18,7 +18,6 @@ import io.micronaut.http.hateoas.JsonError;
import io.micronaut.reactor.http.client.ReactorHttpClient;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import java.net.URI;
@@ -29,30 +28,29 @@ import java.util.UUID;
@Singleton
@Slf4j
public class ServerEventSender {
private static final String SESSION_UUID = IdUtils.create();
private static final ObjectMapper OBJECT_MAPPER = JacksonMapper.ofJson();
@Inject
@Client
private ReactorHttpClient client;
@Inject
private VersionProvider versionProvider;
@Inject
private InstanceService instanceService;
private final ServerType serverType;
@Setter
@Value("${kestra.anonymous-usage-report.uri:'https://api.kestra.io/v1/reports/server-events'}")
@Value("${kestra.anonymous-usage-report.uri}")
protected URI url;
public ServerEventSender( ) {
this.serverType = KestraContext.getContext().getServerType();
}
public void send(final Instant now, final Type type, Object event) {
ServerEvent serverEvent = ServerEvent
.builder()
@@ -67,11 +65,11 @@ public class ServerEventSender {
.build();
try {
MutableHttpRequest<ServerEvent> request = this.request(serverEvent, type);
if (log.isTraceEnabled()) {
log.trace("Report anonymous usage: '{}'", OBJECT_MAPPER.writeValueAsString(serverEvent));
}
this.handleResponse(client.toBlocking().retrieve(request, Argument.of(Result.class), Argument.of(JsonError.class)));
} catch (HttpClientResponseException t) {
log.trace("Unable to report anonymous usage with body '{}'", t.getResponse().getBody(String.class), t);
@@ -79,11 +77,11 @@ public class ServerEventSender {
log.trace("Unable to handle anonymous usage", t);
}
}
private void handleResponse (Result result){
}
protected MutableHttpRequest<ServerEvent> request(ServerEvent event, Type type) throws Exception {
URI baseUri = URI.create(this.url.toString().endsWith("/") ? this.url.toString() : this.url + "/");
URI resolvedUri = baseUri.resolve(type.name().toLowerCase());

View File

@@ -26,8 +26,8 @@ public interface FlowRepositoryInterface extends QueryBuilderInterface<Flows.Fie
* Used only if result is used internally and not exposed to the user.
* It is useful when we want to restart/resume a flow.
*/
default FlowWithSource findByExecutionWithoutAcl(Execution execution) {
Optional<FlowWithSource> find = this.findByIdWithSourceWithoutAcl(
default Flow findByExecutionWithoutAcl(Execution execution) {
Optional<Flow> find = this.findByIdWithoutAcl(
execution.getTenantId(),
execution.getNamespace(),
execution.getFlowId(),

View File

@@ -1,48 +0,0 @@
package io.kestra.core.repositories;
import io.kestra.core.models.FetchVersion;
import io.kestra.core.models.QueryFilter;
import io.kestra.core.models.kv.PersistedKvMetadata;
import io.micronaut.data.model.Pageable;
import java.io.IOException;
import java.util.List;
import java.util.Optional;
public interface KvMetadataRepositoryInterface extends SaveRepositoryInterface<PersistedKvMetadata> {
Optional<PersistedKvMetadata> findByName(
String tenantId,
String namespace,
String name
) throws IOException;
default ArrayListTotal<PersistedKvMetadata> find(
Pageable pageable,
String tenantId,
List<QueryFilter> filters,
boolean allowDeleted,
boolean allowExpired
) {
return this.find(pageable, tenantId, filters, allowDeleted, allowExpired, FetchVersion.LATEST);
}
ArrayListTotal<PersistedKvMetadata> find(
Pageable pageable,
String tenantId,
List<QueryFilter> filters,
boolean allowDeleted,
boolean allowExpired,
FetchVersion fetchBehavior
);
default PersistedKvMetadata delete(PersistedKvMetadata persistedKvMetadata) throws IOException {
return this.save(persistedKvMetadata.toBuilder().deleted(true).build());
}
/**
* Purge (hard delete) a list of persisted kv metadata. If no version is specified, all versions are purged.
* @param persistedKvsMetadata the list of persisted kv metadata to purge
* @return the number of purged persisted kv metadata
*/
Integer purge(List<PersistedKvMetadata> persistedKvsMetadata);
}

View File

@@ -1,31 +0,0 @@
package io.kestra.core.runners;
import io.kestra.core.models.HasUID;
import io.kestra.core.utils.IdUtils;
import jakarta.validation.constraints.NotNull;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Value;
import lombok.With;
@Value
@AllArgsConstructor
@Builder
public class ConcurrencyLimit implements HasUID {
@NotNull
String tenantId;
@NotNull
String namespace;
@NotNull
String flowId;
@With
Integer running;
@Override
public String uid() {
return IdUtils.fromPartsAndSeparator('|', this.tenantId, this.namespace, this.flowId);
}
}

View File

@@ -82,8 +82,6 @@ public abstract class FilesService {
}
private static String resolveUniqueNameForFile(final Path path) {
String filename = path.getFileName().toString();
String encodedFilename = java.net.URLEncoder.encode(filename, java.nio.charset.StandardCharsets.UTF_8);
return IdUtils.from(path.toString()) + "-" + encodedFilename;
return IdUtils.from(path.toString()) + "-" + path.toFile().getName();
}
}
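
The one-line difference in `resolveUniqueNameForFile` above is whether the file name is URL-encoded before being appended; a quick illustration of what `URLEncoder` does to a name containing spaces and parentheses:

```java
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

public class FileNameEncodingSketch {
    public static void main(String[] args) {
        // Characters that are unsafe in a storage URI are escaped.
        System.out.println(URLEncoder.encode("my report (v2).csv", StandardCharsets.UTF_8));
        // -> my+report+%28v2%29.csv
    }
}
```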

View File

@@ -1,6 +1,7 @@
package io.kestra.core.runners;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.annotations.VisibleForTesting;
import io.kestra.core.encryption.EncryptionService;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.exceptions.KestraRuntimeException;
@@ -64,28 +65,31 @@ import static io.kestra.core.utils.Rethrow.throwFunction;
public class FlowInputOutput {
private static final Pattern URI_PATTERN = Pattern.compile("^[a-z]+:\\/\\/(?:www\\.)?[-a-zA-Z0-9@:%._\\+~#=]{1,256}\\.[a-zA-Z0-9()]{1,6}\\b(?:[-a-zA-Z0-9()@:%_\\+.~#?&\\/=]*)$");
private static final ObjectMapper YAML_MAPPER = JacksonMapper.ofYaml();
private final StorageInterface storageInterface;
private final Optional<String> secretKey;
private final RunContextFactory runContextFactory;
private final VariableRenderer variableRenderer;
@Inject
public FlowInputOutput(
StorageInterface storageInterface,
RunContextFactory runContextFactory,
VariableRenderer variableRenderer,
@Nullable @Value("${kestra.encryption.secret-key}") String secretKey
) {
this.storageInterface = storageInterface;
this.runContextFactory = runContextFactory;
this.secretKey = Optional.ofNullable(secretKey);
this.variableRenderer = variableRenderer;
}
/**
* Validate all the inputs of a given execution of a flow.
*
* @param inputs The Flow's inputs.
* @param execution The Execution.
* @param data The Execution's inputs data.
* @param inputs The Flow's inputs.
* @param execution The Execution.
* @param data The Execution's inputs data.
* @return The list of {@link InputAndValue}.
*/
public Mono<List<InputAndValue>> validateExecutionInputs(final List<Input<?>> inputs,
@@ -93,11 +97,10 @@ public class FlowInputOutput {
final Execution execution,
final Publisher<CompletedPart> data) {
if (ListUtils.isEmpty(inputs)) return Mono.just(Collections.emptyList());
return readData(inputs, execution, data, false)
.map(inputData -> resolveInputs(inputs, flow, execution, inputData, false));
return readData(inputs, execution, data, false).map(inputData -> resolveInputs(inputs, flow, execution, inputData));
}
/**
* Reads all the inputs of a given execution of a flow.
*
@@ -111,7 +114,7 @@ public class FlowInputOutput {
final Publisher<CompletedPart> data) {
return this.readExecutionInputs(flow.getInputs(), flow, execution, data);
}
/**
* Reads all the inputs of a given execution of a flow.
*
@@ -126,11 +129,11 @@ public class FlowInputOutput {
final Publisher<CompletedPart> data) {
return readData(inputs, execution, data, true).map(inputData -> this.readExecutionInputs(inputs, flow, execution, inputData));
}
private Mono<Map<String, Object>> readData(List<Input<?>> inputs, Execution execution, Publisher<CompletedPart> data, boolean uploadFiles) {
return Flux.from(data)
.publishOn(Schedulers.boundedElastic())
.<Map.Entry<String, String>>handle((input, sink) -> {
.<AbstractMap.SimpleEntry<String, String>>handle((input, sink) -> {
if (input instanceof CompletedFileUpload fileUpload) {
boolean oldStyleInput = false;
if ("files".equals(fileUpload.getName())) {
@@ -150,7 +153,7 @@ public class FlowInputOutput {
.getContextStorageURI()
);
fileUpload.discard();
sink.next(Map.entry(inputId, from.toString()));
sink.next(new AbstractMap.SimpleEntry<>(inputId, from.toString()));
} else {
try {
final String fileExtension = FileInput.findFileInputExtension(inputs, fileName);
@@ -165,7 +168,7 @@ public class FlowInputOutput {
return;
}
URI from = storageInterface.from(execution, inputId, fileName, tempFile);
sink.next(Map.entry(inputId, from.toString()));
sink.next(new AbstractMap.SimpleEntry<>(inputId, from.toString()));
} finally {
if (!tempFile.delete()) {
tempFile.deleteOnExit();
@@ -178,13 +181,13 @@ public class FlowInputOutput {
}
} else {
try {
sink.next(Map.entry(input.getName(), new String(input.getBytes())));
sink.next(new AbstractMap.SimpleEntry<>(input.getName(), new String(input.getBytes())));
} catch (IOException e) {
sink.error(e);
}
}
})
.collectMap(Map.Entry::getKey, Map.Entry::getValue);
.collectMap(AbstractMap.SimpleEntry::getKey, AbstractMap.SimpleEntry::getValue);
}
/**
@@ -209,7 +212,7 @@ public class FlowInputOutput {
final Execution execution,
final Map<String, ?> data
) {
Map<String, Object> resolved = this.resolveInputs(inputs, flow, execution, data, true)
Map<String, Object> resolved = this.resolveInputs(inputs, flow, execution, data)
.stream()
.filter(InputAndValue::enabled)
.map(it -> {
@@ -235,7 +238,7 @@ public class FlowInputOutput {
}
return MapUtils.flattenToNestedMap(resolved);
}
/**
* Utility method for retrieving types inputs.
*
@@ -244,21 +247,12 @@ public class FlowInputOutput {
* @param data The Execution's inputs data.
* @return The Map of typed inputs.
*/
@VisibleForTesting
public List<InputAndValue> resolveInputs(
final List<Input<?>> inputs,
final FlowInterface flow,
final Execution execution,
final Map<String, ?> data
) {
return resolveInputs(inputs, flow, execution, data, true);
}
public List<InputAndValue> resolveInputs(
final List<Input<?>> inputs,
final FlowInterface flow,
final Execution execution,
final Map<String, ?> data,
final boolean decryptSecrets
) {
if (inputs == null) {
return Collections.emptyList();
@@ -268,7 +262,7 @@ public class FlowInputOutput {
.map(input -> ResolvableInput.of(input,data.get(input.getId())))
.collect(Collectors.toMap(it -> it.get().input().getId(), Function.identity(), (o1, o2) -> o1, LinkedHashMap::new)));
resolvableInputMap.values().forEach(input -> resolveInputValue(input, flow, execution, resolvableInputMap, decryptSecrets));
resolvableInputMap.values().forEach(input -> resolveInputValue(input, flow, execution, resolvableInputMap));
return resolvableInputMap.values().stream().map(ResolvableInput::get).toList();
}
@@ -278,8 +272,7 @@ public class FlowInputOutput {
final @NotNull ResolvableInput resolvable,
final FlowInterface flow,
final @NotNull Execution execution,
final @NotNull Map<String, ResolvableInput> inputs,
final boolean decryptSecrets) {
final @NotNull Map<String, ResolvableInput> inputs) {
// return immediately if the input is already resolved
if (resolvable.isResolved()) return resolvable.get();
@@ -287,10 +280,9 @@ public class FlowInputOutput {
Input<?> input = resolvable.get().input();
try {
// Resolve all input dependencies and check whether input is enabled
// Note: Secrets are always decrypted here because they can be part of expressions used to render inputs such as SELECT & MULTI_SELECT.
final Map<String, InputAndValue> dependencies = resolveAllDependentInputs(input, flow, execution, inputs, true);
final RunContext runContext = buildRunContextForExecutionAndInputs(flow, execution, dependencies, true);
// resolve all input dependencies and check whether input is enabled
final Map<String, InputAndValue> dependencies = resolveAllDependentInputs(input, flow, execution, inputs);
final RunContext runContext = buildRunContextForExecutionAndInputs(flow, execution, dependencies);
boolean isInputEnabled = dependencies.isEmpty() || dependencies.values().stream().allMatch(InputAndValue::enabled);
@@ -325,13 +317,13 @@ public class FlowInputOutput {
}
});
resolvable.setInput(input);
Object value = resolvable.get().value();
// resolve default if needed
if (value == null && input.getDefaults() != null) {
RunContext runContextForDefault = decryptSecrets ? runContext : buildRunContextForExecutionAndInputs(flow, execution, dependencies, false);
value = resolveDefaultValue(input, runContextForDefault);
value = resolveDefaultValue(input, runContext);
resolvable.isDefault(true);
}
@@ -390,7 +382,7 @@ public class FlowInputOutput {
return Property.asList((Property<List<T>>) input.getDefaults(), renderer, clazz);
}
private RunContext buildRunContextForExecutionAndInputs(final FlowInterface flow, final Execution execution, Map<String, InputAndValue> dependencies, final boolean decryptSecrets) {
private RunContext buildRunContextForExecutionAndInputs(final FlowInterface flow, final Execution execution, Map<String, InputAndValue> dependencies) {
Map<String, Object> flattenInputs = MapUtils.flattenToNestedMap(dependencies.entrySet()
.stream()
.collect(HashMap::new, (m, v) -> m.put(v.getKey(), v.getValue().value()), HashMap::putAll)
@@ -404,10 +396,10 @@ public class FlowInputOutput {
flattenInputs.put(input.getId(), null);
}
}
return runContextFactory.of(flow, execution, vars -> vars.withInputs(flattenInputs), decryptSecrets);
return runContextFactory.of(flow, execution, vars -> vars.withInputs(flattenInputs));
}
private Map<String, InputAndValue> resolveAllDependentInputs(final Input<?> input, final FlowInterface flow, final Execution execution, final Map<String, ResolvableInput> inputs, final boolean decryptSecrets) {
private Map<String, InputAndValue> resolveAllDependentInputs(final Input<?> input, final FlowInterface flow, final Execution execution, final Map<String, ResolvableInput> inputs) {
return Optional.ofNullable(input.getDependsOn())
.map(DependsOn::inputs)
.stream()
@@ -415,7 +407,7 @@ public class FlowInputOutput {
.filter(id -> !id.equals(input.getId()))
.map(inputs::get)
.filter(Objects::nonNull) // input may declare unknown or non-necessary dependencies. Let's ignore.
.map(it -> resolveInputValue(it, flow, execution, inputs, decryptSecrets))
.map(it -> resolveInputValue(it, flow, execution, inputs))
.collect(Collectors.toMap(it -> it.input().getId(), Function.identity()));
}
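
On the entry types swapped in the hunk above: `Map.entry()` and `new AbstractMap.SimpleEntry<>()` both produce a `Map.Entry` with the same equality semantics, but `Map.entry()` is immutable and rejects nulls. A tiny sketch, with an illustrative key and URI:

```java
import java.util.AbstractMap;
import java.util.Map;

public class EntrySketch {
    public static void main(String[] args) {
        Map.Entry<String, String> a = Map.entry("inputId", "kestra:///tmp/file.csv");
        Map.Entry<String, String> b = new AbstractMap.SimpleEntry<>("inputId", "kestra:///tmp/file.csv");

        // Equality is defined on key and value, so the two forms are interchangeable here.
        System.out.println(a.equals(b)); // true

        // Unlike SimpleEntry, Map.entry() rejects null keys and values.
        try {
            Map.entry("inputId", null);
        } catch (NullPointerException e) {
            System.out.println("Map.entry() does not accept null values");
        }
    }
}
```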

View File

@@ -84,12 +84,6 @@ public class FlowableUtils {
return Collections.emptyList();
}
// have submitted, leave
Optional<TaskRun> lastSubmitted = execution.findLastSubmitted(taskRuns);
if (lastSubmitted.isPresent()) {
return Collections.emptyList();
}
// have running, leave
Optional<TaskRun> lastRunning = execution.findLastRunning(taskRuns);
if (lastRunning.isPresent()) {

View File

@@ -176,10 +176,6 @@ public abstract class RunContext implements PropertyContext {
*/
public abstract KVStore namespaceKv(String namespace);
/**
* @deprecated use #namespaceKv(String) instead
*/
@Deprecated(since = "1.1.0", forRemoval = true)
public StateStore stateStore() {
return new StateStore(this, true);
}

View File

@@ -41,9 +41,6 @@ public class RunContextFactory {
@Inject
protected VariableRenderer variableRenderer;
@Inject
protected SecureVariableRendererFactory secureVariableRendererFactory;
@Inject
protected StorageInterface storageInterface;
@@ -85,33 +82,22 @@ public class RunContextFactory {
public RunContext of(FlowInterface flow, Execution execution) {
return of(flow, execution, Function.identity());
}
public RunContext of(FlowInterface flow, Execution execution, boolean decryptVariable) {
return of(flow, execution, Function.identity(), decryptVariable);
}
public RunContext of(FlowInterface flow, Execution execution, Function<RunVariables.Builder, RunVariables.Builder> runVariableModifier) {
return of(flow, execution, runVariableModifier, true);
}
public RunContext of(FlowInterface flow, Execution execution, Function<RunVariables.Builder, RunVariables.Builder> runVariableModifier, boolean decryptVariables) {
RunContextLogger runContextLogger = runContextLoggerFactory.create(execution);
VariableRenderer variableRenderer = decryptVariables ? this.variableRenderer : secureVariableRendererFactory.createOrGet();
return newBuilder()
// Logger
.withLogger(runContextLogger)
// Execution
.withPluginConfiguration(Map.of())
.withStorage(new InternalStorage(runContextLogger.logger(), StorageContext.forExecution(execution), storageInterface, flowService))
.withVariableRenderer(variableRenderer)
.withVariables(runVariableModifier.apply(
newRunVariablesBuilder()
.withFlow(flow)
.withExecution(execution)
.withDecryptVariables(decryptVariables)
.withSecretInputs(secretInputsFromFlow(flow))
newRunVariablesBuilder()
.withFlow(flow)
.withExecution(execution)
.withDecryptVariables(true)
.withSecretInputs(secretInputsFromFlow(flow))
)
.build(runContextLogger, PropertyContext.create(variableRenderer)))
.withSecretInputs(secretInputsFromFlow(flow))
@@ -123,7 +109,7 @@ public class RunContextFactory {
}
public RunContext of(FlowInterface flow, Task task, Execution execution, TaskRun taskRun, boolean decryptVariables) {
return this.of(flow, task, execution, taskRun, decryptVariables, this.variableRenderer);
return this.of(flow, task, execution, taskRun, decryptVariables, variableRenderer);
}
public RunContext of(FlowInterface flow, Task task, Execution execution, TaskRun taskRun, boolean decryptVariables, VariableRenderer variableRenderer) {
@@ -161,7 +147,7 @@ public class RunContextFactory {
.withFlow(flow)
.withTrigger(trigger)
.withSecretInputs(secretInputsFromFlow(flow))
.build(runContextLogger, PropertyContext.create(this.variableRenderer))
.build(runContextLogger, PropertyContext.create(variableRenderer))
)
.withSecretInputs(secretInputsFromFlow(flow))
.withTrigger(trigger)
@@ -240,7 +226,7 @@ public class RunContextFactory {
// inject mandatory services and config
.withApplicationContext(applicationContext) // TODO - ideally application should not be injected here
.withMeterRegistry(metricRegistry)
.withVariableRenderer(this.variableRenderer)
.withVariableRenderer(variableRenderer)
.withStorageInterface(storageInterface)
.withSecretKey(secretKey)
.withWorkingDir(workingDirFactory.createWorkingDirectory())

View File

@@ -10,6 +10,7 @@ import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.Input;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.flows.input.SecretInput;
import io.kestra.core.models.property.Property;
import io.kestra.core.models.property.PropertyContext;
import io.kestra.core.models.tasks.Task;
import io.kestra.core.models.triggers.AbstractTrigger;
@@ -99,7 +100,7 @@ public final class RunVariables {
* @return a new immutable {@link Map}.
*/
static Map<String, Object> of(final AbstractTrigger trigger) {
return Map.of(
return ImmutableMap.of(
"id", trigger.getId(),
"type", trigger.getType()
);
@@ -281,15 +282,12 @@ public final class RunVariables {
}
if (flow != null && flow.getInputs() != null) {
// Create a new PropertyContext with 'flow' variables which are required by some pebble expressions.
PropertyContextWithVariables context = new PropertyContextWithVariables(propertyContext, Map.of("flow", RunVariables.of(flow)));
// we add default input values from the flow if not already set; this is useful for triggers
flow.getInputs().stream()
.filter(input -> input.getDefaults() != null && !inputs.containsKey(input.getId()))
.forEach(input -> {
try {
inputs.put(input.getId(), FlowInputOutput.resolveDefaultValue(input, context));
inputs.put(input.getId(), FlowInputOutput.resolveDefaultValue(input, propertyContext));
} catch (IllegalVariableEvaluationException e) {
// Silent catch, if an input depends on another input, or a variable that is populated at runtime / input filling time, we can't resolve it here.
}
@@ -393,20 +391,4 @@ public final class RunVariables {
}
private RunVariables(){}
private record PropertyContextWithVariables(
PropertyContext delegate,
Map<String, Object> variables
) implements PropertyContext {
@Override
public String render(String inline, Map<String, Object> variables) throws IllegalVariableEvaluationException {
return delegate.render(inline, variables.isEmpty() ? this.variables : variables);
}
@Override
public Map<String, Object> render(Map<String, Object> inline, Map<String, Object> variables) throws IllegalVariableEvaluationException {
return delegate.render(inline, variables.isEmpty() ? this.variables : variables);
}
}
}

View File

@@ -1,39 +0,0 @@
package io.kestra.core.runners;
import io.kestra.core.runners.pebble.PebbleEngineFactory;
import io.kestra.core.runners.pebble.functions.SecretFunction;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import java.util.List;
@Singleton
public class SecureVariableRendererFactory {
private final PebbleEngineFactory pebbleEngineFactory;
private final ApplicationContext applicationContext;
private VariableRenderer secureVariableRenderer;
@Inject
public SecureVariableRendererFactory(ApplicationContext applicationContext, PebbleEngineFactory pebbleEngineFactory) {
this.pebbleEngineFactory = pebbleEngineFactory;
this.applicationContext = applicationContext;
}
/**
* Creates or returns the existing secured {@link VariableRenderer} instance.
*
* @return the secured {@link VariableRenderer} instance
*/
public synchronized VariableRenderer createOrGet() {
if (this.secureVariableRenderer == null) {
// Explicitly create a new instance through the application context to ensure
// that any custom VariableRenderer implementation is used
secureVariableRenderer = applicationContext.createBean(VariableRenderer.class);
secureVariableRenderer.setPebbleEngine(pebbleEngineFactory.createWithMaskedFunctions(secureVariableRenderer, List.of(SecretFunction.NAME)));
}
return secureVariableRenderer;
}
}

View File

@@ -2,44 +2,121 @@ package io.kestra.core.runners;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.runners.pebble.*;
import io.kestra.core.runners.pebble.functions.RenderingFunctionInterface;
import io.micronaut.context.ApplicationContext;
import io.micronaut.context.annotation.ConfigurationProperties;
import io.micronaut.core.annotation.Nullable;
import io.pebbletemplates.pebble.PebbleEngine;
import io.pebbletemplates.pebble.error.AttributeNotFoundException;
import io.pebbletemplates.pebble.error.PebbleException;
import io.pebbletemplates.pebble.extension.Extension;
import io.pebbletemplates.pebble.extension.Function;
import io.pebbletemplates.pebble.template.PebbleTemplate;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import lombok.Getter;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Proxy;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
@Singleton
public class VariableRenderer {
private static final Pattern RAW_PATTERN = Pattern.compile("(\\{%-*\\s*raw\\s*-*%}(.*?)\\{%-*\\s*endraw\\s*-*%})");
public static final int MAX_RENDERING_AMOUNT = 100;
private PebbleEngine pebbleEngine;
private final PebbleEngine pebbleEngine;
private final VariableConfiguration variableConfiguration;
@Inject
public VariableRenderer(ApplicationContext applicationContext, @Nullable VariableConfiguration variableConfiguration) {
this(applicationContext.getBean(PebbleEngineFactory.class), variableConfiguration);
this(applicationContext, variableConfiguration, Collections.emptyList());
}
public VariableRenderer(PebbleEngineFactory pebbleEngineFactory, @Nullable VariableConfiguration variableConfiguration) {
public VariableRenderer(ApplicationContext applicationContext, @Nullable VariableConfiguration variableConfiguration, List<String> functionsToMask) {
this.variableConfiguration = variableConfiguration != null ? variableConfiguration : new VariableConfiguration();
this.pebbleEngine = pebbleEngineFactory.create();
PebbleEngine.Builder pebbleBuilder = new PebbleEngine.Builder()
.registerExtensionCustomizer(ExtensionCustomizer::new)
.strictVariables(true)
.cacheActive(this.variableConfiguration.getCacheEnabled())
.newLineTrimming(false)
.autoEscaping(false);
List<Extension> extensions = applicationContext.getBeansOfType(Extension.class).stream()
.map(e -> functionsToMask.stream().anyMatch(excludedFunction -> e.getFunctions().containsKey(excludedFunction))
? extensionWithMaskedFunctions(e, functionsToMask)
: e)
.toList();
extensions.forEach(pebbleBuilder::extension);
if (this.variableConfiguration.getCacheEnabled()) {
pebbleBuilder.templateCache(new PebbleLruCache(this.variableConfiguration.getCacheSize()));
}
this.pebbleEngine = pebbleBuilder.build();
}
public void setPebbleEngine(final PebbleEngine pebbleEngine) {
this.pebbleEngine = pebbleEngine;
private Extension extensionWithMaskedFunctions(Extension initialExtension, List<String> maskedFunctions) {
return (Extension) Proxy.newProxyInstance(
initialExtension.getClass().getClassLoader(),
new Class[]{Extension.class},
(proxy, method, methodArgs) -> {
if (method.getName().equals("getFunctions")) {
return initialExtension.getFunctions().entrySet().stream()
.map(entry -> {
if (maskedFunctions.contains(entry.getKey())) {
return Map.entry(entry.getKey(), this.maskedFunctionProxy(entry.getValue()));
} else if (RenderingFunctionInterface.class.isAssignableFrom(entry.getValue().getClass())) {
return Map.entry(entry.getKey(), this.variableRendererProxy(entry.getValue()));
}
return entry;
}).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
}
return method.invoke(initialExtension, methodArgs);
}
);
}
private Function variableRendererProxy(Function initialFunction) {
return (Function) Proxy.newProxyInstance(
initialFunction.getClass().getClassLoader(),
new Class[]{Function.class, RenderingFunctionInterface.class},
(functionProxy, functionMethod, functionArgs) -> {
if (functionMethod.getName().equals("variableRenderer")) {
return this;
}
return functionMethod.invoke(initialFunction, functionArgs);
}
);
}
private Function maskedFunctionProxy(Function initialFunction) {
return (Function) Proxy.newProxyInstance(
initialFunction.getClass().getClassLoader(),
new Class[]{Function.class},
(functionProxy, functionMethod, functionArgs) -> {
Object result;
try {
result = functionMethod.invoke(initialFunction, functionArgs);
} catch (InvocationTargetException e) {
throw e.getCause();
}
if (functionMethod.getName().equals("execute")) {
return "******";
}
return result;
}
);
}
public static IllegalVariableEvaluationException properPebbleException(PebbleException initialExtension) {
if (initialExtension instanceof AttributeNotFoundException current) {
return new IllegalVariableEvaluationException(
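The hunk above folds the masking proxies into `VariableRenderer` itself: a `java.lang.reflect.Proxy` intercepts `execute` and replaces the real return value with `******` while still invoking the underlying function. A stand-alone sketch of that interception trick, using an invented `Fn` interface instead of Pebble's `Function`:

```java
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Proxy;

public class MaskingProxySketch {

    // Hypothetical stand-in for Pebble's Function interface.
    interface Fn {
        Object execute(Object arg);
    }

    // Wraps a function so the real call still runs, but its return value is hidden.
    static Fn masked(Fn target) {
        return (Fn) Proxy.newProxyInstance(
            target.getClass().getClassLoader(),
            new Class<?>[]{Fn.class},
            (proxy, method, args) -> {
                Object result;
                try {
                    result = method.invoke(target, args);
                } catch (InvocationTargetException e) {
                    // Rethrow the original cause instead of the reflection wrapper.
                    throw e.getCause();
                }
                // Only the 'execute' result is masked; other methods pass through.
                return method.getName().equals("execute") ? "******" : result;
            });
    }

    public static void main(String[] args) {
        Fn secret = arg -> "s3cr3t-" + arg;   // pretend this resolves a secret
        Fn maskedSecret = masked(secret);
        System.out.println(maskedSecret.execute("db-password")); // prints ******
    }
}
```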

View File

@@ -1,118 +0,0 @@
package io.kestra.core.runners.pebble;
import io.kestra.core.runners.VariableRenderer;
import io.kestra.core.runners.pebble.functions.RenderingFunctionInterface;
import io.micronaut.context.ApplicationContext;
import io.micronaut.core.annotation.Nullable;
import io.pebbletemplates.pebble.PebbleEngine;
import io.pebbletemplates.pebble.extension.Extension;
import io.pebbletemplates.pebble.extension.Function;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Proxy;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
@Singleton
public class PebbleEngineFactory {
private final ApplicationContext applicationContext;
private final VariableRenderer.VariableConfiguration variableConfiguration;
@Inject
public PebbleEngineFactory(ApplicationContext applicationContext, @Nullable VariableRenderer.VariableConfiguration variableConfiguration) {
this.applicationContext = applicationContext;
this.variableConfiguration = variableConfiguration;
}
public PebbleEngine create() {
PebbleEngine.Builder builder = newPebbleEngineBuilder();
this.applicationContext.getBeansOfType(Extension.class).forEach(builder::extension);
return builder.build();
}
public PebbleEngine createWithMaskedFunctions(VariableRenderer renderer, final List<String> functionsToMask) {
PebbleEngine.Builder builder = newPebbleEngineBuilder();
this.applicationContext.getBeansOfType(Extension.class).stream()
.map(e -> functionsToMask.stream().anyMatch(fun -> e.getFunctions().containsKey(fun))
? extensionWithMaskedFunctions(renderer, e, functionsToMask)
: e)
.forEach(builder::extension);
return builder.build();
}
private PebbleEngine.Builder newPebbleEngineBuilder() {
PebbleEngine.Builder builder = new PebbleEngine.Builder()
.registerExtensionCustomizer(ExtensionCustomizer::new)
.strictVariables(true)
.cacheActive(this.variableConfiguration.getCacheEnabled())
.newLineTrimming(false)
.autoEscaping(false);
if (this.variableConfiguration.getCacheEnabled()) {
builder = builder.templateCache(new PebbleLruCache(this.variableConfiguration.getCacheSize()));
}
return builder;
}
private Extension extensionWithMaskedFunctions(VariableRenderer renderer, Extension initialExtension, List<String> maskedFunctions) {
return (Extension) Proxy.newProxyInstance(
initialExtension.getClass().getClassLoader(),
new Class[]{Extension.class},
(proxy, method, methodArgs) -> {
if (method.getName().equals("getFunctions")) {
return initialExtension.getFunctions().entrySet().stream()
.map(entry -> {
if (maskedFunctions.contains(entry.getKey())) {
return Map.entry(entry.getKey(), this.maskedFunctionProxy(entry.getValue()));
} else if (RenderingFunctionInterface.class.isAssignableFrom(entry.getValue().getClass())) {
return Map.entry(entry.getKey(), this.variableRendererProxy(renderer, entry.getValue()));
}
return entry;
}).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
}
return method.invoke(initialExtension, methodArgs);
}
);
}
private Function variableRendererProxy(VariableRenderer renderer, Function initialFunction) {
return (Function) Proxy.newProxyInstance(
initialFunction.getClass().getClassLoader(),
new Class[]{Function.class, RenderingFunctionInterface.class},
(functionProxy, functionMethod, functionArgs) -> {
if (functionMethod.getName().equals("variableRenderer")) {
return renderer;
}
return functionMethod.invoke(initialFunction, functionArgs);
}
);
}
private Function maskedFunctionProxy(Function initialFunction) {
return (Function) Proxy.newProxyInstance(
initialFunction.getClass().getClassLoader(),
new Class[]{Function.class},
(functionProxy, functionMethod, functionArgs) -> {
Object result;
try {
result = functionMethod.invoke(initialFunction, functionArgs);
} catch (InvocationTargetException e) {
throw e.getCause();
}
if (functionMethod.getName().equals("execute")) {
return "******";
}
return result;
}
);
}
}

View File

@@ -1,15 +1,14 @@
package io.kestra.core.runners.pebble.filters;
import java.util.List;
import java.util.Map;
import com.google.common.collect.Lists;
import io.pebbletemplates.pebble.error.PebbleException;
import io.pebbletemplates.pebble.extension.Filter;
import io.pebbletemplates.pebble.template.EvaluationContext;
import io.pebbletemplates.pebble.template.PebbleTemplate;
import java.util.List;
import java.util.Map;
public class ChunkFilter implements Filter {
@Override
public List<String> getArgumentNames() {
@@ -31,10 +30,6 @@ public class ChunkFilter implements Filter {
throw new PebbleException(null, "'chunk' filter can only be applied to List. Actual type was: " + input.getClass().getName(), lineNumber, self.getName());
}
Object sizeObj = args.get("size");
if (!(sizeObj instanceof Number)) {
throw new PebbleException(null, "'chunk' filter argument 'size' must be a number. Actual type was: " + sizeObj.getClass().getName(), lineNumber, self.getName());
}
return Lists.partition((List) input, ((Number) sizeObj).intValue());
return Lists.partition((List) input, ((Long) args.get("size")).intValue());
}
}
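In the `ChunkFilter` hunk above, one version checks that the `size` argument is a `Number` before converting it, while the other casts straight to `Long`. The check matters because the argument may arrive as an `Integer`, `Long`, or another `Number` subtype depending on how the template was written. A small illustration of the defensive version, assuming Guava is on the classpath (as it is for the filter itself):

```java
import com.google.common.collect.Lists;
import java.util.List;
import java.util.Map;

public class ChunkSizeCheckSketch {

    // Mirrors the defensive variant: accept any Number, reject everything else.
    static List<List<String>> chunk(List<String> input, Map<String, Object> args) {
        Object sizeObj = args.get("size");
        if (!(sizeObj instanceof Number)) {
            throw new IllegalArgumentException(
                "'size' must be a number, got: " + (sizeObj == null ? "null" : sizeObj.getClass().getName()));
        }
        return Lists.partition(input, ((Number) sizeObj).intValue());
    }

    public static void main(String[] args) {
        List<String> items = List.of("a", "b", "c", "d", "e");
        // Works whether 'size' arrives as an Integer or a Long.
        System.out.println(chunk(items, Map.of("size", 2)));
        System.out.println(chunk(items, Map.of("size", 2L)));
        // A blind ((Long) args.get("size")) cast would throw ClassCastException on the Integer case.
    }
}
```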

View File

@@ -17,17 +17,12 @@ import java.util.List;
import java.util.Map;
public class JqFilter implements Filter {
// Load Scope once as static to avoid repeated initialization
// This improves performance by loading builtin functions only once when the class loads
private static final Scope SCOPE;
private final Scope scope;
private final List<String> argumentNames = new ArrayList<>();
static {
SCOPE = Scope.newEmptyScope();
BuiltinFunctionLoader.getInstance().loadFunctions(Versions.JQ_1_6, SCOPE);
}
public JqFilter() {
scope = Scope.newEmptyScope();
BuiltinFunctionLoader.getInstance().loadFunctions(Versions.JQ_1_6, scope);
this.argumentNames.add("expression");
}
@@ -48,7 +43,10 @@ public class JqFilter implements Filter {
String pattern = (String) args.get("expression");
Scope rootScope = Scope.newEmptyScope();
BuiltinFunctionLoader.getInstance().loadFunctions(Versions.JQ_1_6, rootScope);
try {
JsonQuery q = JsonQuery.compile(pattern, Versions.JQ_1_6);
JsonNode in;
@@ -61,7 +59,7 @@ public class JqFilter implements Filter {
final List<Object> out = new ArrayList<>();
try {
q.apply(Scope.newChildScope(SCOPE), in, v -> {
q.apply(scope, in, v -> {
if (v instanceof TextNode) {
out.add(v.textValue());
} else if (v instanceof NullNode) {
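The `JqFilter` hunk contrasts loading the jq builtins once into a static `Scope` (and evaluating against `Scope.newChildScope`) with reloading them per filter instance. A reduced sketch of the shared-scope approach, reusing only the jackson-jq calls already visible in the diff; the JSON input and expression are illustrative, and jackson-jq plus Jackson are assumed to be on the classpath:

```java
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import net.thisptr.jackson.jq.BuiltinFunctionLoader;
import net.thisptr.jackson.jq.JsonQuery;
import net.thisptr.jackson.jq.Scope;
import net.thisptr.jackson.jq.Versions;

import java.util.ArrayList;
import java.util.List;

public class SharedJqScopeSketch {

    // Builtins are loaded once when the class loads, not on every evaluation.
    private static final Scope SCOPE;

    static {
        SCOPE = Scope.newEmptyScope();
        BuiltinFunctionLoader.getInstance().loadFunctions(Versions.JQ_1_6, SCOPE);
    }

    public static void main(String[] args) throws Exception {
        JsonNode in = new ObjectMapper().readTree("{\"values\": [1, 2, 3]}");
        JsonQuery query = JsonQuery.compile(".values | map(. * 2)", Versions.JQ_1_6);

        List<JsonNode> out = new ArrayList<>();
        // Each evaluation gets a cheap child scope instead of a fresh builtin load.
        query.apply(Scope.newChildScope(SCOPE), in, out::add);

        System.out.println(out); // [[2,4,6]]
    }
}
```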

View File

@@ -38,7 +38,7 @@ public class KvFunction implements Function {
String key = getKey(args, self, lineNumber);
String namespace = (String) args.get(NAMESPACE_ARG);
boolean errorOnMissing = Optional.ofNullable((Boolean) args.get(ERROR_ON_MISSING_ARG)).orElse(true);
Boolean errorOnMissing = Optional.ofNullable((Boolean) args.get(ERROR_ON_MISSING_ARG)).orElse(true);
Map<String, String> flow = (Map<String, String>) context.getVariable("flow");
String flowNamespace = flow.get(NAMESPACE_ARG);
@@ -53,16 +53,11 @@ public class KvFunction implements Function {
// we didn't check allowedNamespace here as it's checked in the kvStoreService itself
value = kvStoreService.get(flowTenantId, namespace, flowNamespace).getValue(key);
}
} catch (ResourceExpiredException e) {
if (errorOnMissing) {
throw new PebbleException(e, e.getMessage(), lineNumber, self.getName());
}
value = Optional.empty();
} catch (Exception e) {
throw new PebbleException(e, e.getMessage(), lineNumber, self.getName());
}
if (value.isEmpty() && errorOnMissing) {
if (value.isEmpty() && errorOnMissing == Boolean.TRUE) {
throw new PebbleException(null, "The key '" + key + "' does not exist in the namespace '" + namespace + "'.", lineNumber, self.getName());
}
@@ -90,4 +85,4 @@ public class KvFunction implements Function {
return (String) args.get(KEY_ARGS);
}
}
}

View File

@@ -1,29 +1,21 @@
package io.kestra.core.secret;
import io.kestra.core.models.QueryFilter;
import io.kestra.core.repositories.ArrayListTotal;
import io.micronaut.data.model.Pageable;
import jakarta.annotation.PostConstruct;
import jakarta.inject.Singleton;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.Strings;
import java.io.IOException;
import java.util.Base64;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Predicate;
import java.util.stream.Collectors;
@Singleton
@Slf4j
public class SecretService<META> {
public class SecretService {
private static final String SECRET_PREFIX = "SECRET_";
private Map<String, String> decodedSecrets;
@PostConstruct
private void postConstruct() {
this.decode();
@@ -54,28 +46,6 @@ public class SecretService<META> {
return secret;
}
public ArrayListTotal<META> list(Pageable pageable, String tenantId, List<QueryFilter> filters) throws IOException {
final Predicate<String> queryPredicate = filters.stream()
.filter(filter -> filter.field().equals(QueryFilter.Field.QUERY) && filter.value() != null)
.findFirst()
.map(filter -> {
if (filter.operation().equals(QueryFilter.Op.EQUALS)) {
return (Predicate<String>) s -> Strings.CI.contains(s, (String) filter.value());
} else if (filter.operation().equals(QueryFilter.Op.NOT_EQUALS)) {
return (Predicate<String>) s -> !Strings.CI.contains(s, (String) filter.value());
} else {
throw new IllegalArgumentException("Unsupported operation for QUERY filter: " + filter.operation());
}
})
.orElse(s -> true);
//noinspection unchecked
return ArrayListTotal.of(
pageable,
decodedSecrets.keySet().stream().filter(queryPredicate).map(s -> (META) s).toList()
);
}
public Map<String, Set<String>> inheritedSecrets(String tenantId, String namespace) throws IOException {
return Map.of(namespace, decodedSecrets.keySet());
}
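The removed `list` method above converts an optional QUERY filter into a `Predicate<String>` and runs it over the decoded secret keys. A simplified stand-in for that filter-to-predicate step is below; the `Filter` and `Op` types are invented, and plain lower-casing replaces the commons-lang `Strings.CI` helper used in the real code:

```java
import java.util.List;
import java.util.Locale;
import java.util.function.Predicate;

public class SecretKeyFilterSketch {

    enum Op { EQUALS, NOT_EQUALS }                // made-up stand-in for QueryFilter.Op
    record Filter(Op operation, String value) {}  // made-up stand-in for QueryFilter

    // Maps an optional QUERY filter to a predicate; no filter means "match everything".
    static Predicate<String> toPredicate(List<Filter> filters) {
        return filters.stream()
            .filter(f -> f.value() != null)
            .findFirst()
            .map(f -> switch (f.operation()) {
                case EQUALS -> (Predicate<String>) s -> containsIgnoreCase(s, f.value());
                case NOT_EQUALS -> (Predicate<String>) s -> !containsIgnoreCase(s, f.value());
            })
            .orElse(s -> true);
    }

    static boolean containsIgnoreCase(String haystack, String needle) {
        return haystack.toLowerCase(Locale.ROOT).contains(needle.toLowerCase(Locale.ROOT));
    }

    public static void main(String[] args) {
        List<String> keys = List.of("DB_PASSWORD", "API_TOKEN", "SMTP_PASSWORD");
        Predicate<String> query = toPredicate(List.of(new Filter(Op.EQUALS, "password")));
        System.out.println(keys.stream().filter(query).toList()); // [DB_PASSWORD, SMTP_PASSWORD]
    }
}
```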

View File

@@ -140,7 +140,7 @@ public final class JacksonMapper {
return mapper
.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false)
.setDefaultPropertyInclusion(JsonInclude.Include.NON_NULL)
.setSerializationInclusion(JsonInclude.Include.NON_NULL)
.registerModule(new JavaTimeModule())
.registerModule(new Jdk8Module())
.registerModule(new ParameterNamesModule())
@@ -153,7 +153,7 @@ public final class JacksonMapper {
private static ObjectMapper createIonObjectMapper() {
return configure(new IonObjectMapper(new IonFactory(createIonSystem())))
.setDefaultPropertyInclusion(JsonInclude.Include.ALWAYS)
.setSerializationInclusion(JsonInclude.Include.ALWAYS)
.registerModule(new IonModule());
}
@@ -172,7 +172,7 @@ public final class JacksonMapper {
return Pair.of(patch, revert);
}
public static JsonNode applyPatchesOnJsonNode(JsonNode jsonObject, List<JsonNode> patches) {
for (JsonNode patch : patches) {
try {
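In the `JacksonMapper` hunk, both sides keep the same inclusion modes: the JSON mapper drops null properties (`NON_NULL`) and the ION mapper keeps them (`ALWAYS`); only the setter used to apply the inclusion changes. As a quick reminder of what those modes do to serialized output, here is a simplified demo with plain Jackson, not the actual factory methods:

```java
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.ObjectMapper;

public class InclusionSketch {

    // Plain bean with a null field, to show what the inclusion settings do to the output.
    static class Task {
        public String id = "hello";
        public String description; // stays null
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper nonNull = new ObjectMapper()
            .setSerializationInclusion(JsonInclude.Include.NON_NULL);
        ObjectMapper always = new ObjectMapper()
            .setSerializationInclusion(JsonInclude.Include.ALWAYS);

        System.out.println(nonNull.writeValueAsString(new Task())); // {"id":"hello"}
        System.out.println(always.writeValueAsString(new Task()));  // {"id":"hello","description":null}
    }
}
```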

View File

@@ -3,21 +3,15 @@ package io.kestra.core.services;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.State;
import io.kestra.core.queues.QueueException;
import io.kestra.core.runners.ConcurrencyLimit;
import jakarta.inject.Singleton;
import java.util.EnumSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
/**
* Contains methods to manage concurrency limit.
* This is designed to be used by the API, the executor use lower level primitives.
*/
public interface ConcurrencyLimitService {
@Singleton
public class ConcurrencyLimitService {
Set<State.Type> VALID_TARGET_STATES =
protected static final Set<State.Type> VALID_TARGET_STATES =
EnumSet.of(State.Type.RUNNING, State.Type.CANCELLED, State.Type.FAILED);
/**
@@ -25,20 +19,18 @@ public interface ConcurrencyLimitService {
*
* @throws IllegalArgumentException in case the execution is not queued.
*/
Execution unqueue(Execution execution, State.Type state) throws QueueException;
public Execution unqueue(Execution execution, State.Type state) throws QueueException {
if (execution.getState().getCurrent() != State.Type.QUEUED) {
throw new IllegalArgumentException("Only QUEUED execution can be unqueued");
}
/**
* Find concurrency limits.
*/
List<ConcurrencyLimit> find(String tenantId);
state = (state == null) ? State.Type.RUNNING : state;
/**
* Update a concurrency limit.
*/
ConcurrencyLimit update(ConcurrencyLimit concurrencyLimit);
// Validate the target state, throwing an exception if the state is invalid
if (!VALID_TARGET_STATES.contains(state)) {
throw new IllegalArgumentException("Invalid target state: " + state + ". Valid states are: " + VALID_TARGET_STATES);
}
/**
* Find a concurrency limit by its identifier.
*/
Optional<ConcurrencyLimit> findById(String tenantId, String namespace, String flowId);
return execution.withState(state);
}
}
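The `ConcurrencyLimitService` hunk interleaves the interface declarations with the previous class-based `unqueue` body, which defaults a null target state to RUNNING and validates it against an `EnumSet`. A stripped-down sketch of that validation flow, using a made-up `State` enum in place of Kestra's:

```java
import java.util.EnumSet;
import java.util.Set;

public class UnqueueValidationSketch {

    enum State { CREATED, QUEUED, RUNNING, CANCELLED, FAILED }   // simplified stand-in

    static final Set<State> VALID_TARGET_STATES =
        EnumSet.of(State.RUNNING, State.CANCELLED, State.FAILED);

    // Returns the state an unqueued execution should move to, or throws if the request is invalid.
    static State unqueueTarget(State current, State requested) {
        if (current != State.QUEUED) {
            throw new IllegalArgumentException("Only QUEUED execution can be unqueued");
        }
        State target = (requested == null) ? State.RUNNING : requested;
        if (!VALID_TARGET_STATES.contains(target)) {
            throw new IllegalArgumentException(
                "Invalid target state: " + target + ". Valid states are: " + VALID_TARGET_STATES);
        }
        return target;
    }

    public static void main(String[] args) {
        System.out.println(unqueueTarget(State.QUEUED, null));          // RUNNING
        System.out.println(unqueueTarget(State.QUEUED, State.FAILED));  // FAILED
    }
}
```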

Some files were not shown because too many files have changed in this diff.