Compare commits

..

1 Commits

Author SHA1 Message Date
Roman Acevedo
9575cc1c87 fake commit to run flaky tests 2025-12-05 18:07:27 +01:00
334 changed files with 4192 additions and 6492 deletions

View File

@@ -63,9 +63,9 @@ You can also build it from a terminal using `./gradlew build`, the Gradle wrappe
- Configure the following environment variables: - Configure the following environment variables:
- `MICRONAUT_ENVIRONMENTS`: can be set to any string and will load a custom configuration file in `cli/src/main/resources/application-{env}.yml`. - `MICRONAUT_ENVIRONMENTS`: can be set to any string and will load a custom configuration file in `cli/src/main/resources/application-{env}.yml`.
- `KESTRA_PLUGINS_PATH`: is the path where you will save plugins as Jar and will be loaded on startup. - `KESTRA_PLUGINS_PATH`: is the path where you will save plugins as Jar and will be loaded on startup.
- See the screenshot below for an example: ![Intellij IDEA Configuration ](./assets/run-app.png) - See the screenshot below for an example: ![Intellij IDEA Configuration ](run-app.png)
- If you encounter **JavaScript memory heap out** error during startup, configure `NODE_OPTIONS` environment variable with some large value. - If you encounter **JavaScript memory heap out** error during startup, configure `NODE_OPTIONS` environment variable with some large value.
- Example `NODE_OPTIONS: --max-old-space-size=4096` or `NODE_OPTIONS: --max-old-space-size=8192` ![Intellij IDEA Configuration ](./assets/node_option_env_var.png) - Example `NODE_OPTIONS: --max-old-space-size=4096` or `NODE_OPTIONS: --max-old-space-size=8192` ![Intellij IDEA Configuration ](node_option_env_var.png)
- The server starts by default on port 8080 and is reachable on `http://localhost:8080` - The server starts by default on port 8080 and is reachable on `http://localhost:8080`
If you want to launch all tests, you need Python and some packages installed on your machine, on Ubuntu you can install them with: If you want to launch all tests, you need Python and some packages installed on your machine, on Ubuntu you can install them with:

View File

@@ -51,7 +51,7 @@ updates:
storybook: storybook:
applies-to: version-updates applies-to: version-updates
patterns: ["storybook*", "@storybook/*", "eslint-plugin-storybook"] patterns: ["storybook*", "@storybook/*"]
vitest: vitest:
applies-to: version-updates applies-to: version-updates
@@ -67,10 +67,10 @@ updates:
"@types/*", "@types/*",
"storybook*", "storybook*",
"@storybook/*", "@storybook/*",
"eslint-plugin-storybook",
"vitest", "vitest",
"@vitest/*", "@vitest/*",
# Temporary exclusion of these packages from major updates # Temporary exclusion of these packages from major updates
"eslint-plugin-storybook",
"eslint-plugin-vue", "eslint-plugin-vue",
] ]
@@ -84,7 +84,6 @@ updates:
"@types/*", "@types/*",
"storybook*", "storybook*",
"@storybook/*", "@storybook/*",
"eslint-plugin-storybook",
"vitest", "vitest",
"@vitest/*", "@vitest/*",
# Temporary exclusion of these packages from minor updates # Temporary exclusion of these packages from minor updates
@@ -103,7 +102,6 @@ updates:
"@types/*", "@types/*",
"storybook*", "storybook*",
"@storybook/*", "@storybook/*",
"eslint-plugin-storybook",
"vitest", "vitest",
"@vitest/*", "@vitest/*",
] ]

View File

Before

Width:  |  Height:  |  Size: 130 KiB

After

Width:  |  Height:  |  Size: 130 KiB

View File

@@ -12,7 +12,7 @@ _Example: Replaces legacy scroll directive with the new API._
### 🔗 Related Issue ### 🔗 Related Issue
Which issue does this PR resolve? Use [GitHub Keywords](https://docs.github.com/en/get-started/writing-on-github/working-with-advanced-formatting/using-keywords-in-issues-and-pull-requests#linking-a-pull-request-to-an-issue) to automatically link the pull request to the issue. Which issue does this PR resolve? Use [GitHub Keywords](https://docs.github.com/en/get-started/writing-on-github/working-with-advanced-formatting/using-keywords-in-issues-and-pull-requests#linking-a-pull-request-to-an-issue) to automatically link the pull request to the issue.
_Example: Closes https://github.com/kestra-io/kestra/issues/ISSUE_NUMBER._ _Example: Closes https://github.com/kestra-io/kestra/issues/12345._
### 🎨 Frontend Checklist ### 🎨 Frontend Checklist

View File

Before

Width:  |  Height:  |  Size: 210 KiB

After

Width:  |  Height:  |  Size: 210 KiB

View File

@@ -64,7 +64,6 @@ jobs:
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }} DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
GH_PERSONAL_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
publish-develop-maven: publish-develop-maven:

View File

@@ -32,4 +32,3 @@ jobs:
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }} DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
GH_PERSONAL_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}

View File

@@ -43,7 +43,7 @@ jobs:
# Upload dependency check report # Upload dependency check report
- name: Upload dependency check report - name: Upload dependency check report
uses: actions/upload-artifact@v6 uses: actions/upload-artifact@v5
if: ${{ always() }} if: ${{ always() }}
with: with:
name: dependency-check-report name: dependency-check-report

View File

@@ -29,8 +29,8 @@ start_time2=$(date +%s)
echo "cd ./ui" echo "cd ./ui"
cd ./ui cd ./ui
echo "npm ci" echo "npm i"
npm ci npm i
echo 'sh ./run-e2e-tests.sh --kestra-docker-image-to-test "kestra/kestra:$LOCAL_IMAGE_VERSION"' echo 'sh ./run-e2e-tests.sh --kestra-docker-image-to-test "kestra/kestra:$LOCAL_IMAGE_VERSION"'
./run-e2e-tests.sh --kestra-docker-image-to-test "kestra/kestra:$LOCAL_IMAGE_VERSION" ./run-e2e-tests.sh --kestra-docker-image-to-test "kestra/kestra:$LOCAL_IMAGE_VERSION"

View File

@@ -21,7 +21,7 @@ plugins {
// test // test
id "com.adarshr.test-logger" version "4.0.0" id "com.adarshr.test-logger" version "4.0.0"
id "org.sonarqube" version "7.2.1.6560" id "org.sonarqube" version "7.1.0.6387"
id 'jacoco-report-aggregation' id 'jacoco-report-aggregation'
// helper // helper
@@ -171,22 +171,13 @@ allprojects {
subprojects {subProj -> subprojects {subProj ->
if (subProj.name != 'platform' && subProj.name != 'jmh-benchmarks') { if (subProj.name != 'platform' && subProj.name != 'jmh-benchmarks') {
apply plugin: "com.adarshr.test-logger" apply plugin: "com.adarshr.test-logger"
apply plugin: 'jacoco'
java { java {
sourceCompatibility = targetJavaVersion sourceCompatibility = targetJavaVersion
targetCompatibility = targetJavaVersion targetCompatibility = targetJavaVersion
} }
configurations {
agent {
canBeResolved = true
canBeConsumed = true
}
}
dependencies { dependencies {
// Platform // Platform
testAnnotationProcessor enforcedPlatform(project(":platform")) testAnnotationProcessor enforcedPlatform(project(":platform"))
@@ -213,17 +204,9 @@ subprojects {subProj ->
//assertj //assertj
testImplementation 'org.assertj:assertj-core' testImplementation 'org.assertj:assertj-core'
agent "org.aspectj:aspectjweaver:1.9.25.1"
testImplementation platform("io.qameta.allure:allure-bom")
testImplementation "io.qameta.allure:allure-junit5"
} }
def commonTestConfig = { Test t -> def commonTestConfig = { Test t ->
t.ignoreFailures = true
t.finalizedBy jacocoTestReport
// set Xmx for test workers // set Xmx for test workers
t.maxHeapSize = '4g' t.maxHeapSize = '4g'
@@ -249,52 +232,6 @@ subprojects {subProj ->
// } // }
} }
tasks.register('integrationTest', Test) { Test t ->
description = 'Runs integration tests'
group = 'verification'
useJUnitPlatform {
includeTags 'integration'
}
testClassesDirs = sourceSets.test.output.classesDirs
classpath = sourceSets.test.runtimeClasspath
reports {
junitXml.required = true
junitXml.outputPerTestCase = true
junitXml.mergeReruns = true
junitXml.includeSystemErrLog = true
junitXml.outputLocation = layout.buildDirectory.dir("test-results/test")
}
// Integration tests typically not parallel (but you can enable)
maxParallelForks = 1
commonTestConfig(t)
}
tasks.register('unitTest', Test) { Test t ->
description = 'Runs unit tests'
group = 'verification'
useJUnitPlatform {
excludeTags 'flaky', 'integration'
}
testClassesDirs = sourceSets.test.output.classesDirs
classpath = sourceSets.test.runtimeClasspath
reports {
junitXml.required = true
junitXml.outputPerTestCase = true
junitXml.mergeReruns = true
junitXml.includeSystemErrLog = true
junitXml.outputLocation = layout.buildDirectory.dir("test-results/test")
}
commonTestConfig(t)
}
tasks.register('flakyTest', Test) { Test t -> tasks.register('flakyTest', Test) { Test t ->
group = 'verification' group = 'verification'
description = 'Runs tests tagged @Flaky but does not fail the build.' description = 'Runs tests tagged @Flaky but does not fail the build.'
@@ -302,6 +239,7 @@ subprojects {subProj ->
useJUnitPlatform { useJUnitPlatform {
includeTags 'flaky' includeTags 'flaky'
} }
ignoreFailures = true
reports { reports {
junitXml.required = true junitXml.required = true
@@ -311,13 +249,10 @@ subprojects {subProj ->
junitXml.outputLocation = layout.buildDirectory.dir("test-results/flakyTest") junitXml.outputLocation = layout.buildDirectory.dir("test-results/flakyTest")
} }
commonTestConfig(t) commonTestConfig(t)
} }
// test task (default) test {
tasks.named('test', Test) { Test t ->
group = 'verification'
description = 'Runs all non-flaky tests.'
useJUnitPlatform { useJUnitPlatform {
excludeTags 'flaky' excludeTags 'flaky'
} }
@@ -328,12 +263,10 @@ subprojects {subProj ->
junitXml.includeSystemErrLog = true junitXml.includeSystemErrLog = true
junitXml.outputLocation = layout.buildDirectory.dir("test-results/test") junitXml.outputLocation = layout.buildDirectory.dir("test-results/test")
} }
commonTestConfig(t) commonTestConfig(it)
jvmArgs = ["-javaagent:${configurations.agent.singleFile}"]
}
tasks.named('check') {
dependsOn(tasks.named('test'))// default behaviour finalizedBy(tasks.named('flakyTest'))
} }
testlogger { testlogger {
@@ -349,25 +282,83 @@ subprojects {subProj ->
} }
} }
/**********************************************************************************************************************\
* End-to-End Tests
**********************************************************************************************************************/
def e2eTestsCheck = tasks.register('e2eTestsCheck') {
group = 'verification'
description = "Runs the 'check' task for all e2e-tests modules"
doFirst {
project.ext.set("e2e-tests", true)
}
}
subprojects {
// Add e2e-tests modules check tasks to e2eTestsCheck
if (project.name.startsWith("e2e-tests")) {
test {
onlyIf {
project.hasProperty("e2e-tests")
}
}
}
afterEvaluate {
// Add e2e-tests modules check tasks to e2eTestsCheck
if (project.name.startsWith("e2e-tests")) {
e2eTestsCheck.configure {
finalizedBy(check)
}
}
}
}
/**********************************************************************************************************************\
* Allure Reports
**********************************************************************************************************************/
subprojects {
if (it.name != 'platform' && it.name != 'jmh-benchmarks') {
dependencies {
testImplementation platform("io.qameta.allure:allure-bom")
testImplementation "io.qameta.allure:allure-junit5"
}
configurations {
agent {
canBeResolved = true
canBeConsumed = true
}
}
dependencies {
agent "org.aspectj:aspectjweaver:1.9.25"
}
test {
jvmArgs = ["-javaagent:${configurations.agent.singleFile}"]
}
}
}
/**********************************************************************************************************************\
* Jacoco
**********************************************************************************************************************/
subprojects {
if (it.name != 'platform' && it.name != 'jmh-benchmarks') {
apply plugin: 'jacoco'
test {
finalizedBy jacocoTestReport
}
jacocoTestReport {
dependsOn test
}
}
}
tasks.named('check') { tasks.named('check') {
dependsOn tasks.named('testCodeCoverageReport', JacocoReport) dependsOn tasks.named('testCodeCoverageReport', JacocoReport)
finalizedBy jacocoTestReport
}
tasks.register('unitTest') {
// No jacocoTestReport here, because it depends by default on :test,
// and that would make :test being run twice in our CI.
// In practice the report will be generated later in the CI by :check.
}
tasks.register('integrationTest') {
dependsOn tasks.named('testCodeCoverageReport', JacocoReport)
finalizedBy jacocoTestReport
}
tasks.register('flakyTest') {
dependsOn tasks.named('testCodeCoverageReport', JacocoReport)
finalizedBy jacocoTestReport
} }
tasks.named('testCodeCoverageReport') { tasks.named('testCodeCoverageReport') {

View File

@@ -42,7 +42,7 @@ import picocli.CommandLine.Option;
@Introspected @Introspected
public abstract class AbstractCommand implements Callable<Integer> { public abstract class AbstractCommand implements Callable<Integer> {
@Inject @Inject
protected ApplicationContext applicationContext; private ApplicationContext applicationContext;
@Inject @Inject
private EndpointDefaultConfiguration endpointConfiguration; private EndpointDefaultConfiguration endpointConfiguration;

View File

@@ -18,8 +18,7 @@ import picocli.CommandLine;
FlowDotCommand.class, FlowDotCommand.class,
FlowExportCommand.class, FlowExportCommand.class,
FlowUpdateCommand.class, FlowUpdateCommand.class,
FlowUpdatesCommand.class, FlowUpdatesCommand.class
FlowsSyncFromSourceCommand.class
} }
) )
@Slf4j @Slf4j

View File

@@ -1,55 +0,0 @@
package io.kestra.cli.commands.flows;
import io.kestra.cli.AbstractApiCommand;
import io.kestra.cli.services.TenantIdSelectorService;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.repositories.FlowRepositoryInterface;
import jakarta.inject.Inject;
import java.util.List;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
@CommandLine.Command(
    name = "syncFromSource",
    // Fixed description: this command re-saves every flow of the tenant,
    // not a single one (see the "%s flow(s) successfully updated!" summary).
    description = "Sync all flows by re-saving them from their stored source",
    mixinStandardHelpOptions = true
)
@Slf4j
public class FlowsSyncFromSourceCommand extends AbstractApiCommand {
    @Inject
    private TenantIdSelectorService tenantService;

    /**
     * Re-persists every flow of the selected tenant from its YAML source.
     *
     * @return {@code 0} on success; prints one line per updated flow plus a summary.
     * @throws Exception if the parent command setup or a repository call fails.
     */
    @SuppressWarnings("deprecation")
    @Override
    public Integer call() throws Exception {
        super.call();

        FlowRepositoryInterface repository = applicationContext.getBean(FlowRepositoryInterface.class);
        String tenant = tenantService.getTenantId(tenantId);
        List<FlowWithSource> persistedFlows = repository.findAllWithSource(tenant);

        int count = 0;
        for (FlowWithSource persistedFlow : persistedFlows) {
            // Append exactly one trailing newline so the source differs from the
            // persisted one: updates are skipped when no change is detected, and
            // this forces an update from the source for every flow.
            GenericFlow flow = GenericFlow.fromYaml(tenant, persistedFlow.getSource() + System.lineSeparator());
            repository.update(flow, persistedFlow);
            stdOut("- %s.%s".formatted(flow.getNamespace(), flow.getId()));
            count++;
        }

        stdOut("%s flow(s) successfully updated!".formatted(count));
        return 0;
    }

    // Plugins must be loaded so flows referencing external plugin tasks still parse.
    // NOTE(review): presumably overrides a parent hook — confirm before adding @Override.
    protected boolean loadExternalPlugins() {
        return true;
    }
}

View File

@@ -1,73 +0,0 @@
package io.kestra.cli.commands.flows;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import static org.assertj.core.api.Assertions.assertThat;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.micronaut.configuration.picocli.PicocliRunner;
import io.micronaut.context.ApplicationContext;
import io.micronaut.context.env.Environment;
import io.micronaut.runtime.server.EmbeddedServer;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.net.URL;
import java.util.List;
import org.junit.jupiter.api.Test;
class FlowsSyncFromSourceCommandTest {

    /**
     * Seeds flows through the regular update command, then verifies that
     * {@code syncFromSource} re-saves each of them and bumps every revision.
     */
    @Test
    void updateAllFlowsFromSource() {
        URL directory = FlowUpdatesCommandTest.class.getClassLoader().getResource("flows");
        ByteArrayOutputStream capturedOut = new ByteArrayOutputStream();
        System.setOut(new PrintStream(capturedOut));

        try (ApplicationContext context = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
            EmbeddedServer server = context.getBean(EmbeddedServer.class);
            server.start();

            // Step 1: load the test flows via the update command.
            String[] updateArgs = {
                "--plugins",
                "/tmp", // pass this arg because it can cause failure
                "--server",
                server.getURL().toString(),
                "--user",
                "myuser:pass:word",
                "--delete",
                directory.getPath(),
            };
            PicocliRunner.call(FlowUpdatesCommand.class, context, updateArgs);
            assertThat(capturedOut.toString()).contains("successfully updated !");
            capturedOut.reset();

            FlowRepositoryInterface repository = context.getBean(FlowRepositoryInterface.class);
            for (Flow flow : repository.findAll(MAIN_TENANT)) {
                assertThat(flow.getRevision()).isEqualTo(1);
            }

            // Step 2: force a re-save of every flow from its source.
            String[] syncArgs = {
                "--plugins",
                "/tmp", // pass this arg because it can cause failure
                "--server",
                server.getURL().toString(),
                "--user",
                "myuser:pass:word"
            };
            PicocliRunner.call(FlowsSyncFromSourceCommand.class, context, syncArgs);

            String output = capturedOut.toString();
            assertThat(output).contains("4 flow(s) successfully updated!");
            assertThat(output).contains("- io.kestra.outsider.quattro");
            assertThat(output).contains("- io.kestra.cli.second");
            assertThat(output).contains("- io.kestra.cli.third");
            assertThat(output).contains("- io.kestra.cli.first");

            // Every flow was re-saved, so each revision was bumped once.
            for (Flow flow : repository.findAll(MAIN_TENANT)) {
                assertThat(flow.getRevision()).isEqualTo(2);
            }
        }
    }
}

View File

@@ -4,6 +4,7 @@ import io.micronaut.configuration.picocli.PicocliRunner;
import io.micronaut.context.ApplicationContext; import io.micronaut.context.ApplicationContext;
import io.micronaut.context.env.Environment; import io.micronaut.context.env.Environment;
import org.apache.commons.io.FileUtils; import org.apache.commons.io.FileUtils;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import java.io.File; import java.io.File;
@@ -14,6 +15,7 @@ import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
import java.util.List; import java.util.List;
import java.util.Objects; import java.util.Objects;
import java.util.stream.Collectors;
import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThat;
@@ -23,8 +25,7 @@ class PluginDocCommandTest {
@Test @Test
void run() throws IOException, URISyntaxException { void run() throws IOException, URISyntaxException {
var testDirectoryName = PluginListCommandTest.class.getSimpleName(); Path pluginsPath = Files.createTempDirectory(PluginListCommandTest.class.getSimpleName());
Path pluginsPath = Files.createTempDirectory(testDirectoryName + "_pluginsPath_");
pluginsPath.toFile().deleteOnExit(); pluginsPath.toFile().deleteOnExit();
FileUtils.copyFile( FileUtils.copyFile(
@@ -33,7 +34,7 @@ class PluginDocCommandTest {
new File(URI.create("file://" + pluginsPath.toAbsolutePath() + "/" + PLUGIN_TEMPLATE_TEST)) new File(URI.create("file://" + pluginsPath.toAbsolutePath() + "/" + PLUGIN_TEMPLATE_TEST))
); );
Path docPath = Files.createTempDirectory(testDirectoryName + "_docPath_"); Path docPath = Files.createTempDirectory(PluginInstallCommandTest.class.getSimpleName());
docPath.toFile().deleteOnExit(); docPath.toFile().deleteOnExit();
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) { try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
@@ -42,9 +43,9 @@ class PluginDocCommandTest {
List<Path> files = Files.list(docPath).toList(); List<Path> files = Files.list(docPath).toList();
assertThat(files.stream().map(path -> path.getFileName().toString())).contains("plugin-template-test"); assertThat(files.size()).isEqualTo(1);
// don't know why, but sometimes there is an addition "plugin-notifications" directory present assertThat(files.getFirst().getFileName().toString()).isEqualTo("plugin-template-test");
var directory = files.stream().filter(path -> "plugin-template-test".equals(path.getFileName().toString())).findFirst().get().toFile(); var directory = files.getFirst().toFile();
assertThat(directory.isDirectory()).isTrue(); assertThat(directory.isDirectory()).isTrue();
assertThat(directory.listFiles().length).isEqualTo(3); assertThat(directory.listFiles().length).isEqualTo(3);

View File

@@ -82,8 +82,8 @@ dependencies {
testImplementation "io.micronaut:micronaut-http-server-netty" testImplementation "io.micronaut:micronaut-http-server-netty"
testImplementation "io.micronaut:micronaut-management" testImplementation "io.micronaut:micronaut-management"
testImplementation "org.testcontainers:testcontainers:1.21.4" testImplementation "org.testcontainers:testcontainers:1.21.3"
testImplementation "org.testcontainers:junit-jupiter:1.21.4" testImplementation "org.testcontainers:junit-jupiter:1.21.3"
testImplementation "org.bouncycastle:bcpkix-jdk18on" testImplementation "org.bouncycastle:bcpkix-jdk18on"
testImplementation "org.wiremock:wiremock-jetty12" testImplementation "org.wiremock:wiremock-jetty12"

View File

@@ -42,12 +42,13 @@ import io.kestra.core.plugins.PluginRegistry;
import io.kestra.core.plugins.RegisteredPlugin; import io.kestra.core.plugins.RegisteredPlugin;
import io.kestra.core.serializers.JacksonMapper; import io.kestra.core.serializers.JacksonMapper;
import io.micronaut.core.annotation.Nullable; import io.micronaut.core.annotation.Nullable;
import io.swagger.v3.oas.annotations.Hidden;
import io.swagger.v3.oas.annotations.media.Content; import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.inject.Inject; import jakarta.inject.Inject;
import jakarta.inject.Singleton; import jakarta.inject.Singleton;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.reflect.*; import java.lang.reflect.*;
import java.time.*; import java.time.*;
@@ -298,9 +299,7 @@ public class JsonSchemaGenerator {
} }
// default value // default value
builder.forFields() builder.forFields().withDefaultResolver(this::defaults);
.withIgnoreCheck(fieldScope -> fieldScope.getAnnotation(Hidden.class) != null)
.withDefaultResolver(this::defaults);
// def name // def name
builder.forTypesInGeneral() builder.forTypesInGeneral()
@@ -810,9 +809,9 @@ public class JsonSchemaGenerator {
// we don't return base properties unless specified with @PluginProperty and hidden is false // we don't return base properties unless specified with @PluginProperty and hidden is false
builder builder
.forFields() .forFields()
.withIgnoreCheck(fieldScope -> (base != null && .withIgnoreCheck(fieldScope -> base != null &&
(fieldScope.getAnnotation(PluginProperty.class) == null || fieldScope.getAnnotation(PluginProperty.class).hidden()) && (fieldScope.getAnnotation(PluginProperty.class) == null || fieldScope.getAnnotation(PluginProperty.class).hidden()) &&
fieldScope.getDeclaringType().getTypeName().equals(base.getName())) || fieldScope.getAnnotation(Hidden.class) != null fieldScope.getDeclaringType().getTypeName().equals(base.getName())
); );
SchemaGeneratorConfig schemaGeneratorConfig = builder.build(); SchemaGeneratorConfig schemaGeneratorConfig = builder.build();

View File

@@ -3,7 +3,6 @@ package io.kestra.core.docs;
import io.kestra.core.models.annotations.PluginSubGroup; import io.kestra.core.models.annotations.PluginSubGroup;
import io.kestra.core.plugins.RegisteredPlugin; import io.kestra.core.plugins.RegisteredPlugin;
import io.micronaut.core.annotation.Nullable; import io.micronaut.core.annotation.Nullable;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data; import lombok.Data;
import lombok.NoArgsConstructor; import lombok.NoArgsConstructor;
@@ -118,17 +117,10 @@ public class Plugin {
.filter(not(io.kestra.core.models.Plugin::isInternal)) .filter(not(io.kestra.core.models.Plugin::isInternal))
.filter(clazzFilter) .filter(clazzFilter)
.filter(c -> !c.getName().startsWith("org.kestra.")) .filter(c -> !c.getName().startsWith("org.kestra."))
.map(c -> { .map(c -> new PluginElementMetadata(c.getName(), io.kestra.core.models.Plugin.isDeprecated(c) ? true : null))
Schema schema = c.getAnnotation(Schema.class);
var title = Optional.ofNullable(schema).map(Schema::title).filter(t -> !t.isEmpty()).orElse(null);
var description = Optional.ofNullable(schema).map(Schema::description).filter(d -> !d.isEmpty()).orElse(null);
var deprecated = io.kestra.core.models.Plugin.isDeprecated(c) ? true : null;
return new PluginElementMetadata(c.getName(), deprecated, title, description);
})
.toList(); .toList();
} }
public record PluginElementMetadata(String cls, Boolean deprecated, String title, String description) {} public record PluginElementMetadata(String cls, Boolean deprecated) {
}
} }

View File

@@ -1,37 +0,0 @@
package io.kestra.core.exceptions;
import io.kestra.core.models.flows.Data;
import io.kestra.core.models.flows.Input;
import io.kestra.core.models.flows.Output;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
/**
* Exception that can be thrown when Inputs/Outputs have validation problems.
*/
public class InputOutputValidationException extends KestraRuntimeException {

    public InputOutputValidationException(String message) {
        super(message);
    }

    /** Builds an exception describing an invalid value for the given input. */
    public static InputOutputValidationException of(String message, Input<?> input) {
        String inputMessage = "Invalid value for input" + " `" + input.getId() + "`. Cause: " + message;
        return new InputOutputValidationException(inputMessage);
    }

    /** Builds an exception describing an invalid value for the given output. */
    public static InputOutputValidationException of(String message, Output output) {
        String outputMessage = "Invalid value for output" + " `" + output.getId() + "`. Cause: " + message;
        return new InputOutputValidationException(outputMessage);
    }

    /** Builds an exception with the given message. */
    public static InputOutputValidationException of(String message) {
        return new InputOutputValidationException(message);
    }

    /**
     * Combines the messages of the given exceptions into one exception,
     * one message per line.
     *
     * <p>Bug fix: as a factory (like the {@code of(...)} overloads) this now
     * RETURNS the combined exception instead of throwing it, so callers can
     * write {@code throw InputOutputValidationException.merge(errors)}.
     *
     * @param exceptions the validation exceptions to combine; must be non-null
     * @return a single exception whose message joins all input messages with line separators
     */
    public static InputOutputValidationException merge(Set<InputOutputValidationException> exceptions) {
        String combinedMessage = exceptions.stream()
            .map(InputOutputValidationException::getMessage)
            .collect(Collectors.joining(System.lineSeparator()));
        return new InputOutputValidationException(combinedMessage);
    }
}

View File

@@ -1,8 +1,6 @@
package io.kestra.core.exceptions; package io.kestra.core.exceptions;
import java.io.Serial; import java.io.Serial;
import java.util.List;
import java.util.stream.Collectors;
/** /**
* The top-level {@link KestraRuntimeException} for non-recoverable errors. * The top-level {@link KestraRuntimeException} for non-recoverable errors.

View File

@@ -4,16 +4,13 @@ import io.kestra.core.utils.MapUtils;
import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.annotation.Nullable; import jakarta.annotation.Nullable;
import jakarta.validation.constraints.NotEmpty; import jakarta.validation.constraints.NotEmpty;
import jakarta.validation.constraints.Pattern;
import java.util.*; import java.util.*;
import java.util.function.Predicate; import java.util.function.Predicate;
import java.util.stream.Collectors; import java.util.stream.Collectors;
@Schema(description = "A key/value pair that can be attached to a Flow or Execution. Labels are often used to organize and categorize objects.") @Schema(description = "A key/value pair that can be attached to a Flow or Execution. Labels are often used to organize and categorize objects.")
public record Label( public record Label(@NotEmpty String key, @NotEmpty String value) {
@NotEmpty @Pattern(regexp = "^[\\p{Ll}][\\p{L}0-9._-]*$", message = "Invalid label key. A valid key contains only lowercase letters numbers hyphens (-) underscores (_) or periods (.) and must begin with a lowercase letter.") String key,
@NotEmpty String value) {
public static final String SYSTEM_PREFIX = "system."; public static final String SYSTEM_PREFIX = "system.";
// system labels // system labels
@@ -26,7 +23,6 @@ public record Label(
public static final String REPLAYED = SYSTEM_PREFIX + "replayed"; public static final String REPLAYED = SYSTEM_PREFIX + "replayed";
public static final String SIMULATED_EXECUTION = SYSTEM_PREFIX + "simulatedExecution"; public static final String SIMULATED_EXECUTION = SYSTEM_PREFIX + "simulatedExecution";
public static final String TEST = SYSTEM_PREFIX + "test"; public static final String TEST = SYSTEM_PREFIX + "test";
public static final String FROM = SYSTEM_PREFIX + "from";
/** /**
* Static helper method for converting a list of labels to a nested map. * Static helper method for converting a list of labels to a nested map.

View File

@@ -94,7 +94,7 @@ public record QueryFilter(
KIND("kind") { KIND("kind") {
@Override @Override
public List<Op> supportedOp() { public List<Op> supportedOp() {
return List.of(Op.EQUALS,Op.NOT_EQUALS, Op.IN, Op.NOT_IN); return List.of(Op.EQUALS,Op.NOT_EQUALS);
} }
}, },
LABELS("labels") { LABELS("labels") {
@@ -106,7 +106,7 @@ public record QueryFilter(
FLOW_ID("flowId") { FLOW_ID("flowId") {
@Override @Override
public List<Op> supportedOp() { public List<Op> supportedOp() {
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.REGEX, Op.IN, Op.NOT_IN, Op.PREFIX); return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.REGEX);
} }
}, },
UPDATED("updated") { UPDATED("updated") {
@@ -151,12 +151,6 @@ public record QueryFilter(
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.IN, Op.NOT_IN); return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.IN, Op.NOT_IN);
} }
}, },
TRIGGER_STATE("triggerState"){
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS, Op.NOT_EQUALS);
}
},
EXECUTION_ID("executionId") { EXECUTION_ID("executionId") {
@Override @Override
public List<Op> supportedOp() { public List<Op> supportedOp() {
@@ -232,7 +226,7 @@ public record QueryFilter(
FLOW { FLOW {
@Override @Override
public List<Field> supportedField() { public List<Field> supportedField() {
return List.of(Field.LABELS, Field.NAMESPACE, Field.QUERY, Field.SCOPE, Field.FLOW_ID); return List.of(Field.LABELS, Field.NAMESPACE, Field.QUERY, Field.SCOPE);
} }
}, },
NAMESPACE { NAMESPACE {
@@ -247,7 +241,7 @@ public record QueryFilter(
return List.of( return List.of(
Field.QUERY, Field.SCOPE, Field.FLOW_ID, Field.START_DATE, Field.END_DATE, Field.QUERY, Field.SCOPE, Field.FLOW_ID, Field.START_DATE, Field.END_DATE,
Field.STATE, Field.LABELS, Field.TRIGGER_EXECUTION_ID, Field.CHILD_FILTER, Field.STATE, Field.LABELS, Field.TRIGGER_EXECUTION_ID, Field.CHILD_FILTER,
Field.NAMESPACE, Field.KIND Field.NAMESPACE,Field.KIND
); );
} }
}, },
@@ -277,7 +271,7 @@ public record QueryFilter(
@Override @Override
public List<Field> supportedField() { public List<Field> supportedField() {
return List.of(Field.QUERY, Field.SCOPE, Field.NAMESPACE, Field.WORKER_ID, Field.FLOW_ID, return List.of(Field.QUERY, Field.SCOPE, Field.NAMESPACE, Field.WORKER_ID, Field.FLOW_ID,
Field.START_DATE, Field.END_DATE, Field.TRIGGER_ID, Field.TRIGGER_STATE Field.START_DATE, Field.END_DATE, Field.TRIGGER_ID
); );
} }
}, },

View File

@@ -16,7 +16,6 @@ import jakarta.validation.constraints.NotNull;
public class Setting { public class Setting {
public static final String INSTANCE_UUID = "instance.uuid"; public static final String INSTANCE_UUID = "instance.uuid";
public static final String INSTANCE_VERSION = "instance.version"; public static final String INSTANCE_VERSION = "instance.version";
public static final String INSTANCE_EDITION = "instance.edition";
@NotNull @NotNull
private String key; private String key;

View File

@@ -3,7 +3,9 @@ package io.kestra.core.models.executions;
import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonInclude;
import io.kestra.core.models.TenantInterface; import io.kestra.core.models.TenantInterface;
import io.kestra.core.models.flows.State; import io.kestra.core.models.flows.State;
import io.kestra.core.models.tasks.FlowableTask;
import io.kestra.core.models.tasks.ResolvedTask; import io.kestra.core.models.tasks.ResolvedTask;
import io.kestra.core.models.tasks.Task;
import io.kestra.core.models.tasks.retrys.AbstractRetry; import io.kestra.core.models.tasks.retrys.AbstractRetry;
import io.kestra.core.utils.IdUtils; import io.kestra.core.utils.IdUtils;
import io.swagger.v3.oas.annotations.Hidden; import io.swagger.v3.oas.annotations.Hidden;
@@ -93,16 +95,8 @@ public class TaskRun implements TenantInterface {
this.forceExecution this.forceExecution
); );
} }
public TaskRun withStateAndAttempt(State.Type state) {
List<TaskRunAttempt> newAttempts = new ArrayList<>(this.attempts != null ? this.attempts : List.of());
if (newAttempts.isEmpty()) {
newAttempts.add(TaskRunAttempt.builder().state(new State(state)).build());
} else {
TaskRunAttempt updatedLast = newAttempts.getLast().withState(state);
newAttempts.set(newAttempts.size() - 1, updatedLast);
}
public TaskRun replaceState(State newState) {
return new TaskRun( return new TaskRun(
this.tenantId, this.tenantId,
this.id, this.id,
@@ -112,9 +106,9 @@ public class TaskRun implements TenantInterface {
this.taskId, this.taskId,
this.parentTaskRunId, this.parentTaskRunId,
this.value, this.value,
newAttempts, this.attempts,
this.outputs, this.outputs,
this.state.withState(state), newState,
this.iteration, this.iteration,
this.dynamic, this.dynamic,
this.forceExecution this.forceExecution

View File

@@ -1,5 +1,7 @@
package io.kestra.core.models.flows; package io.kestra.core.models.flows;
import io.kestra.core.models.validations.ManualConstraintViolation;
import jakarta.validation.ConstraintViolationException;
/** /**
* Interface for defining an identifiable and typed data. * Interface for defining an identifiable and typed data.
@@ -27,4 +29,16 @@ public interface Data {
*/ */
String getDisplayName(); String getDisplayName();
@SuppressWarnings("unchecked")
default ConstraintViolationException toConstraintViolationException(String message, Object value) {
Class<Data> cls = (Class<Data>) this.getClass();
return ManualConstraintViolation.toConstraintViolationException(
"Invalid " + (this instanceof Output ? "output" : "input") + " for `" + getId() + "`, " + message + ", but received `" + value + "`",
this,
cls,
this.getId(),
value
);
}
} }

View File

@@ -1,5 +1,6 @@
package io.kestra.core.models.flows; package io.kestra.core.models.flows;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
@@ -129,7 +130,7 @@ public class Flow extends AbstractFlow implements HasUID {
@Valid @Valid
@PluginProperty @PluginProperty
List<SLA> sla; List<SLA> sla;
@Schema( @Schema(
title = "Conditions evaluated before the flow is executed.", title = "Conditions evaluated before the flow is executed.",
description = "A list of conditions that are evaluated before the flow is executed. If no checks are defined, the flow executes normally." description = "A list of conditions that are evaluated before the flow is executed. If no checks are defined, the flow executes normally."
@@ -354,7 +355,7 @@ public class Flow extends AbstractFlow implements HasUID {
* To be conservative a flow MUST not return any source. * To be conservative a flow MUST not return any source.
*/ */
@Override @Override
@Schema(hidden = true) @JsonIgnore
public String getSource() { public String getSource() {
return null; return null;
} }

View File

@@ -1,12 +1,14 @@
package io.kestra.core.models.flows; package io.kestra.core.models.flows;
import com.fasterxml.jackson.annotation.JsonIgnore;
import io.micronaut.core.annotation.Introspected; import io.micronaut.core.annotation.Introspected;
import lombok.Getter; import lombok.Getter;
import lombok.NoArgsConstructor; import lombok.NoArgsConstructor;
import lombok.ToString; import lombok.ToString;
import lombok.experimental.SuperBuilder; import lombok.experimental.SuperBuilder;
import io.swagger.v3.oas.annotations.media.Schema;
import java.util.Objects;
import java.util.regex.Pattern;
@SuperBuilder(toBuilder = true) @SuperBuilder(toBuilder = true)
@Getter @Getter
@@ -46,7 +48,7 @@ public class FlowWithSource extends Flow {
} }
@Override @Override
@Schema(hidden = false) @JsonIgnore(value = false)
public String getSource() { public String getSource() {
return this.source; return this.source;
} }

View File

@@ -267,10 +267,6 @@ public class State {
return this == Type.RUNNING || this == Type.KILLING; return this == Type.RUNNING || this == Type.KILLING;
} }
public boolean onlyRunning() {
return this == Type.RUNNING;
}
public boolean isFailed() { public boolean isFailed() {
return this == Type.FAILED; return this == Type.FAILED;
} }

View File

@@ -1,12 +1,10 @@
package io.kestra.core.models.flows.input; package io.kestra.core.models.flows.input;
import io.kestra.core.exceptions.InputOutputValidationException;
import io.kestra.core.models.flows.Input; import io.kestra.core.models.flows.Input;
import jakarta.annotation.Nullable; import jakarta.annotation.Nullable;
import jakarta.validation.ConstraintViolationException;
import jakarta.validation.constraints.NotNull; import jakarta.validation.constraints.NotNull;
import java.util.Set;
/** /**
* Represents an input along with its associated value and validation state. * Represents an input along with its associated value and validation state.
* *
@@ -14,15 +12,15 @@ import java.util.Set;
* @param value The provided value for the input. * @param value The provided value for the input.
* @param enabled {@code true} if the input is enabled; {@code false} otherwise. * @param enabled {@code true} if the input is enabled; {@code false} otherwise.
* @param isDefault {@code true} if the provided value is the default; {@code false} otherwise. * @param isDefault {@code true} if the provided value is the default; {@code false} otherwise.
* @param exceptions The validation exceptions, if the input value is invalid; {@code null} otherwise. * @param exception The validation exception, if the input value is invalid; {@code null} otherwise.
*/ */
public record InputAndValue( public record InputAndValue(
Input<?> input, Input<?> input,
Object value, Object value,
boolean enabled, boolean enabled,
boolean isDefault, boolean isDefault,
Set<InputOutputValidationException> exceptions) { ConstraintViolationException exception) {
/** /**
* Creates a new {@link InputAndValue} instance. * Creates a new {@link InputAndValue} instance.
* *

View File

@@ -7,7 +7,6 @@ import io.kestra.core.models.property.Property;
import io.kestra.core.models.validations.ManualConstraintViolation; import io.kestra.core.models.validations.ManualConstraintViolation;
import io.kestra.core.validations.Regex; import io.kestra.core.validations.Regex;
import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.ConstraintViolation;
import jakarta.validation.ConstraintViolationException; import jakarta.validation.ConstraintViolationException;
import jakarta.validation.constraints.NotNull; import jakarta.validation.constraints.NotNull;
import lombok.Builder; import lombok.Builder;
@@ -15,7 +14,10 @@ import lombok.Getter;
import lombok.NoArgsConstructor; import lombok.NoArgsConstructor;
import lombok.experimental.SuperBuilder; import lombok.experimental.SuperBuilder;
import java.util.*; import java.util.Collection;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.function.Function; import java.util.function.Function;
@SuperBuilder @SuperBuilder
@@ -75,35 +77,30 @@ public class MultiselectInput extends Input<List<String>> implements ItemTypeInt
@Override @Override
public void validate(List<String> inputs) throws ConstraintViolationException { public void validate(List<String> inputs) throws ConstraintViolationException {
Set<ConstraintViolation<?>> violations = new HashSet<>();
if (values != null && options != null) { if (values != null && options != null) {
violations.add( ManualConstraintViolation.of( throw ManualConstraintViolation.toConstraintViolationException(
"you can't define both `values` and `options`", "you can't define both `values` and `options`",
this, this,
MultiselectInput.class, MultiselectInput.class,
getId(), getId(),
"" ""
)); );
} }
if (!this.getAllowCustomValue()) { if (!this.getAllowCustomValue()) {
for (String input : inputs) { for (String input : inputs) {
List<@Regex String> finalValues = this.values != null ? this.values : this.options; List<@Regex String> finalValues = this.values != null ? this.values : this.options;
if (!finalValues.contains(input)) { if (!finalValues.contains(input)) {
violations.add(ManualConstraintViolation.of( throw ManualConstraintViolation.toConstraintViolationException(
"value `" + input + "` doesn't match the values `" + finalValues + "`", "it must match the values `" + finalValues + "`",
this, this,
MultiselectInput.class, MultiselectInput.class,
getId(), getId(),
input input
)); );
} }
} }
} }
if (!violations.isEmpty()) {
throw ManualConstraintViolation.toConstraintViolationException(violations);
}
} }
/** {@inheritDoc} **/ /** {@inheritDoc} **/
@@ -148,7 +145,7 @@ public class MultiselectInput extends Input<List<String>> implements ItemTypeInt
String type = Optional.ofNullable(result).map(Object::getClass).map(Class::getSimpleName).orElse("<null>"); String type = Optional.ofNullable(result).map(Object::getClass).map(Class::getSimpleName).orElse("<null>");
throw ManualConstraintViolation.toConstraintViolationException( throw ManualConstraintViolation.toConstraintViolationException(
"Invalid expression result. Expected a list of strings", "Invalid expression result. Expected a list of strings, but received " + type,
this, this,
MultiselectInput.class, MultiselectInput.class,
getId(), getId(),

View File

@@ -125,7 +125,7 @@ public class SelectInput extends Input<String> implements RenderableInput {
String type = Optional.ofNullable(result).map(Object::getClass).map(Class::getSimpleName).orElse("<null>"); String type = Optional.ofNullable(result).map(Object::getClass).map(Class::getSimpleName).orElse("<null>");
throw ManualConstraintViolation.toConstraintViolationException( throw ManualConstraintViolation.toConstraintViolationException(
"Invalid expression result. Expected a list of strings", "Invalid expression result. Expected a list of strings, but received " + type,
this, this,
SelectInput.class, SelectInput.class,
getId(), getId(),

View File

@@ -11,7 +11,6 @@ import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import com.google.common.annotations.VisibleForTesting; import com.google.common.annotations.VisibleForTesting;
import io.kestra.core.exceptions.IllegalVariableEvaluationException; import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.runners.RunContext; import io.kestra.core.runners.RunContext;
import io.kestra.core.runners.RunContextProperty;
import io.kestra.core.serializers.JacksonMapper; import io.kestra.core.serializers.JacksonMapper;
import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.constraints.NotNull; import jakarta.validation.constraints.NotNull;
@@ -94,7 +93,7 @@ public class Property<T> {
* @return a new {@link Property} without a pre-rendered value * @return a new {@link Property} without a pre-rendered value
*/ */
public Property<T> skipCache() { public Property<T> skipCache() {
return new Property<>(expression, true); return Property.ofExpression(expression);
} }
/** /**
@@ -157,9 +156,9 @@ public class Property<T> {
/** /**
* Render a property, then convert it to its target type.<br> * Render a property, then convert it to its target type.<br>
* <p> * <p>
* This method is designed to be used only by the {@link RunContextProperty}. * This method is designed to be used only by the {@link io.kestra.core.runners.RunContextProperty}.
* *
* @see RunContextProperty#as(Class) * @see io.kestra.core.runners.RunContextProperty#as(Class)
*/ */
public static <T> T as(Property<T> property, PropertyContext context, Class<T> clazz) throws IllegalVariableEvaluationException { public static <T> T as(Property<T> property, PropertyContext context, Class<T> clazz) throws IllegalVariableEvaluationException {
return as(property, context, clazz, Map.of()); return as(property, context, clazz, Map.of());
@@ -168,57 +167,25 @@ public class Property<T> {
/** /**
* Render a property with additional variables, then convert it to its target type.<br> * Render a property with additional variables, then convert it to its target type.<br>
* <p> * <p>
* This method is designed to be used only by the {@link RunContextProperty}. * This method is designed to be used only by the {@link io.kestra.core.runners.RunContextProperty}.
* *
* @see RunContextProperty#as(Class, Map) * @see io.kestra.core.runners.RunContextProperty#as(Class, Map)
*/ */
public static <T> T as(Property<T> property, PropertyContext context, Class<T> clazz, Map<String, Object> variables) throws IllegalVariableEvaluationException { public static <T> T as(Property<T> property, PropertyContext context, Class<T> clazz, Map<String, Object> variables) throws IllegalVariableEvaluationException {
if (property.skipCache || property.value == null) { if (property.skipCache || property.value == null) {
String rendered = context.render(property.expression, variables); String rendered = context.render(property.expression, variables);
property.value = deserialize(rendered, clazz); property.value = MAPPER.convertValue(rendered, clazz);
} }
return property.value; return property.value;
} }
private static <T> T deserialize(Object rendered, Class<T> clazz) throws IllegalVariableEvaluationException {
try {
return MAPPER.convertValue(rendered, clazz);
} catch (IllegalArgumentException e) {
if (rendered instanceof String str) {
try {
return MAPPER.readValue(str, clazz);
} catch (JsonProcessingException ex) {
throw new IllegalVariableEvaluationException(ex);
}
}
throw new IllegalVariableEvaluationException(e);
}
}
private static <T> T deserialize(Object rendered, JavaType type) throws IllegalVariableEvaluationException {
try {
return MAPPER.convertValue(rendered, type);
} catch (IllegalArgumentException e) {
if (rendered instanceof String str) {
try {
return MAPPER.readValue(str, type);
} catch (JsonProcessingException ex) {
throw new IllegalVariableEvaluationException(ex);
}
}
throw new IllegalVariableEvaluationException(e);
}
}
/** /**
* Render a property then convert it as a list of target type.<br> * Render a property then convert it as a list of target type.<br>
* <p> * <p>
* This method is designed to be used only by the {@link RunContextProperty}. * This method is designed to be used only by the {@link io.kestra.core.runners.RunContextProperty}.
* *
* @see RunContextProperty#asList(Class) * @see io.kestra.core.runners.RunContextProperty#asList(Class)
*/ */
public static <T, I> T asList(Property<T> property, PropertyContext context, Class<I> itemClazz) throws IllegalVariableEvaluationException { public static <T, I> T asList(Property<T> property, PropertyContext context, Class<I> itemClazz) throws IllegalVariableEvaluationException {
return asList(property, context, itemClazz, Map.of()); return asList(property, context, itemClazz, Map.of());
@@ -227,39 +194,37 @@ public class Property<T> {
/** /**
* Render a property with additional variables, then convert it as a list of target type.<br> * Render a property with additional variables, then convert it as a list of target type.<br>
* <p> * <p>
* This method is designed to be used only by the {@link RunContextProperty}. * This method is designed to be used only by the {@link io.kestra.core.runners.RunContextProperty}.
* *
* @see RunContextProperty#asList(Class, Map) * @see io.kestra.core.runners.RunContextProperty#asList(Class, Map)
*/ */
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public static <T, I> T asList(Property<T> property, PropertyContext context, Class<I> itemClazz, Map<String, Object> variables) throws IllegalVariableEvaluationException { public static <T, I> T asList(Property<T> property, PropertyContext context, Class<I> itemClazz, Map<String, Object> variables) throws IllegalVariableEvaluationException {
if (property.skipCache || property.value == null) { if (property.skipCache || property.value == null) {
JavaType type = MAPPER.getTypeFactory().constructCollectionLikeType(List.class, itemClazz); JavaType type = MAPPER.getTypeFactory().constructCollectionLikeType(List.class, itemClazz);
String trimmedExpression = property.expression.trim(); try {
// We need to detect if the expression is already a list or if it's a pebble expression (for eg. referencing a variable containing a list). String trimmedExpression = property.expression.trim();
// Doing that allows us to, if it's an expression, first render then read it as a list. // We need to detect if the expression is already a list or if it's a pebble expression (for eg. referencing a variable containing a list).
if (trimmedExpression.startsWith("{{") && trimmedExpression.endsWith("}}")) { // Doing that allows us to, if it's an expression, first render then read it as a list.
property.value = deserialize(context.render(property.expression, variables), type); if (trimmedExpression.startsWith("{{") && trimmedExpression.endsWith("}}")) {
} property.value = MAPPER.readValue(context.render(property.expression, variables), type);
// Otherwise, if it's already a list, we read it as a list first then render it from run context which handle list rendering by rendering each item of the list }
else { // Otherwise, if it's already a list, we read it as a list first then render it from run context which handle list rendering by rendering each item of the list
List<?> asRawList = deserialize(property.expression, List.class); else {
property.value = (T) asRawList.stream() List<?> asRawList = MAPPER.readValue(property.expression, List.class);
.map(throwFunction(item -> { property.value = (T) asRawList.stream()
Object rendered = null; .map(throwFunction(item -> {
if (item instanceof String str) { if (item instanceof String str) {
rendered = context.render(str, variables); return MAPPER.convertValue(context.render(str, variables), itemClazz);
} else if (item instanceof Map map) { } else if (item instanceof Map map) {
rendered = context.render(map, variables); return MAPPER.convertValue(context.render(map, variables), itemClazz);
} }
return item;
if (rendered != null) { }))
return deserialize(rendered, itemClazz); .toList();
} }
} catch (JsonProcessingException e) {
return item; throw new IllegalVariableEvaluationException(e);
}))
.toList();
} }
} }
@@ -269,9 +234,9 @@ public class Property<T> {
/** /**
* Render a property then convert it as a map of target types.<br> * Render a property then convert it as a map of target types.<br>
* <p> * <p>
* This method is designed to be used only by the {@link RunContextProperty}. * This method is designed to be used only by the {@link io.kestra.core.runners.RunContextProperty}.
* *
* @see RunContextProperty#asMap(Class, Class) * @see io.kestra.core.runners.RunContextProperty#asMap(Class, Class)
*/ */
public static <T, K, V> T asMap(Property<T> property, RunContext runContext, Class<K> keyClass, Class<V> valueClass) throws IllegalVariableEvaluationException { public static <T, K, V> T asMap(Property<T> property, RunContext runContext, Class<K> keyClass, Class<V> valueClass) throws IllegalVariableEvaluationException {
return asMap(property, runContext, keyClass, valueClass, Map.of()); return asMap(property, runContext, keyClass, valueClass, Map.of());
@@ -283,7 +248,7 @@ public class Property<T> {
* This method is safe to be used as many times as you want as the rendering and conversion will be cached. * This method is safe to be used as many times as you want as the rendering and conversion will be cached.
* Warning, due to the caching mechanism, this method is not thread-safe. * Warning, due to the caching mechanism, this method is not thread-safe.
* *
* @see RunContextProperty#asMap(Class, Class, Map) * @see io.kestra.core.runners.RunContextProperty#asMap(Class, Class, Map)
*/ */
@SuppressWarnings({"rawtypes", "unchecked"}) @SuppressWarnings({"rawtypes", "unchecked"})
public static <T, K, V> T asMap(Property<T> property, RunContext runContext, Class<K> keyClass, Class<V> valueClass, Map<String, Object> variables) throws IllegalVariableEvaluationException { public static <T, K, V> T asMap(Property<T> property, RunContext runContext, Class<K> keyClass, Class<V> valueClass, Map<String, Object> variables) throws IllegalVariableEvaluationException {
@@ -295,12 +260,12 @@ public class Property<T> {
// We need to detect if the expression is already a map or if it's a pebble expression (for eg. referencing a variable containing a map). // We need to detect if the expression is already a map or if it's a pebble expression (for eg. referencing a variable containing a map).
// Doing that allows us to, if it's an expression, first render then read it as a map. // Doing that allows us to, if it's an expression, first render then read it as a map.
if (trimmedExpression.startsWith("{{") && trimmedExpression.endsWith("}}")) { if (trimmedExpression.startsWith("{{") && trimmedExpression.endsWith("}}")) {
property.value = deserialize(runContext.render(property.expression, variables), targetMapType); property.value = MAPPER.readValue(runContext.render(property.expression, variables), targetMapType);
} }
// Otherwise if it's already a map we read it as a map first then render it from run context which handle map rendering by rendering each entry of the map (otherwise it will fail with nested expressions in values for eg.) // Otherwise if it's already a map we read it as a map first then render it from run context which handle map rendering by rendering each entry of the map (otherwise it will fail with nested expressions in values for eg.)
else { else {
Map asRawMap = MAPPER.readValue(property.expression, Map.class); Map asRawMap = MAPPER.readValue(property.expression, Map.class);
property.value = deserialize(runContext.render(asRawMap, variables), targetMapType); property.value = MAPPER.convertValue(runContext.render(asRawMap, variables), targetMapType);
} }
} catch (JsonProcessingException e) { } catch (JsonProcessingException e) {
throw new IllegalVariableEvaluationException(e); throw new IllegalVariableEvaluationException(e);

View File

@@ -82,12 +82,6 @@ abstract public class AbstractTrigger implements TriggerInterface {
@PluginProperty(hidden = true, group = PluginProperty.CORE_GROUP) @PluginProperty(hidden = true, group = PluginProperty.CORE_GROUP)
private boolean failOnTriggerError = false; private boolean failOnTriggerError = false;
@PluginProperty(group = PluginProperty.CORE_GROUP)
@Schema(
title = "Specifies whether a trigger is allowed to start a new execution even if a previous run is still in progress."
)
private boolean allowConcurrent = false;
/** /**
* For backward compatibility: we rename minLogLevel to logLevel. * For backward compatibility: we rename minLogLevel to logLevel.
* @deprecated use {@link #logLevel} instead * @deprecated use {@link #logLevel} instead

View File

@@ -1,37 +1,22 @@
package io.kestra.core.models.triggers; package io.kestra.core.models.triggers;
import io.kestra.core.exceptions.IllegalVariableEvaluationException; import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.models.annotations.PluginProperty;
import io.kestra.core.models.conditions.ConditionContext; import io.kestra.core.models.conditions.ConditionContext;
import io.kestra.core.runners.RunContext; import io.kestra.core.runners.RunContext;
import io.swagger.v3.oas.annotations.media.Schema;
import java.time.ZonedDateTime; import java.time.ZonedDateTime;
import java.util.Map;
public interface Schedulable extends PollingTriggerInterface{ public interface Schedulable extends PollingTriggerInterface{
String PLUGIN_PROPERTY_RECOVER_MISSED_SCHEDULES = "recoverMissedSchedules"; String PLUGIN_PROPERTY_RECOVER_MISSED_SCHEDULES = "recoverMissedSchedules";
@Schema(
title = "The inputs to pass to the scheduled flow"
)
@PluginProperty(dynamic = true)
Map<String, Object> getInputs();
@Schema(
title = "Action to take in the case of missed schedules",
description = "`ALL` will recover all missed schedules, `LAST` will only recovered the last missing one, `NONE` will not recover any missing schedule.\n" +
"The default is `ALL` unless a different value is configured using the global plugin configuration."
)
@PluginProperty
RecoverMissedSchedules getRecoverMissedSchedules();
/** /**
* Compute the previous evaluation of a trigger. * Compute the previous evaluation of a trigger.
* This is used when a trigger misses some schedule to compute the next date to evaluate in the past. * This is used when a trigger misses some schedule to compute the next date to evaluate in the past.
*/ */
ZonedDateTime previousEvaluationDate(ConditionContext conditionContext) throws IllegalVariableEvaluationException; ZonedDateTime previousEvaluationDate(ConditionContext conditionContext) throws IllegalVariableEvaluationException;
RecoverMissedSchedules getRecoverMissedSchedules();
/** /**
* Load the default RecoverMissedSchedules from plugin property, or else ALL. * Load the default RecoverMissedSchedules from plugin property, or else ALL.
*/ */

View File

@@ -172,7 +172,7 @@ public class Trigger extends TriggerContext implements HasUID {
if (abstractTrigger instanceof PollingTriggerInterface pollingTriggerInterface) { if (abstractTrigger instanceof PollingTriggerInterface pollingTriggerInterface) {
try { try {
nextDate = pollingTriggerInterface.nextEvaluationDate(conditionContext, lastTrigger); nextDate = pollingTriggerInterface.nextEvaluationDate(conditionContext, Optional.empty());
} catch (InvalidTriggerConfigurationException e) { } catch (InvalidTriggerConfigurationException e) {
disabled = true; disabled = true;
} }

View File

@@ -6,9 +6,12 @@ import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.ExecutionTrigger; import io.kestra.core.models.executions.ExecutionTrigger;
import io.kestra.core.models.tasks.Output; import io.kestra.core.models.tasks.Output;
import io.kestra.core.models.flows.State; import io.kestra.core.models.flows.State;
import io.kestra.core.runners.DefaultRunContext;
import io.kestra.core.runners.FlowInputOutput;
import io.kestra.core.runners.RunContext; import io.kestra.core.runners.RunContext;
import io.kestra.core.utils.IdUtils; import io.kestra.core.utils.IdUtils;
import io.kestra.core.utils.ListUtils; import io.kestra.core.utils.ListUtils;
import java.time.ZonedDateTime;
import java.util.*; import java.util.*;
public abstract class TriggerService { public abstract class TriggerService {
@@ -48,6 +51,58 @@ public abstract class TriggerService {
return generateExecution(IdUtils.create(), trigger, context, executionTrigger, conditionContext); return generateExecution(IdUtils.create(), trigger, context, executionTrigger, conditionContext);
} }
public static Execution generateScheduledExecution(
AbstractTrigger trigger,
ConditionContext conditionContext,
TriggerContext context,
List<Label> labels,
Map<String, Object> inputs,
Map<String, Object> variables,
Optional<ZonedDateTime> scheduleDate
) {
RunContext runContext = conditionContext.getRunContext();
ExecutionTrigger executionTrigger = ExecutionTrigger.of(trigger, variables);
List<Label> executionLabels = new ArrayList<>(ListUtils.emptyOnNull(labels));
if (executionLabels.stream().noneMatch(label -> Label.CORRELATION_ID.equals(label.key()))) {
// add a correlation ID if none exist
executionLabels.add(new Label(Label.CORRELATION_ID, runContext.getTriggerExecutionId()));
}
Execution execution = Execution.builder()
.id(runContext.getTriggerExecutionId())
.tenantId(context.getTenantId())
.namespace(context.getNamespace())
.flowId(context.getFlowId())
.flowRevision(conditionContext.getFlow().getRevision())
.variables(conditionContext.getFlow().getVariables())
.labels(executionLabels)
.state(new State())
.trigger(executionTrigger)
.scheduleDate(scheduleDate.map(date -> date.toInstant()).orElse(null))
.build();
Map<String, Object> allInputs = new HashMap<>();
// add flow inputs with default value
var flow = conditionContext.getFlow();
if (flow.getInputs() != null) {
flow.getInputs().stream()
.filter(input -> input.getDefaults() != null)
.forEach(input -> allInputs.put(input.getId(), input.getDefaults()));
}
if (inputs != null) {
allInputs.putAll(inputs);
}
// add inputs and inject defaults
if (!allInputs.isEmpty()) {
FlowInputOutput flowInputOutput = ((DefaultRunContext)runContext).getApplicationContext().getBean(FlowInputOutput.class);
execution = execution.withInputs(flowInputOutput.readExecutionInputs(conditionContext.getFlow(), execution, allInputs));
}
return execution;
}
private static Execution generateExecution( private static Execution generateExecution(
String id, String id,
AbstractTrigger trigger, AbstractTrigger trigger,
@@ -56,7 +111,6 @@ public abstract class TriggerService {
ConditionContext conditionContext ConditionContext conditionContext
) { ) {
List<Label> executionLabels = new ArrayList<>(ListUtils.emptyOnNull(trigger.getLabels())); List<Label> executionLabels = new ArrayList<>(ListUtils.emptyOnNull(trigger.getLabels()));
executionLabels.add(new Label(Label.FROM, "trigger"));
if (executionLabels.stream().noneMatch(label -> Label.CORRELATION_ID.equals(label.key()))) { if (executionLabels.stream().noneMatch(label -> Label.CORRELATION_ID.equals(label.key()))) {
// add a correlation ID if none exist // add a correlation ID if none exist
executionLabels.add(new Label(Label.CORRELATION_ID, id)); executionLabels.add(new Label(Label.CORRELATION_ID, id));

View File

@@ -67,11 +67,6 @@ public class ManualConstraintViolation<T> implements ConstraintViolation<T> {
invalidValue invalidValue
))); )));
} }
public static <T> ConstraintViolationException toConstraintViolationException(
Set<? extends ConstraintViolation<?>> constraintViolations
) {
return new ConstraintViolationException(constraintViolations);
}
public String getMessageTemplate() { public String getMessageTemplate() {
return "{messageTemplate}"; return "{messageTemplate}";

View File

@@ -1,25 +0,0 @@
package io.kestra.core.plugins.notifications;
import io.kestra.core.models.property.Property;
import io.swagger.v3.oas.annotations.media.Schema;
import java.util.Map;
public interface ExecutionInterface {
@Schema(
title = "The execution id to use",
description = "Default is the current execution, " +
"change it to {{ trigger.executionId }} if you use this task with a Flow trigger to use the original execution."
)
Property<String> getExecutionId();
@Schema(
title = "Custom fields to be added on notification"
)
Property<Map<String, Object>> getCustomFields();
@Schema(
title = "Custom message to be added on notification"
)
Property<String> getCustomMessage();
}

View File

@@ -1,140 +0,0 @@
package io.kestra.core.plugins.notifications;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.property.Property;
import io.kestra.core.models.tasks.retrys.Exponential;
import io.kestra.core.repositories.ExecutionRepositoryInterface;
import io.kestra.core.runners.DefaultRunContext;
import io.kestra.core.runners.RunContext;
import io.kestra.core.serializers.JacksonMapper;
import io.kestra.core.utils.ListUtils;
import io.kestra.core.utils.RetryUtils;
import io.kestra.core.utils.UriProvider;
import java.time.Duration;
import java.util.*;
public final class ExecutionService {
private ExecutionService() {}
public static Execution findExecution(RunContext runContext, Property<String> executionId) throws IllegalVariableEvaluationException, NoSuchElementException {
ExecutionRepositoryInterface executionRepository = ((DefaultRunContext) runContext).getApplicationContext().getBean(ExecutionRepositoryInterface.class);
RetryUtils.Instance<Execution, NoSuchElementException> retryInstance = RetryUtils
.of(Exponential.builder()
.delayFactor(2.0)
.interval(Duration.ofSeconds(1))
.maxInterval(Duration.ofSeconds(15))
.maxAttempts(-1)
.maxDuration(Duration.ofMinutes(10))
.build(),
runContext.logger()
);
var executionRendererId = runContext.render(executionId).as(String.class).orElse(null);
var flowTriggerExecutionState = getOptionalFlowTriggerExecutionState(runContext);
var flowVars = (Map<String, String>) runContext.getVariables().get("flow");
var isCurrentExecution = isCurrentExecution(runContext, executionRendererId);
if (isCurrentExecution) {
runContext.logger().info("Loading execution data for the current execution.");
}
return retryInstance.run(
NoSuchElementException.class,
() -> executionRepository.findById(flowVars.get("tenantId"), executionRendererId)
.filter(foundExecution -> isExecutionInTheWantedState(foundExecution, isCurrentExecution, flowTriggerExecutionState))
.orElseThrow(() -> new NoSuchElementException("Unable to find execution '" + executionRendererId + "'"))
);
}
/**
* ExecutionRepository can be out of sync in ElasticSearch stack, with this filter we try to mitigate that
*
* @param execution the Execution we fetched from ExecutionRepository
* @param isCurrentExecution true if this *Execution Task is configured to send a notification for the current Execution
* @param flowTriggerExecutionState the Execution State that triggered the Flow trigger, if any
* @return true if we think we fetched the right Execution data for our usecase
*/
public static boolean isExecutionInTheWantedState(Execution execution, boolean isCurrentExecution, Optional<String> flowTriggerExecutionState) {
if (isCurrentExecution) {
// we don't wait for current execution to be terminated as it could not be possible as long as this task is running
return true;
}
if (flowTriggerExecutionState.isPresent()) {
// we were triggered by a Flow trigger that can be, for example: PAUSED
if (flowTriggerExecutionState.get().equals(State.Type.RUNNING.toString())) {
// RUNNING special case: we take the first state we got
return true;
} else {
// to handle the case where the ExecutionRepository is out of sync in ElasticSearch stack,
// we try to match an Execution with the same state
return execution.getState().getCurrent().name().equals(flowTriggerExecutionState.get());
}
} else {
return execution.getState().getCurrent().isTerminated();
}
}
public static Map<String, Object> executionMap(RunContext runContext, ExecutionInterface executionInterface) throws IllegalVariableEvaluationException {
Execution execution = findExecution(runContext, executionInterface.getExecutionId());
UriProvider uriProvider = ((DefaultRunContext) runContext).getApplicationContext().getBean(UriProvider.class);
Map<String, Object> templateRenderMap = new HashMap<>();
templateRenderMap.put("duration", execution.getState().humanDuration());
templateRenderMap.put("startDate", execution.getState().getStartDate());
templateRenderMap.put("link", uriProvider.executionUrl(execution));
templateRenderMap.put("execution", JacksonMapper.toMap(execution));
runContext.render(executionInterface.getCustomMessage())
.as(String.class)
.ifPresent(s -> templateRenderMap.put("customMessage", s));
final Map<String, Object> renderedCustomFields = runContext.render(executionInterface.getCustomFields()).asMap(String.class, Object.class);
if (!renderedCustomFields.isEmpty()) {
templateRenderMap.put("customFields", renderedCustomFields);
}
var isCurrentExecution = isCurrentExecution(runContext, execution.getId());
List<TaskRun> taskRuns;
if (isCurrentExecution) {
taskRuns = execution.getTaskRunList();
} else {
taskRuns = execution.getTaskRunList().stream()
.filter(t -> (execution.hasFailed() ? State.Type.FAILED : State.Type.SUCCESS).equals(t.getState().getCurrent()))
.toList();
}
if (!ListUtils.isEmpty(taskRuns)) {
TaskRun lastTaskRun = taskRuns.getLast();
templateRenderMap.put("firstFailed", State.Type.FAILED.equals(lastTaskRun.getState().getCurrent()) ? lastTaskRun : false);
templateRenderMap.put("lastTask", lastTaskRun);
}
return templateRenderMap;
}
/**
* if there is a state, we assume this is a Flow trigger with type: {@link io.kestra.plugin.core.trigger.Flow.Output}
*
* @return the state of the execution that triggered the Flow trigger, or empty if another usecase/trigger
*/
private static Optional<String> getOptionalFlowTriggerExecutionState(RunContext runContext) {
var triggerVar = Optional.ofNullable(
runContext.getVariables().get("trigger")
);
return triggerVar.map(trigger -> ((Map<String, String>) trigger).get("state"));
}
private static boolean isCurrentExecution(RunContext runContext, String executionId) {
var executionVars = (Map<String, String>) runContext.getVariables().get("execution");
return executionId.equals(executionVars.get("id"));
}
}

View File

@@ -12,7 +12,6 @@ import io.kestra.core.models.dashboards.charts.DataChart;
import io.kestra.core.plugins.DefaultPluginRegistry; import io.kestra.core.plugins.DefaultPluginRegistry;
import io.kestra.core.plugins.PluginRegistry; import io.kestra.core.plugins.PluginRegistry;
import io.kestra.core.serializers.JacksonMapper; import io.kestra.core.serializers.JacksonMapper;
import io.micronaut.context.exceptions.NoSuchBeanException;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@@ -73,7 +72,7 @@ public final class PluginDeserializer<T extends Plugin> extends JsonDeserializer
// By default, if no plugin-registry is configured retrieve // By default, if no plugin-registry is configured retrieve
// the one configured from the static Kestra's context. // the one configured from the static Kestra's context.
pluginRegistry = KestraContext.getContext().getPluginRegistry(); pluginRegistry = KestraContext.getContext().getPluginRegistry();
} catch (IllegalStateException | NoSuchBeanException ignore) { } catch (IllegalStateException ignore) {
// This error can only happen if the KestraContext is not initialized (i.e. in unit tests). // This error can only happen if the KestraContext is not initialized (i.e. in unit tests).
log.error("No plugin registry was initialized. Use default implementation."); log.error("No plugin registry was initialized. Use default implementation.");
pluginRegistry = DefaultPluginRegistry.getOrCreate(); pluginRegistry = DefaultPluginRegistry.getOrCreate();

View File

@@ -1,10 +1,10 @@
package io.kestra.core.repositories; package io.kestra.core.repositories;
import io.kestra.core.models.Setting; import io.kestra.core.models.Setting;
import jakarta.validation.ConstraintViolationException;
import java.util.List; import java.util.List;
import java.util.Optional; import java.util.Optional;
import jakarta.validation.ConstraintViolationException;
public interface SettingRepositoryInterface { public interface SettingRepositoryInterface {
Optional<Setting> findByKey(String key); Optional<Setting> findByKey(String key);
@@ -13,7 +13,5 @@ public interface SettingRepositoryInterface {
Setting save(Setting setting) throws ConstraintViolationException; Setting save(Setting setting) throws ConstraintViolationException;
Setting internalSave(Setting setting) throws ConstraintViolationException;
Setting delete(Setting setting); Setting delete(Setting setting);
} }

View File

@@ -16,8 +16,8 @@ import java.util.function.Function;
public interface TriggerRepositoryInterface extends QueryBuilderInterface<Triggers.Fields> { public interface TriggerRepositoryInterface extends QueryBuilderInterface<Triggers.Fields> {
Optional<Trigger> findLast(TriggerContext trigger); Optional<Trigger> findLast(TriggerContext trigger);
Optional<Trigger> findByUid(String uid); Optional<Trigger> findByExecution(Execution execution);
List<Trigger> findAll(String tenantId); List<Trigger> findAll(String tenantId);
List<Trigger> findAllForAllTenants(); List<Trigger> findAllForAllTenants();

View File

@@ -6,12 +6,10 @@ import com.google.common.base.CaseFormat;
import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap;
import io.kestra.core.exceptions.IllegalVariableEvaluationException; import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.metrics.MetricRegistry; import io.kestra.core.metrics.MetricRegistry;
import io.kestra.core.models.Plugin;
import io.kestra.core.models.executions.AbstractMetricEntry; import io.kestra.core.models.executions.AbstractMetricEntry;
import io.kestra.core.models.property.Property; import io.kestra.core.models.property.Property;
import io.kestra.core.models.tasks.Task; import io.kestra.core.models.tasks.Task;
import io.kestra.core.models.triggers.AbstractTrigger; import io.kestra.core.models.triggers.AbstractTrigger;
import io.kestra.core.plugins.PluginConfigurations;
import io.kestra.core.services.KVStoreService; import io.kestra.core.services.KVStoreService;
import io.kestra.core.storages.Storage; import io.kestra.core.storages.Storage;
import io.kestra.core.storages.StorageInterface; import io.kestra.core.storages.StorageInterface;
@@ -237,14 +235,6 @@ public class DefaultRunContext extends RunContext {
return runContext; return runContext;
} }
@Override
public RunContext cloneForPlugin(Plugin plugin) {
PluginConfigurations pluginConfigurations = applicationContext.getBean(PluginConfigurations.class);
DefaultRunContext runContext = clone();
runContext.pluginConfiguration = pluginConfigurations.getConfigurationByPluginTypeOrAliases(plugin.getType(), plugin.getClass());
return runContext;
}
/** /**
* {@inheritDoc} * {@inheritDoc}
*/ */
@@ -599,11 +589,6 @@ public class DefaultRunContext extends RunContext {
return localPath; return localPath;
} }
@Override
public InputAndOutput inputAndOutput() {
return new InputAndOutputImpl(this.applicationContext, this);
}
/** /**
* Builder class for constructing new {@link DefaultRunContext} objects. * Builder class for constructing new {@link DefaultRunContext} objects.
*/ */

View File

@@ -189,11 +189,12 @@ public final class ExecutableUtils {
variables.put("taskRunIteration", currentTaskRun.getIteration()); variables.put("taskRunIteration", currentTaskRun.getIteration());
} }
FlowInputOutput flowInputOutput = ((DefaultRunContext)runContext).getApplicationContext().getBean(FlowInputOutput.class);
Instant scheduleOnDate = runContext.render(scheduleDate).as(ZonedDateTime.class).map(date -> date.toInstant()).orElse(null); Instant scheduleOnDate = runContext.render(scheduleDate).as(ZonedDateTime.class).map(date -> date.toInstant()).orElse(null);
Execution execution = Execution Execution execution = Execution
.newExecution( .newExecution(
flow, flow,
(f, e) -> runContext.inputAndOutput().readInputs(f, e, inputs), (f, e) -> flowInputOutput.readExecutionInputs(f, e, inputs),
newLabels, newLabels,
Optional.empty()) Optional.empty())
.withTrigger(ExecutionTrigger.builder() .withTrigger(ExecutionTrigger.builder()

View File

@@ -3,13 +3,13 @@ package io.kestra.core.runners;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
import io.kestra.core.encryption.EncryptionService; import io.kestra.core.encryption.EncryptionService;
import io.kestra.core.exceptions.IllegalVariableEvaluationException; import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.exceptions.KestraRuntimeException;
import io.kestra.core.exceptions.InputOutputValidationException;
import io.kestra.core.models.executions.Execution; import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Data; import io.kestra.core.models.flows.Data;
import io.kestra.core.models.flows.DependsOn; import io.kestra.core.models.flows.DependsOn;
import io.kestra.core.models.flows.FlowInterface; import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.Input; import io.kestra.core.models.flows.Input;
import io.kestra.core.models.flows.Output;
import io.kestra.core.models.flows.RenderableInput; import io.kestra.core.models.flows.RenderableInput;
import io.kestra.core.models.flows.Type; import io.kestra.core.models.flows.Type;
import io.kestra.core.models.flows.input.FileInput; import io.kestra.core.models.flows.input.FileInput;
@@ -19,6 +19,7 @@ import io.kestra.core.models.property.Property;
import io.kestra.core.models.property.PropertyContext; import io.kestra.core.models.property.PropertyContext;
import io.kestra.core.models.property.URIFetcher; import io.kestra.core.models.property.URIFetcher;
import io.kestra.core.models.tasks.common.EncryptedString; import io.kestra.core.models.tasks.common.EncryptedString;
import io.kestra.core.models.validations.ManualConstraintViolation;
import io.kestra.core.serializers.JacksonMapper; import io.kestra.core.serializers.JacksonMapper;
import io.kestra.core.storages.StorageContext; import io.kestra.core.storages.StorageContext;
import io.kestra.core.storages.StorageInterface; import io.kestra.core.storages.StorageInterface;
@@ -157,7 +158,11 @@ public class FlowInputOutput {
File tempFile = File.createTempFile(prefix, fileExtension); File tempFile = File.createTempFile(prefix, fileExtension);
try (var inputStream = fileUpload.getInputStream(); try (var inputStream = fileUpload.getInputStream();
var outputStream = new FileOutputStream(tempFile)) { var outputStream = new FileOutputStream(tempFile)) {
inputStream.transferTo(outputStream); long transferredBytes = inputStream.transferTo(outputStream);
if (transferredBytes == 0) {
sink.error(new KestraRuntimeException("Can't upload file: " + fileUpload.getFilename()));
return;
}
URI from = storageInterface.from(execution, inputId, fileName, tempFile); URI from = storageInterface.from(execution, inputId, fileName, tempFile);
sink.next(Map.entry(inputId, from.toString())); sink.next(Map.entry(inputId, from.toString()));
} finally { } finally {
@@ -208,8 +213,8 @@ public class FlowInputOutput {
.filter(InputAndValue::enabled) .filter(InputAndValue::enabled)
.map(it -> { .map(it -> {
//TODO check to return all exception at-once. //TODO check to return all exception at-once.
if (it.exceptions() != null && !it.exceptions().isEmpty()) { if (it.exception() != null) {
throw InputOutputValidationException.merge(it.exceptions()); throw it.exception();
} }
return new AbstractMap.SimpleEntry<>(it.input().getId(), it.value()); return new AbstractMap.SimpleEntry<>(it.input().getId(), it.value());
}) })
@@ -293,9 +298,13 @@ public class FlowInputOutput {
try { try {
isInputEnabled = Boolean.TRUE.equals(runContext.renderTyped(dependsOnCondition.get())); isInputEnabled = Boolean.TRUE.equals(runContext.renderTyped(dependsOnCondition.get()));
} catch (IllegalVariableEvaluationException e) { } catch (IllegalVariableEvaluationException e) {
resolvable.resolveWithError( resolvable.resolveWithError(ManualConstraintViolation.toConstraintViolationException(
InputOutputValidationException.of("Invalid condition: " + e.getMessage()) "Invalid condition: " + e.getMessage(),
); input,
(Class<Input>)input.getClass(),
input.getId(),
this
));
isInputEnabled = false; isInputEnabled = false;
} }
} }
@@ -328,7 +337,7 @@ public class FlowInputOutput {
// validate and parse input value // validate and parse input value
if (value == null) { if (value == null) {
if (input.getRequired()) { if (input.getRequired()) {
resolvable.resolveWithError(InputOutputValidationException.of("Missing required input:" + input.getId())); resolvable.resolveWithError(input.toConstraintViolationException("missing required input", null));
} else { } else {
resolvable.resolveWithValue(null); resolvable.resolveWithValue(null);
} }
@@ -338,18 +347,17 @@ public class FlowInputOutput {
parsedInput.ifPresent(parsed -> ((Input) resolvable.get().input()).validate(parsed.getValue())); parsedInput.ifPresent(parsed -> ((Input) resolvable.get().input()).validate(parsed.getValue()));
parsedInput.ifPresent(typed -> resolvable.resolveWithValue(typed.getValue())); parsedInput.ifPresent(typed -> resolvable.resolveWithValue(typed.getValue()));
} catch (ConstraintViolationException e) { } catch (ConstraintViolationException e) {
Input<?> finalInput = input; ConstraintViolationException exception = e.getConstraintViolations().size() == 1 ?
Set<InputOutputValidationException> exceptions = e.getConstraintViolations().stream() input.toConstraintViolationException(List.copyOf(e.getConstraintViolations()).getFirst().getMessage(), value) :
.map(c-> InputOutputValidationException.of(c.getMessage(), finalInput)) input.toConstraintViolationException(e.getMessage(), value);
.collect(Collectors.toSet()); resolvable.resolveWithError(exception);
resolvable.resolveWithError(exceptions);
} }
} }
} catch (IllegalArgumentException e){ } catch (ConstraintViolationException e) {
resolvable.resolveWithError(InputOutputValidationException.of(e.getMessage(), input)); resolvable.resolveWithError(e);
} } catch (Exception e) {
catch (Exception e) { ConstraintViolationException exception = input.toConstraintViolationException(e instanceof IllegalArgumentException ? e.getMessage() : e.toString(), resolvable.get().value());
resolvable.resolveWithError(InputOutputValidationException.of(e.getMessage())); resolvable.resolveWithError(exception);
} }
return resolvable.get(); return resolvable.get();
@@ -374,11 +382,11 @@ public class FlowInputOutput {
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
private static <T> Object resolveDefaultPropertyAs(Input<?> input, PropertyContext renderer, Class<T> clazz) throws IllegalVariableEvaluationException { private static <T> Object resolveDefaultPropertyAs(Input<?> input, PropertyContext renderer, Class<T> clazz) throws IllegalVariableEvaluationException {
return Property.as((Property<T>) input.getDefaults().skipCache(), renderer, clazz); return Property.as((Property<T>) input.getDefaults(), renderer, clazz);
} }
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
private static <T> Object resolveDefaultPropertyAsList(Input<?> input, PropertyContext renderer, Class<T> clazz) throws IllegalVariableEvaluationException { private static <T> Object resolveDefaultPropertyAsList(Input<?> input, PropertyContext renderer, Class<T> clazz) throws IllegalVariableEvaluationException {
return Property.asList((Property<List<T>>) input.getDefaults().skipCache(), renderer, clazz); return Property.asList((Property<List<T>>) input.getDefaults(), renderer, clazz);
} }
private RunContext buildRunContextForExecutionAndInputs(final FlowInterface flow, final Execution execution, Map<String, InputAndValue> dependencies, final boolean decryptSecrets) { private RunContext buildRunContextForExecutionAndInputs(final FlowInterface flow, final Execution execution, Map<String, InputAndValue> dependencies, final boolean decryptSecrets) {
@@ -437,12 +445,8 @@ public class FlowInputOutput {
} }
return entry; return entry;
}); });
} } catch (Exception e) {
catch (IllegalArgumentException e){ throw output.toConstraintViolationException(e.getMessage(), current);
throw InputOutputValidationException.of(e.getMessage(), output);
}
catch (Exception e) {
throw InputOutputValidationException.of(e.getMessage());
} }
}) })
.filter(Optional::isPresent) .filter(Optional::isPresent)
@@ -498,14 +502,14 @@ public class FlowInputOutput {
yield storageInterface.from(execution, id, current.toString().substring(current.toString().lastIndexOf("/") + 1), new File(current.toString())); yield storageInterface.from(execution, id, current.toString().substring(current.toString().lastIndexOf("/") + 1), new File(current.toString()));
} }
} }
case JSON -> (current instanceof Map || current instanceof Collection<?>) ? current : JacksonMapper.toObject(current.toString()); case JSON -> JacksonMapper.toObject(current.toString());
case YAML -> (current instanceof Map || current instanceof Collection<?>) ? current : YAML_MAPPER.readValue(current.toString(), JacksonMapper.OBJECT_TYPE_REFERENCE); case YAML -> YAML_MAPPER.readValue(current.toString(), JacksonMapper.OBJECT_TYPE_REFERENCE);
case URI -> { case URI -> {
Matcher matcher = URI_PATTERN.matcher(current.toString()); Matcher matcher = URI_PATTERN.matcher(current.toString());
if (matcher.matches()) { if (matcher.matches()) {
yield current.toString(); yield current.toString();
} else { } else {
throw new IllegalArgumentException("Invalid URI format."); throw new IllegalArgumentException("Expected `URI` but received `" + current + "`");
} }
} }
case ARRAY, MULTISELECT -> { case ARRAY, MULTISELECT -> {
@@ -535,10 +539,34 @@ public class FlowInputOutput {
} catch (IllegalArgumentException e) { } catch (IllegalArgumentException e) {
throw e; throw e;
} catch (Throwable e) { } catch (Throwable e) {
throw new Exception(" errors:\n```\n" + e.getMessage() + "\n```"); throw new Exception("Expected `" + type + "` but received `" + current + "` with errors:\n```\n" + e.getMessage() + "\n```");
} }
} }
public static Map<String, Object> renderFlowOutputs(List<Output> outputs, RunContext runContext) throws IllegalVariableEvaluationException {
if (outputs == null) return Map.of();
// render required outputs
Map<String, Object> outputsById = outputs
.stream()
.filter(output -> output.getRequired() == null || output.getRequired())
.collect(HashMap::new, (map, entry) -> map.put(entry.getId(), entry.getValue()), Map::putAll);
outputsById = runContext.render(outputsById);
// render optional outputs one by one to catch, log, and skip any error.
for (io.kestra.core.models.flows.Output output : outputs) {
if (Boolean.FALSE.equals(output.getRequired())) {
try {
outputsById.putAll(runContext.render(Map.of(output.getId(), output.getValue())));
} catch (Exception e) {
runContext.logger().warn("Failed to render optional flow output '{}'. Output is ignored.", output.getId(), e);
outputsById.put(output.getId(), null);
}
}
}
return outputsById;
}
/** /**
* Mutable wrapper to hold a flow's input, and it's resolved value. * Mutable wrapper to hold a flow's input, and it's resolved value.
*/ */
@@ -567,30 +595,27 @@ public class FlowInputOutput {
} }
public void isDefault(boolean isDefault) { public void isDefault(boolean isDefault) {
this.input = new InputAndValue(this.input.input(), this.input.value(), this.input.enabled(), isDefault, this.input.exceptions()); this.input = new InputAndValue(this.input.input(), this.input.value(), this.input.enabled(), isDefault, this.input.exception());
} }
public void setInput(final Input<?> input) { public void setInput(final Input<?> input) {
this.input = new InputAndValue(input, this.input.value(), this.input.enabled(), this.input.isDefault(), this.input.exceptions()); this.input = new InputAndValue(input, this.input.value(), this.input.enabled(), this.input.isDefault(), this.input.exception());
} }
public void resolveWithEnabled(boolean enabled) { public void resolveWithEnabled(boolean enabled) {
this.input = new InputAndValue(this.input.input(), input.value(), enabled, this.input.isDefault(), this.input.exceptions()); this.input = new InputAndValue(this.input.input(), input.value(), enabled, this.input.isDefault(), this.input.exception());
markAsResolved(); markAsResolved();
} }
public void resolveWithValue(@Nullable Object value) { public void resolveWithValue(@Nullable Object value) {
this.input = new InputAndValue(this.input.input(), value, this.input.enabled(), this.input.isDefault(), this.input.exceptions()); this.input = new InputAndValue(this.input.input(), value, this.input.enabled(), this.input.isDefault(), this.input.exception());
markAsResolved(); markAsResolved();
} }
public void resolveWithError(@Nullable Set<InputOutputValidationException> exception) { public void resolveWithError(@Nullable ConstraintViolationException exception) {
this.input = new InputAndValue(this.input.input(), this.input.value(), this.input.enabled(), this.input.isDefault(), exception); this.input = new InputAndValue(this.input.input(), this.input.value(), this.input.enabled(), this.input.isDefault(), exception);
markAsResolved(); markAsResolved();
} }
private void resolveWithError(@Nullable InputOutputValidationException exception){
resolveWithError(Collections.singleton(exception));
}
private void markAsResolved() { private void markAsResolved() {
this.isResolved = true; this.isResolved = true;

View File

@@ -1,29 +0,0 @@
package io.kestra.core.runners;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.Output;
import java.util.List;
import java.util.Map;
/**
* InputAndOutput could be used to work with flow execution inputs and outputs.
*/
public interface InputAndOutput {
/**
* Reads the inputs of a flow execution.
*/
Map<String, Object> readInputs(FlowInterface flow, Execution execution, Map<String, Object> inputs);
/**
* Processes the outputs of a flow execution (parse them based on their types).
*/
Map<String, Object> typedOutputs(FlowInterface flow, Execution execution, Map<String, Object> rOutputs);
/**
* Render flow execution outputs.
*/
Map<String, Object> renderOutputs(List<Output> outputs) throws IllegalVariableEvaluationException;
}

View File

@@ -1,56 +0,0 @@
package io.kestra.core.runners;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.Output;
import io.micronaut.context.ApplicationContext;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
class InputAndOutputImpl implements InputAndOutput {
private final FlowInputOutput flowInputOutput;
private final RunContext runContext;
InputAndOutputImpl(ApplicationContext applicationContext, RunContext runContext) {
this.flowInputOutput = applicationContext.getBean(FlowInputOutput.class);
this.runContext = runContext;
}
@Override
public Map<String, Object> readInputs(FlowInterface flow, Execution execution, Map<String, Object> inputs) {
return flowInputOutput.readExecutionInputs(flow, execution, inputs);
}
@Override
public Map<String, Object> typedOutputs(FlowInterface flow, Execution execution, Map<String, Object> rOutputs) {
return flowInputOutput.typedOutputs(flow, execution, rOutputs);
}
@Override
public Map<String, Object> renderOutputs(List<Output> outputs) throws IllegalVariableEvaluationException {
if (outputs == null) return Map.of();
// render required outputs
Map<String, Object> outputsById = outputs
.stream()
.filter(output -> output.getRequired() == null || output.getRequired())
.collect(HashMap::new, (map, entry) -> map.put(entry.getId(), entry.getValue()), Map::putAll);
outputsById = runContext.render(outputsById);
// render optional outputs one by one to catch, log, and skip any error.
for (io.kestra.core.models.flows.Output output : outputs) {
if (Boolean.FALSE.equals(output.getRequired())) {
try {
outputsById.putAll(runContext.render(Map.of(output.getId(), output.getValue())));
} catch (Exception e) {
runContext.logger().warn("Failed to render optional flow output '{}'. Output is ignored.", output.getId(), e);
outputsById.put(output.getId(), null);
}
}
}
return outputsById;
}
}

View File

@@ -4,7 +4,6 @@ import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonInclude;
import io.kestra.core.encryption.EncryptionService; import io.kestra.core.encryption.EncryptionService;
import io.kestra.core.exceptions.IllegalVariableEvaluationException; import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.models.Plugin;
import io.kestra.core.models.executions.AbstractMetricEntry; import io.kestra.core.models.executions.AbstractMetricEntry;
import io.kestra.core.models.property.Property; import io.kestra.core.models.property.Property;
import io.kestra.core.models.property.PropertyContext; import io.kestra.core.models.property.PropertyContext;
@@ -205,15 +204,4 @@ public abstract class RunContext implements PropertyContext {
* when Namespace ACLs are used (EE). * when Namespace ACLs are used (EE).
*/ */
public abstract AclChecker acl(); public abstract AclChecker acl();
/**
* Clone this run context for a specific plugin.
* @return a new run context with the plugin configuration of the given plugin.
*/
public abstract RunContext cloneForPlugin(Plugin plugin);
/**
* @return an InputAndOutput that can be used to work with inputs and outputs.
*/
public abstract InputAndOutput inputAndOutput();
} }

View File

@@ -1,8 +1,10 @@
package io.kestra.core.runners; package io.kestra.core.runners;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import io.kestra.core.models.Plugin;
import io.kestra.core.models.executions.TaskRun; import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.tasks.Task; import io.kestra.core.models.tasks.Task;
import io.kestra.core.models.tasks.runners.TaskRunner;
import io.kestra.core.models.triggers.AbstractTrigger; import io.kestra.core.models.triggers.AbstractTrigger;
import io.kestra.core.models.triggers.TriggerContext; import io.kestra.core.models.triggers.TriggerContext;
import io.kestra.core.plugins.PluginConfigurations; import io.kestra.core.plugins.PluginConfigurations;
@@ -51,6 +53,20 @@ public class RunContextInitializer {
@Value("${kestra.encryption.secret-key}") @Value("${kestra.encryption.secret-key}")
protected Optional<String> secretKey; protected Optional<String> secretKey;
/**
* Initializes the given {@link RunContext} for the given {@link Plugin}.
*
* @param runContext The {@link RunContext} to initialize.
* @param plugin The {@link TaskRunner} used for initialization.
* @return The {@link RunContext} to initialize
*/
public DefaultRunContext forPlugin(final DefaultRunContext runContext,
final Plugin plugin) {
runContext.init(applicationContext);
runContext.setPluginConfiguration(pluginConfigurations.getConfigurationByPluginTypeOrAliases(plugin.getType(), plugin.getClass()));
return runContext;
}
/** /**
* Initializes the given {@link RunContext} for the given {@link WorkerTask} for executor. * Initializes the given {@link RunContext} for the given {@link WorkerTask} for executor.
* *

View File

@@ -55,11 +55,11 @@ public class RunContextLogger implements Supplier<org.slf4j.Logger> {
public RunContextLogger(QueueInterface<LogEntry> logQueue, LogEntry logEntry, org.slf4j.event.Level loglevel, boolean logToFile) { public RunContextLogger(QueueInterface<LogEntry> logQueue, LogEntry logEntry, org.slf4j.event.Level loglevel, boolean logToFile) {
if (logEntry.getTaskId() != null) { if (logEntry.getTaskId() != null) {
this.loggerName = baseLoggerName(logEntry) + "." + logEntry.getTaskId(); this.loggerName = "flow." + logEntry.getFlowId() + "." + logEntry.getTaskId();
} else if (logEntry.getTriggerId() != null) { } else if (logEntry.getTriggerId() != null) {
this.loggerName = baseLoggerName(logEntry) + "." + logEntry.getTriggerId(); this.loggerName = "flow." + logEntry.getFlowId() + "." + logEntry.getTriggerId();
} else { } else {
this.loggerName = baseLoggerName(logEntry); this.loggerName = "flow." + logEntry.getFlowId();
} }
this.logQueue = logQueue; this.logQueue = logQueue;
@@ -68,10 +68,6 @@ public class RunContextLogger implements Supplier<org.slf4j.Logger> {
this.logToFile = logToFile; this.logToFile = logToFile;
} }
private String baseLoggerName(LogEntry logEntry) {
return "flow." + logEntry.getTenantId() + "." + logEntry.getNamespace() + "." + logEntry.getFlowId();
}
private static List<LogEntry> logEntry(ILoggingEvent event, String message, org.slf4j.event.Level level, LogEntry logEntry) { private static List<LogEntry> logEntry(ILoggingEvent event, String message, org.slf4j.event.Level level, LogEntry logEntry) {
Iterable<String> split; Iterable<String> split;

View File

@@ -8,7 +8,6 @@ import io.micronaut.core.annotation.Nullable;
import io.pebbletemplates.pebble.PebbleEngine; import io.pebbletemplates.pebble.PebbleEngine;
import io.pebbletemplates.pebble.extension.Extension; import io.pebbletemplates.pebble.extension.Extension;
import io.pebbletemplates.pebble.extension.Function; import io.pebbletemplates.pebble.extension.Function;
import io.pebbletemplates.pebble.lexer.Syntax;
import jakarta.inject.Inject; import jakarta.inject.Inject;
import jakarta.inject.Singleton; import jakarta.inject.Singleton;
@@ -38,13 +37,6 @@ public class PebbleEngineFactory {
return builder.build(); return builder.build();
} }
public PebbleEngine createWithCustomSyntax(Syntax syntax, Class<? extends Extension> extension) {
PebbleEngine.Builder builder = newPebbleEngineBuilder()
.syntax(syntax);
this.applicationContext.getBeansOfType(extension).forEach(builder::extension);
return builder.build();
}
public PebbleEngine createWithMaskedFunctions(VariableRenderer renderer, final List<String> functionsToMask) { public PebbleEngine createWithMaskedFunctions(VariableRenderer renderer, final List<String> functionsToMask) {
PebbleEngine.Builder builder = newPebbleEngineBuilder(); PebbleEngine.Builder builder = newPebbleEngineBuilder();

View File

@@ -35,10 +35,6 @@ public final class YamlParser {
return read(input, cls, type(cls)); return read(input, cls, type(cls));
} }
public static <T> T parse(String input, Class<T> cls, Boolean strict) {
return strict ? read(input, cls, type(cls)) : readNonStrict(input, cls, type(cls));
}
public static <T> T parse(Map<String, Object> input, Class<T> cls, Boolean strict) { public static <T> T parse(Map<String, Object> input, Class<T> cls, Boolean strict) {
ObjectMapper currentMapper = strict ? STRICT_MAPPER : NON_STRICT_MAPPER; ObjectMapper currentMapper = strict ? STRICT_MAPPER : NON_STRICT_MAPPER;
@@ -85,31 +81,7 @@ public final class YamlParser {
throw toConstraintViolationException(input, resource, e); throw toConstraintViolationException(input, resource, e);
} }
} }
private static <T> T readNonStrict(String input, Class<T> objectClass, String resource) {
try {
return NON_STRICT_MAPPER.readValue(input, objectClass);
} catch (JsonProcessingException e) {
throw toConstraintViolationException(input, resource, e);
}
}
private static String formatYamlErrorMessage(String originalMessage, JsonProcessingException e) {
StringBuilder friendlyMessage = new StringBuilder();
if (originalMessage.contains("Expected a field name")) {
friendlyMessage.append("YAML syntax error: Invalid structure. Check indentation and ensure all fields are properly formatted.");
} else if (originalMessage.contains("MappingStartEvent")) {
friendlyMessage.append("YAML syntax error: Unexpected mapping start. Verify that scalar values are properly quoted if needed.");
} else if (originalMessage.contains("Scalar value")) {
friendlyMessage.append("YAML syntax error: Expected a simple value but found complex structure. Check for unquoted special characters.");
} else {
friendlyMessage.append("YAML parsing error: ").append(originalMessage.replaceAll("org\\.yaml\\.snakeyaml.*", "").trim());
}
if (e.getLocation() != null) {
int line = e.getLocation().getLineNr();
friendlyMessage.append(String.format(" (at line %d)", line));
}
// Return a generic but cleaner message for other YAML errors
return friendlyMessage.toString();
}
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public static <T> ConstraintViolationException toConstraintViolationException(T target, String resource, JsonProcessingException e) { public static <T> ConstraintViolationException toConstraintViolationException(T target, String resource, JsonProcessingException e) {
if (e.getCause() instanceof ConstraintViolationException constraintViolationException) { if (e.getCause() instanceof ConstraintViolationException constraintViolationException) {
@@ -149,12 +121,11 @@ public final class YamlParser {
) )
)); ));
} else { } else {
String userFriendlyMessage = formatYamlErrorMessage(e.getMessage(), e);
return new ConstraintViolationException( return new ConstraintViolationException(
"Illegal " + resource + " source: " + userFriendlyMessage, "Illegal " + resource + " source: " + e.getMessage(),
Collections.singleton( Collections.singleton(
ManualConstraintViolation.of( ManualConstraintViolation.of(
userFriendlyMessage, e.getCause() == null ? e.getMessage() : e.getMessage() + "\nCaused by: " + e.getCause().getMessage(),
target, target,
(Class<T>) target.getClass(), (Class<T>) target.getClass(),
"yaml", "yaml",
@@ -165,3 +136,4 @@ public final class YamlParser {
} }
} }
} }

View File

@@ -4,6 +4,7 @@ import com.cronutils.utils.VisibleForTesting;
import io.kestra.core.exceptions.InternalException; import io.kestra.core.exceptions.InternalException;
import io.kestra.core.models.conditions.Condition; import io.kestra.core.models.conditions.Condition;
import io.kestra.core.models.conditions.ConditionContext; import io.kestra.core.models.conditions.ConditionContext;
import io.kestra.core.models.conditions.ScheduleCondition;
import io.kestra.core.models.executions.Execution; import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface; import io.kestra.core.models.flows.FlowInterface;
@@ -64,6 +65,16 @@ public class ConditionService {
return this.valid(flow, conditions, conditionContext); return this.valid(flow, conditions, conditionContext);
} }
/**
* Check that all conditions are valid.
* Warning, this method throws if a condition cannot be evaluated.
*/
public boolean isValid(List<ScheduleCondition> conditions, ConditionContext conditionContext) throws InternalException {
return conditions
.stream()
.allMatch(throwPredicate(condition -> condition.test(conditionContext)));
}
/** /**
* Check that all conditions are valid. * Check that all conditions are valid.
* Warning, this method throws if a condition cannot be evaluated. * Warning, this method throws if a condition cannot be evaluated.

View File

@@ -754,7 +754,7 @@ public class ExecutionService {
var parentTaskRun = execution.findTaskRunByTaskRunId(taskRun.getParentTaskRunId()); var parentTaskRun = execution.findTaskRunByTaskRunId(taskRun.getParentTaskRunId());
Execution newExecution = execution; Execution newExecution = execution;
if (parentTaskRun.getState().getCurrent() != State.Type.KILLED) { if (parentTaskRun.getState().getCurrent() != State.Type.KILLED) {
newExecution = newExecution.withTaskRun(parentTaskRun.withStateAndAttempt(State.Type.KILLED)); newExecution = newExecution.withTaskRun(parentTaskRun.withState(State.Type.KILLED));
} }
if (parentTaskRun.getParentTaskRunId() != null) { if (parentTaskRun.getParentTaskRunId() != null) {
return killParentTaskruns(parentTaskRun, newExecution); return killParentTaskruns(parentTaskRun, newExecution);

View File

@@ -92,14 +92,7 @@ public class FlowService {
return flowRepository return flowRepository
.orElseThrow(() -> new IllegalStateException("Cannot perform operation on flow. Cause: No FlowRepository")); .orElseThrow(() -> new IllegalStateException("Cannot perform operation on flow. Cause: No FlowRepository"));
} }
private static String formatValidationError(String message) {
if (message.startsWith("Illegal flow source:")) {
// Already formatted by YamlParser, return as-is
return message;
}
// For other validation errors, provide context
return "Validation error: " + message;
}
/** /**
* Evaluates all checks defined in the given flow using the provided inputs. * Evaluates all checks defined in the given flow using the provided inputs.
* <p> * <p>
@@ -181,12 +174,10 @@ public class FlowService {
modelValidator.validate(pluginDefaultService.injectAllDefaults(flow, false)); modelValidator.validate(pluginDefaultService.injectAllDefaults(flow, false));
} catch (ConstraintViolationException e) { } catch (ConstraintViolationException e) {
String friendlyMessage = formatValidationError(e.getMessage()); validateConstraintViolationBuilder.constraints(e.getMessage());
validateConstraintViolationBuilder.constraints(friendlyMessage);
} catch (FlowProcessingException e) { } catch (FlowProcessingException e) {
if (e.getCause() instanceof ConstraintViolationException cve) { if (e.getCause() instanceof ConstraintViolationException) {
String friendlyMessage = formatValidationError(cve.getMessage()); validateConstraintViolationBuilder.constraints(e.getMessage());
validateConstraintViolationBuilder.constraints(friendlyMessage);
} else { } else {
Throwable cause = e.getCause() != null ? e.getCause() : e; Throwable cause = e.getCause() != null ? e.getCause() : e;
validateConstraintViolationBuilder.constraints("Unable to validate the flow: " + cause.getMessage()); validateConstraintViolationBuilder.constraints("Unable to validate the flow: " + cause.getMessage());
@@ -588,4 +579,4 @@ public class FlowService {
private IllegalStateException noRepositoryException() { private IllegalStateException noRepositoryException() {
return new IllegalStateException("No repository found. Make sure the `kestra.repository.type` property is set."); return new IllegalStateException("No repository found. Make sure the `kestra.repository.type` property is set.");
} }
} }

View File

@@ -1,5 +1,6 @@
package io.kestra.core.storages; package io.kestra.core.storages;
import io.kestra.core.repositories.NamespaceFileMetadataRepositoryInterface;
import io.kestra.core.services.NamespaceService; import io.kestra.core.services.NamespaceService;
import jakarta.annotation.Nullable; import jakarta.annotation.Nullable;
import org.slf4j.Logger; import org.slf4j.Logger;
@@ -271,13 +272,7 @@ public class InternalStorage implements Storage {
return this.storage.put(context.getTenantId(), context.getNamespace(), resolve, new BufferedInputStream(inputStream)); return this.storage.put(context.getTenantId(), context.getNamespace(), resolve, new BufferedInputStream(inputStream));
} }
@Override
public Optional<StorageContext.Task> getTaskStorageContext() { public Optional<StorageContext.Task> getTaskStorageContext() {
return Optional.ofNullable((context instanceof StorageContext.Task task) ? task : null); return Optional.ofNullable((context instanceof StorageContext.Task task) ? task : null);
} }
@Override
public List<FileAttributes> list(URI uri) throws IOException {
return this.storage.list(context.getTenantId(), context.getNamespace(), uri);
}
} }

View File

@@ -173,6 +173,4 @@ public interface Storage {
* @return the task storage context * @return the task storage context
*/ */
Optional<StorageContext.Task> getTaskStorageContext(); Optional<StorageContext.Task> getTaskStorageContext();
List<FileAttributes> list(URI uri) throws IOException;
} }

View File

@@ -1,39 +1,13 @@
package io.kestra.core.utils; package io.kestra.core.utils;
import io.kestra.core.models.Setting;
import io.kestra.core.repositories.SettingRepositoryInterface;
import jakarta.annotation.PostConstruct;
import jakarta.inject.Inject;
import jakarta.inject.Singleton; import jakarta.inject.Singleton;
import java.util.Optional;
@Singleton @Singleton
public class EditionProvider { public class EditionProvider {
public Edition get() { public Edition get() {
return Edition.OSS; return Edition.OSS;
} }
@Inject
private Optional<SettingRepositoryInterface> settingRepository; // repositories are not always there on unit tests
@PostConstruct
void start() {
// check the edition in the settings and update if needed, we didn't use it would allow us to detect incompatible update later if needed
settingRepository.ifPresent(settingRepositoryInterface -> persistEdition(settingRepositoryInterface, get()));
}
private void persistEdition(SettingRepositoryInterface settingRepositoryInterface, Edition edition) {
Optional<Setting> versionSetting = settingRepositoryInterface.findByKey(Setting.INSTANCE_EDITION);
if (versionSetting.isEmpty() || !versionSetting.get().getValue().equals(edition)) {
settingRepositoryInterface.save(Setting.builder()
.key(Setting.INSTANCE_EDITION)
.value(edition)
.build()
);
}
}
public enum Edition { public enum Edition {
OSS, OSS,
EE EE

View File

@@ -11,11 +11,6 @@ import jakarta.inject.Inject;
import jakarta.inject.Singleton; import jakarta.inject.Singleton;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
/**
* Utility class to create {@link java.util.concurrent.ExecutorService} with {@link java.util.concurrent.ExecutorService} instances.
* WARNING: those instances will use the {@link ThreadUncaughtExceptionHandler} which terminates Kestra if an error occurs in any thread,
* so it should not be used inside plugins.
*/
@Singleton @Singleton
@Slf4j @Slf4j
public class ExecutorsUtils { public class ExecutorsUtils {

View File

@@ -65,17 +65,10 @@ public class ListUtils {
} }
public static List<String> convertToListString(Object object){ public static List<String> convertToListString(Object object){
return convertToList(object) if (object instanceof List<?> list && (list.isEmpty() || list.getFirst() instanceof String)) {
.stream() return (List<String>) list;
.map(Object::toString) } else {
.toList(); throw new IllegalArgumentException("%s in not an instance of List of String".formatted(object));
}
public static <T> List<List<T>> partition(List<T> list, int size) {
List<List<T>> parts = new ArrayList<>();
for (int i = 0; i < list.size(); i += size) {
parts.add(list.subList(i, Math.min(i + size, list.size())));
} }
return parts;
} }
} }

View File

@@ -10,7 +10,7 @@ import org.slf4j.LoggerFactory;
import org.slf4j.event.Level; import org.slf4j.event.Level;
/** /**
* Utility class for server logging * Utility class for logging
*/ */
public final class Logs { public final class Logs {
@@ -18,7 +18,7 @@ public final class Logs {
private static final String EXECUTION_PREFIX_WITH_TENANT = FLOW_PREFIX_WITH_TENANT + "[execution: {}] "; private static final String EXECUTION_PREFIX_WITH_TENANT = FLOW_PREFIX_WITH_TENANT + "[execution: {}] ";
private static final String TRIGGER_PREFIX_WITH_TENANT = FLOW_PREFIX_WITH_TENANT + "[trigger: {}] "; private static final String TRIGGER_PREFIX_WITH_TENANT = FLOW_PREFIX_WITH_TENANT + "[trigger: {}] ";
private static final String TASKRUN_PREFIX_WITH_TENANT = FLOW_PREFIX_WITH_TENANT + "[task: {}] [execution: {}] [taskrun: {}] "; private static final String TASKRUN_PREFIX_WITH_TENANT = FLOW_PREFIX_WITH_TENANT + "[task: {}] [execution: {}] [taskrun: {}] ";
private Logs() {} private Logs() {}
public static void logExecution(FlowId flow, Logger logger, Level level, String message, Object... args) { public static void logExecution(FlowId flow, Logger logger, Level level, String message, Object... args) {
@@ -29,7 +29,7 @@ public final class Logs {
} }
/** /**
* Log an {@link Execution} via the executor logger named: 'executor.{tenantId}.{namespace}.{flowId}'. * Log an {@link Execution} via the execution logger named: 'execution.{flowId}'.
*/ */
public static void logExecution(Execution execution, Level level, String message, Object... args) { public static void logExecution(Execution execution, Level level, String message, Object... args) {
Logger logger = logger(execution); Logger logger = logger(execution);
@@ -43,7 +43,7 @@ public final class Logs {
} }
/** /**
* Log a {@link TriggerContext} via the scheduler logger named: 'trigger.{tenantId}.{namespace}.{flowId}.{triggerId}'. * Log a {@link TriggerContext} via the trigger logger named: 'trigger.{flowId}.{triggereId}'.
*/ */
public static void logTrigger(TriggerContext triggerContext, Level level, String message, Object... args) { public static void logTrigger(TriggerContext triggerContext, Level level, String message, Object... args) {
Logger logger = logger(triggerContext); Logger logger = logger(triggerContext);
@@ -57,7 +57,7 @@ public final class Logs {
} }
/** /**
* Log a {@link TaskRun} via the worker logger named: 'worker.{tenantId}.{namespace}.{flowId}.{taskId}'. * Log a {@link TaskRun} via the taskRun logger named: 'task.{flowId}.{taskId}'.
*/ */
public static void logTaskRun(TaskRun taskRun, Level level, String message, Object... args) { public static void logTaskRun(TaskRun taskRun, Level level, String message, Object... args) {
String prefix = TASKRUN_PREFIX_WITH_TENANT; String prefix = TASKRUN_PREFIX_WITH_TENANT;
@@ -73,19 +73,19 @@ public final class Logs {
private static Logger logger(TaskRun taskRun) { private static Logger logger(TaskRun taskRun) {
return LoggerFactory.getLogger( return LoggerFactory.getLogger(
"worker." + taskRun.getTenantId() + "." + taskRun.getNamespace() + "." + taskRun.getFlowId() + "." + taskRun.getTaskId() "task." + taskRun.getFlowId() + "." + taskRun.getTaskId()
); );
} }
private static Logger logger(TriggerContext triggerContext) { private static Logger logger(TriggerContext triggerContext) {
return LoggerFactory.getLogger( return LoggerFactory.getLogger(
"scheduler." + triggerContext.getTenantId() + "." + triggerContext.getNamespace() + "." + triggerContext.getFlowId() + "." + triggerContext.getTriggerId() "trigger." + triggerContext.getFlowId() + "." + triggerContext.getTriggerId()
); );
} }
private static Logger logger(Execution execution) { private static Logger logger(Execution execution) {
return LoggerFactory.getLogger( return LoggerFactory.getLogger(
"executor." + execution.getTenantId() + "." + execution.getNamespace() + "." + execution.getFlowId() "execution." + execution.getFlowId()
); );
} }
} }

View File

@@ -120,10 +120,7 @@ public class MapUtils {
private static Collection<?> mergeCollections(Collection<?> colA, Collection<?> colB) { private static Collection<?> mergeCollections(Collection<?> colA, Collection<?> colB) {
List<Object> merged = new ArrayList<>(colA.size() + colB.size()); List<Object> merged = new ArrayList<>(colA.size() + colB.size());
merged.addAll(colA); merged.addAll(colA);
if (!colB.isEmpty()) { merged.addAll(colB);
List<?> filtered = colB.stream().filter(it -> !colA.contains(it)).toList();
merged.addAll(filtered);
}
return merged; return merged;
} }

View File

@@ -1,12 +1,14 @@
package io.kestra.core.utils; package io.kestra.core.utils;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import io.kestra.core.models.executions.metrics.Counter; import io.kestra.core.models.executions.metrics.Counter;
import io.kestra.core.models.executions.metrics.Timer; import io.kestra.core.models.executions.metrics.Timer;
import io.kestra.core.models.tasks.FileExistComportment; import io.kestra.core.models.tasks.FileExistComportment;
import io.kestra.core.models.tasks.NamespaceFiles; import io.kestra.core.models.tasks.NamespaceFiles;
import io.kestra.core.runners.RunContext; import io.kestra.core.runners.RunContext;
import io.kestra.core.storages.NamespaceFile; import io.kestra.core.storages.NamespaceFile;
import jakarta.annotation.PostConstruct;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DurationFormatUtils; import org.apache.commons.lang3.time.DurationFormatUtils;
import org.apache.commons.lang3.time.StopWatch; import org.apache.commons.lang3.time.StopWatch;
@@ -17,27 +19,26 @@ import java.io.InputStream;
import java.nio.file.Path; import java.nio.file.Path;
import java.time.Duration; import java.time.Duration;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.concurrent.*; import java.util.Map;
import java.util.concurrent.ExecutorService;
import static io.kestra.core.utils.Rethrow.throwConsumer; import static io.kestra.core.utils.Rethrow.throwConsumer;
public final class NamespaceFilesUtils { @Singleton
private static final int maxThreads = Math.max(Runtime.getRuntime().availableProcessors() * 4, 32); public class NamespaceFilesUtils {
private static final ExecutorService EXECUTOR_SERVICE = new ThreadPoolExecutor( @Inject
0, private ExecutorsUtils executorsUtils;
maxThreads,
60L,
TimeUnit.SECONDS,
new SynchronousQueue<>(),
new ThreadFactoryBuilder().setNameFormat("namespace-files").build()
);;
private NamespaceFilesUtils() { private ExecutorService executorService;
// utility class pattern
@PostConstruct
public void postConstruct() {
this.executorService = executorsUtils.maxCachedThreadPool(Math.max(Runtime.getRuntime().availableProcessors() * 4, 32), "namespace-file");
} }
public static void loadNamespaceFiles( public void loadNamespaceFiles(
RunContext runContext, RunContext runContext,
NamespaceFiles namespaceFiles NamespaceFiles namespaceFiles
) )
@@ -62,11 +63,7 @@ public final class NamespaceFilesUtils {
matchedNamespaceFiles.addAll(files); matchedNamespaceFiles.addAll(files);
} }
// Use half of the available threads to avoid impacting concurrent tasks
int parallelism = maxThreads / 2;
Flux.fromIterable(matchedNamespaceFiles) Flux.fromIterable(matchedNamespaceFiles)
.parallel(parallelism)
.runOn(Schedulers.fromExecutorService(EXECUTOR_SERVICE))
.doOnNext(throwConsumer(nsFile -> { .doOnNext(throwConsumer(nsFile -> {
InputStream content = runContext.storage().getFile(nsFile.uri()); InputStream content = runContext.storage().getFile(nsFile.uri());
Path path = folderPerNamespace ? Path path = folderPerNamespace ?
@@ -74,7 +71,7 @@ public final class NamespaceFilesUtils {
Path.of(nsFile.path()); Path.of(nsFile.path());
runContext.workingDir().putFile(path, content, fileExistComportment); runContext.workingDir().putFile(path, content, fileExistComportment);
})) }))
.sequential() .publishOn(Schedulers.fromExecutorService(executorService))
.blockLast(); .blockLast();
Duration duration = stopWatch.getDuration(); Duration duration = stopWatch.getDuration();

View File

@@ -23,6 +23,7 @@ import io.kestra.core.serializers.ListOrMapOfLabelSerializer;
import io.kestra.core.services.StorageService; import io.kestra.core.services.StorageService;
import io.kestra.core.storages.FileAttributes; import io.kestra.core.storages.FileAttributes;
import io.kestra.core.storages.StorageContext; import io.kestra.core.storages.StorageContext;
import io.kestra.core.storages.StorageInterface;
import io.kestra.core.storages.StorageSplitInterface; import io.kestra.core.storages.StorageSplitInterface;
import io.kestra.core.utils.GraphUtils; import io.kestra.core.utils.GraphUtils;
import io.kestra.core.validations.NoSystemLabelValidation; import io.kestra.core.validations.NoSystemLabelValidation;
@@ -539,7 +540,7 @@ public class ForEachItem extends Task implements FlowableTask<VoidOutput>, Child
.numberOfBatches((Integer) taskRun.getOutputs().get(ExecutableUtils.TASK_VARIABLE_NUMBER_OF_BATCHES)); .numberOfBatches((Integer) taskRun.getOutputs().get(ExecutableUtils.TASK_VARIABLE_NUMBER_OF_BATCHES));
try (ByteArrayOutputStream bos = new ByteArrayOutputStream()) { try (ByteArrayOutputStream bos = new ByteArrayOutputStream()) {
FileSerde.write(bos, runContext.inputAndOutput().renderOutputs(flow.getOutputs())); FileSerde.write(bos, FlowInputOutput.renderFlowOutputs(flow.getOutputs(), runContext));
URI uri = runContext.storage().putFile( URI uri = runContext.storage().putFile(
new ByteArrayInputStream(bos.toByteArray()), new ByteArrayInputStream(bos.toByteArray()),
URI.create((String) taskRun.getOutputs().get("uri")) URI.create((String) taskRun.getOutputs().get("uri"))
@@ -601,8 +602,9 @@ public class ForEachItem extends Task implements FlowableTask<VoidOutput>, Child
String subflowOutputsBase = (String) taskOutput.get(ExecutableUtils.TASK_VARIABLE_SUBFLOW_OUTPUTS_BASE_URI); String subflowOutputsBase = (String) taskOutput.get(ExecutableUtils.TASK_VARIABLE_SUBFLOW_OUTPUTS_BASE_URI);
URI subflowOutputsBaseUri = URI.create(StorageContext.KESTRA_PROTOCOL + subflowOutputsBase + "/"); URI subflowOutputsBaseUri = URI.create(StorageContext.KESTRA_PROTOCOL + subflowOutputsBase + "/");
if (runContext.storage().isFileExist(subflowOutputsBaseUri)) { StorageInterface storage = ((DefaultRunContext) runContext).getApplicationContext().getBean(StorageInterface.class);
List<FileAttributes> list = runContext.storage().list(subflowOutputsBaseUri);; if (storage.exists(runContext.flowInfo().tenantId(), runContext.flowInfo().namespace(), subflowOutputsBaseUri)) {
List<FileAttributes> list = storage.list(runContext.flowInfo().tenantId(), runContext.flowInfo().namespace(), subflowOutputsBaseUri);
if (!list.isEmpty()) { if (!list.isEmpty()) {
// Merge outputs from each sub-flow into a single stored in the internal storage. // Merge outputs from each sub-flow into a single stored in the internal storage.

View File

@@ -157,7 +157,7 @@ public class LoopUntil extends Task implements FlowableTask<LoopUntil.Output> {
public Instant nextExecutionDate(RunContext runContext, Execution execution, TaskRun parentTaskRun) throws IllegalVariableEvaluationException { public Instant nextExecutionDate(RunContext runContext, Execution execution, TaskRun parentTaskRun) throws IllegalVariableEvaluationException {
if (!this.reachedMaximums(runContext, execution, parentTaskRun, false)) { if (!this.reachedMaximums(runContext, execution, parentTaskRun, false)) {
String continueLoop = runContext.render(this.condition).skipCache().as(String.class).orElse(null); String continueLoop = runContext.render(this.condition).as(String.class).orElse(null);
if (!TruthUtils.isTruthy(continueLoop)) { if (!TruthUtils.isTruthy(continueLoop)) {
return Instant.now().plus(runContext.render(this.getCheckFrequency().getInterval()).as(Duration.class).orElseThrow()); return Instant.now().plus(runContext.render(this.getCheckFrequency().getInterval()).as(Duration.class).orElseThrow());
} }

View File

@@ -63,8 +63,7 @@ import java.util.*;
- id: run_post_approval - id: run_post_approval
type: io.kestra.plugin.scripts.shell.Commands type: io.kestra.plugin.scripts.shell.Commands
taskRunner: runner: PROCESS
type: io.kestra.plugin.core.runner.Process
commands: commands:
- echo "Manual approval received! Continuing the execution..." - echo "Manual approval received! Continuing the execution..."

View File

@@ -18,6 +18,7 @@ import io.kestra.core.models.tasks.ExecutableTask;
import io.kestra.core.models.tasks.Task; import io.kestra.core.models.tasks.Task;
import io.kestra.core.runners.DefaultRunContext; import io.kestra.core.runners.DefaultRunContext;
import io.kestra.core.runners.ExecutableUtils; import io.kestra.core.runners.ExecutableUtils;
import io.kestra.core.runners.FlowInputOutput;
import io.kestra.core.runners.FlowMetaStoreInterface; import io.kestra.core.runners.FlowMetaStoreInterface;
import io.kestra.core.runners.RunContext; import io.kestra.core.runners.RunContext;
import io.kestra.core.runners.SubflowExecution; import io.kestra.core.runners.SubflowExecution;
@@ -37,6 +38,7 @@ import lombok.Getter;
import lombok.NoArgsConstructor; import lombok.NoArgsConstructor;
import lombok.ToString; import lombok.ToString;
import lombok.experimental.SuperBuilder; import lombok.experimental.SuperBuilder;
import org.slf4j.event.Level;
import java.time.ZonedDateTime; import java.time.ZonedDateTime;
import java.util.Collections; import java.util.Collections;
@@ -244,11 +246,11 @@ public class Subflow extends Task implements ExecutableTask<Subflow.Output>, Chi
if (subflowOutputs != null && !subflowOutputs.isEmpty()) { if (subflowOutputs != null && !subflowOutputs.isEmpty()) {
try { try {
var inputAndOutput = runContext.inputAndOutput(); Map<String, Object> rOutputs = FlowInputOutput.renderFlowOutputs(subflowOutputs, runContext);
Map<String, Object> rOutputs = inputAndOutput.renderOutputs(subflowOutputs);
if (flow.getOutputs() != null) { FlowInputOutput flowInputOutput = ((DefaultRunContext)runContext).getApplicationContext().getBean(FlowInputOutput.class); // this is hacking
rOutputs = inputAndOutput.typedOutputs(flow, execution, rOutputs); if (flow.getOutputs() != null && flowInputOutput != null) {
rOutputs = flowInputOutput.typedOutputs(flow, execution, rOutputs);
} }
builder.outputs(rOutputs); builder.outputs(rOutputs);
} catch (Exception e) { } catch (Exception e) {

View File

@@ -123,7 +123,7 @@ public class Switch extends Task implements FlowableTask<Switch.Output> {
} }
private String rendererValue(RunContext runContext) throws IllegalVariableEvaluationException { private String rendererValue(RunContext runContext) throws IllegalVariableEvaluationException {
return runContext.render(this.value).skipCache().as(String.class).orElseThrow(); return runContext.render(this.value).as(String.class).orElseThrow();
} }
@Override @Override

View File

@@ -260,7 +260,8 @@ public class WorkingDirectory extends Sequential implements NamespaceFilesInterf
} }
if (this.namespaceFiles != null && !Boolean.FALSE.equals(runContext.render(this.namespaceFiles.getEnabled()).as(Boolean.class).orElse(true))) { if (this.namespaceFiles != null && !Boolean.FALSE.equals(runContext.render(this.namespaceFiles.getEnabled()).as(Boolean.class).orElse(true))) {
NamespaceFilesUtils.loadNamespaceFiles(runContext, this.namespaceFiles); NamespaceFilesUtils namespaceFilesUtils = ((DefaultRunContext) runContext).getApplicationContext().getBean(NamespaceFilesUtils.class);
namespaceFilesUtils.loadNamespaceFiles(runContext, this.namespaceFiles);
} }
if (this.inputFiles != null) { if (this.inputFiles != null) {

View File

@@ -2,8 +2,10 @@ package io.kestra.plugin.core.namespace;
import com.fasterxml.jackson.annotation.JsonSubTypes; import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeInfo;
import io.kestra.core.repositories.NamespaceFileMetadataRepositoryInterface;
import io.kestra.core.storages.Namespace; import io.kestra.core.storages.Namespace;
import io.kestra.core.storages.NamespaceFile; import io.kestra.core.storages.NamespaceFile;
import io.kestra.plugin.core.kv.Version;
import io.micronaut.core.annotation.Introspected; import io.micronaut.core.annotation.Introspected;
import lombok.Getter; import lombok.Getter;
import lombok.NoArgsConstructor; import lombok.NoArgsConstructor;

View File

@@ -1,107 +0,0 @@
package io.kestra.plugin.core.trigger;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.models.Label;
import io.kestra.core.models.conditions.ConditionContext;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.ExecutionTrigger;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.triggers.AbstractTrigger;
import io.kestra.core.models.triggers.Backfill;
import io.kestra.core.models.triggers.Schedulable;
import io.kestra.core.models.triggers.TriggerContext;
import io.kestra.core.runners.RunContext;
import io.kestra.core.services.LabelService;
import io.kestra.core.utils.ListUtils;
import java.time.ZonedDateTime;
import java.time.chrono.ChronoZonedDateTime;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
/**
 * Factory class for constructing a new {@link Execution} from a {@link Schedulable} trigger.
 *
 * @see io.kestra.plugin.core.trigger.Schedule
 * @see io.kestra.plugin.core.trigger.ScheduleOnDates
 */
final class SchedulableExecutionFactory {

    /**
     * Builds an execution already marked {@code FAILED}, used when the trigger itself
     * could not be evaluated (e.g. schedule-condition variable rendering failed) so the
     * scheduler does not retry it every second.
     *
     * @throws IllegalVariableEvaluationException if backfill label values cannot be rendered
     */
    static Execution createFailedExecution(Schedulable trigger, ConditionContext conditionContext, TriggerContext triggerContext) throws IllegalVariableEvaluationException {
        RunContext runContext = conditionContext.getRunContext();
        List<Label> labels = getLabels(trigger, runContext, triggerContext.getBackfill(), conditionContext.getFlow());

        return Execution.builder()
            .id(runContext.getTriggerExecutionId())
            .tenantId(triggerContext.getTenantId())
            .namespace(triggerContext.getNamespace())
            .flowId(triggerContext.getFlowId())
            .flowRevision(conditionContext.getFlow().getRevision())
            .labels(labels)
            .state(new State().withState(State.Type.FAILED))
            .build();
    }

    /**
     * Builds a regular scheduled execution for the given trigger evaluation.
     *
     * @param variables    trigger output variables attached to the {@link ExecutionTrigger}
     * @param scheduleDate optional date the execution is scheduled for; may be {@code null}
     * @throws IllegalVariableEvaluationException if trigger/backfill inputs or labels cannot be rendered
     */
    static Execution createExecution(Schedulable trigger, ConditionContext conditionContext, TriggerContext triggerContext, Map<String, Object> variables, ZonedDateTime scheduleDate) throws IllegalVariableEvaluationException {
        RunContext runContext = conditionContext.getRunContext();

        List<Label> executionLabels = new ArrayList<>(
            ListUtils.emptyOnNull(getLabels(trigger, runContext, triggerContext.getBackfill(), conditionContext.getFlow()))
        );
        executionLabels.add(new Label(Label.FROM, "trigger"));

        boolean hasCorrelationId = executionLabels.stream()
            .anyMatch(label -> Label.CORRELATION_ID.equals(label.key()));
        if (!hasCorrelationId) {
            // add a correlation ID if none exist
            executionLabels.add(new Label(Label.CORRELATION_ID, runContext.getTriggerExecutionId()));
        }

        Execution execution = Execution.builder()
            .id(runContext.getTriggerExecutionId())
            .tenantId(triggerContext.getTenantId())
            .namespace(triggerContext.getNamespace())
            .flowId(triggerContext.getFlowId())
            .flowRevision(conditionContext.getFlow().getRevision())
            .variables(conditionContext.getFlow().getVariables())
            .labels(executionLabels)
            .state(new State())
            .trigger(ExecutionTrigger.of((AbstractTrigger) trigger, variables))
            .scheduleDate(scheduleDate == null ? null : scheduleDate.toInstant())
            .build();

        // add inputs and inject defaults (FlowInputOutput handles defaults internally)
        Map<String, Object> allInputs = getInputs(trigger, runContext, triggerContext.getBackfill());
        return execution.withInputs(runContext.inputAndOutput().readInputs(conditionContext.getFlow(), execution, allInputs));
    }

    /**
     * Merges rendered trigger inputs with rendered backfill inputs; backfill values
     * win on key collisions because they are applied last.
     */
    private static Map<String, Object> getInputs(Schedulable trigger, RunContext runContext, Backfill backfill) throws IllegalVariableEvaluationException {
        Map<String, Object> merged = new HashMap<>();
        if (trigger.getInputs() != null) {
            merged.putAll(runContext.render(trigger.getInputs()));
        }
        if (backfill != null && backfill.getInputs() != null) {
            merged.putAll(runContext.render(backfill.getInputs()));
        }
        return merged;
    }

    /**
     * Collects the trigger's labels, then appends any backfill labels whose values
     * render to a non-null string.
     */
    private static List<Label> getLabels(Schedulable trigger, RunContext runContext, Backfill backfill, FlowInterface flow) throws IllegalVariableEvaluationException {
        List<Label> labels = LabelService.fromTrigger(runContext, flow, (AbstractTrigger) trigger);
        if (backfill == null || backfill.getLabels() == null) {
            return labels;
        }
        for (Label backfillLabel : backfill.getLabels()) {
            String rendered = runContext.render(backfillLabel.value());
            if (rendered != null) {
                labels.add(new Label(backfillLabel.key(), rendered));
            }
        }
        return labels;
    }
}

View File

@@ -6,7 +6,9 @@ import com.cronutils.model.time.ExecutionTime;
import com.cronutils.parser.CronParser; import com.cronutils.parser.CronParser;
import com.google.common.annotations.VisibleForTesting; import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.exceptions.InternalException; import io.kestra.core.exceptions.InternalException;
import io.kestra.core.models.Label;
import io.kestra.core.models.annotations.Example; import io.kestra.core.models.annotations.Example;
import io.kestra.core.models.annotations.Plugin; import io.kestra.core.models.annotations.Plugin;
import io.kestra.core.models.annotations.PluginProperty; import io.kestra.core.models.annotations.PluginProperty;
@@ -14,8 +16,12 @@ import io.kestra.core.models.conditions.Condition;
import io.kestra.core.models.conditions.ConditionContext; import io.kestra.core.models.conditions.ConditionContext;
import io.kestra.core.models.conditions.ScheduleCondition; import io.kestra.core.models.conditions.ScheduleCondition;
import io.kestra.core.models.executions.Execution; import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.triggers.*; import io.kestra.core.models.triggers.*;
import io.kestra.core.runners.DefaultRunContext;
import io.kestra.core.runners.RunContext; import io.kestra.core.runners.RunContext;
import io.kestra.core.services.ConditionService;
import io.kestra.core.services.LabelService;
import io.kestra.core.utils.ListUtils; import io.kestra.core.utils.ListUtils;
import io.kestra.core.validations.ScheduleValidation; import io.kestra.core.validations.ScheduleValidation;
import io.kestra.core.validations.TimezoneId; import io.kestra.core.validations.TimezoneId;
@@ -23,7 +29,6 @@ import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.Valid; import jakarta.validation.Valid;
import jakarta.validation.constraints.NotNull; import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Null; import jakarta.validation.constraints.Null;
import lombok.AccessLevel;
import lombok.*; import lombok.*;
import lombok.experimental.SuperBuilder; import lombok.experimental.SuperBuilder;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
@@ -35,8 +40,6 @@ import java.time.temporal.ChronoUnit;
import java.util.*; import java.util.*;
import java.util.stream.Stream; import java.util.stream.Stream;
import static io.kestra.core.utils.Rethrow.throwPredicate;
@Slf4j @Slf4j
@SuperBuilder @SuperBuilder
@ToString @ToString
@@ -221,7 +224,11 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
@PluginProperty @PluginProperty
@Deprecated @Deprecated
private List<ScheduleCondition> scheduleConditions; private List<ScheduleCondition> scheduleConditions;
@Schema(
title = "The inputs to pass to the scheduled flow"
)
@PluginProperty(dynamic = true)
private Map<String, Object> inputs; private Map<String, Object> inputs;
@Schema( @Schema(
@@ -241,7 +248,13 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
@PluginProperty @PluginProperty
@Deprecated @Deprecated
private Map<String, Object> backfill; private Map<String, Object> backfill;
@Schema(
title = "Action to take in the case of missed schedules",
description = "`ALL` will recover all missed schedules, `LAST` will only recovered the last missing one, `NONE` will not recover any missing schedule.\n" +
"The default is `ALL` unless a different value is configured using the global plugin configuration."
)
@PluginProperty
private RecoverMissedSchedules recoverMissedSchedules; private RecoverMissedSchedules recoverMissedSchedules;
@Override @Override
@@ -390,11 +403,20 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
if (!conditionResults) { if (!conditionResults) {
return Optional.empty(); return Optional.empty();
} }
} catch (InternalException ie) { } catch(InternalException ie) {
// validate schedule condition can fail to render variables // validate schedule condition can fail to render variables
// in this case, we return a failed execution so the trigger is not evaluated each second // in this case, we return a failed execution so the trigger is not evaluated each second
runContext.logger().error("Unable to evaluate the Schedule trigger '{}'", this.getId(), ie); runContext.logger().error("Unable to evaluate the Schedule trigger '{}'", this.getId(), ie);
return Optional.of(SchedulableExecutionFactory.createFailedExecution(this, conditionContext, triggerContext)); Execution execution = Execution.builder()
.id(runContext.getTriggerExecutionId())
.tenantId(triggerContext.getTenantId())
.namespace(triggerContext.getNamespace())
.flowId(triggerContext.getFlowId())
.flowRevision(conditionContext.getFlow().getRevision())
.labels(generateLabels(runContext, conditionContext, backfill))
.state(new State().withState(State.Type.FAILED))
.build();
return Optional.of(execution);
} }
// recalculate true output for previous and next based on conditions // recalculate true output for previous and next based on conditions
@@ -408,12 +430,14 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
variables = scheduleDates.toMap(); variables = scheduleDates.toMap();
} }
Execution execution = SchedulableExecutionFactory.createExecution( Execution execution = TriggerService.generateScheduledExecution(
this, this,
conditionContext, conditionContext,
triggerContext, triggerContext,
generateLabels(runContext, conditionContext, backfill),
generateInputs(runContext, backfill),
variables, variables,
null Optional.empty()
); );
return Optional.of(execution); return Optional.of(execution);
@@ -424,6 +448,34 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
return parser.parse(this.cron); return parser.parse(this.cron);
} }
private List<Label> generateLabels(RunContext runContext, ConditionContext conditionContext, Backfill backfill) throws IllegalVariableEvaluationException {
List<Label> labels = LabelService.fromTrigger(runContext, conditionContext.getFlow(), this);
if (backfill != null && backfill.getLabels() != null) {
for (Label label : backfill.getLabels()) {
final var value = runContext.render(label.value());
if (value != null) {
labels.add(new Label(label.key(), value));
}
}
}
return labels;
}
private Map<String, Object> generateInputs(RunContext runContext, Backfill backfill) throws IllegalVariableEvaluationException {
Map<String, Object> inputs = new HashMap<>();
if (this.inputs != null) {
inputs.putAll(runContext.render(this.inputs));
}
if (backfill != null && backfill.getInputs() != null) {
inputs.putAll(runContext.render(backfill.getInputs()));
}
return inputs;
}
private Optional<Output> scheduleDates(ExecutionTime executionTime, ZonedDateTime date) { private Optional<Output> scheduleDates(ExecutionTime executionTime, ZonedDateTime date) {
Optional<ZonedDateTime> next = executionTime.nextExecution(date.minus(Duration.ofSeconds(1))); Optional<ZonedDateTime> next = executionTime.nextExecution(date.minus(Duration.ofSeconds(1)));
@@ -497,9 +549,9 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
Optional<ZonedDateTime> truePreviousNextDateWithCondition(ExecutionTime executionTime, ConditionContext conditionContext, ZonedDateTime toTestDate, boolean next) throws InternalException { Optional<ZonedDateTime> truePreviousNextDateWithCondition(ExecutionTime executionTime, ConditionContext conditionContext, ZonedDateTime toTestDate, boolean next) throws InternalException {
int upperYearBound = ZonedDateTime.now().getYear() + 10; int upperYearBound = ZonedDateTime.now().getYear() + 10;
int lowerYearBound = ZonedDateTime.now().getYear() - 10; int lowerYearBound = ZonedDateTime.now().getYear() - 10;
while ((next && toTestDate.getYear() < upperYearBound) || (!next && toTestDate.getYear() > lowerYearBound)) { while ((next && toTestDate.getYear() < upperYearBound) || (!next && toTestDate.getYear() > lowerYearBound)) {
Optional<ZonedDateTime> currentDate = next ? Optional<ZonedDateTime> currentDate = next ?
executionTime.nextExecution(toTestDate) : executionTime.nextExecution(toTestDate) :
executionTime.lastExecution(toTestDate); executionTime.lastExecution(toTestDate);
@@ -555,10 +607,11 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
private boolean validateScheduleCondition(ConditionContext conditionContext) throws InternalException { private boolean validateScheduleCondition(ConditionContext conditionContext) throws InternalException {
if (conditions != null) { if (conditions != null) {
return conditions.stream() ConditionService conditionService = ((DefaultRunContext)conditionContext.getRunContext()).getApplicationContext().getBean(ConditionService.class);
.filter(c -> c instanceof ScheduleCondition) return conditionService.isValid(
.map(c -> (ScheduleCondition) c) conditions.stream().filter(c -> c instanceof ScheduleCondition).map(c -> (ScheduleCondition) c).toList(),
.allMatch(throwPredicate(condition -> condition.test(conditionContext))); conditionContext
);
} }
return true; return true;

View File

@@ -10,6 +10,7 @@ import io.kestra.core.models.property.Property;
import io.kestra.core.models.tasks.VoidOutput; import io.kestra.core.models.tasks.VoidOutput;
import io.kestra.core.models.triggers.*; import io.kestra.core.models.triggers.*;
import io.kestra.core.runners.RunContext; import io.kestra.core.runners.RunContext;
import io.kestra.core.services.LabelService;
import io.kestra.core.validations.TimezoneId; import io.kestra.core.validations.TimezoneId;
import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.constraints.NotNull; import jakarta.validation.constraints.NotNull;
@@ -22,10 +23,7 @@ import java.time.Duration;
import java.time.ZoneId; import java.time.ZoneId;
import java.time.ZonedDateTime; import java.time.ZonedDateTime;
import java.time.temporal.ChronoUnit; import java.time.temporal.ChronoUnit;
import java.util.Collections; import java.util.*;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Predicate; import java.util.function.Predicate;
import static io.kestra.core.utils.Rethrow.throwFunction; import static io.kestra.core.utils.Rethrow.throwFunction;
@@ -47,7 +45,11 @@ public class ScheduleOnDates extends AbstractTrigger implements Schedulable, Tri
@Builder.Default @Builder.Default
@Null @Null
private final Duration interval = null; private final Duration interval = null;
@Schema(
title = "The inputs to pass to the scheduled flow"
)
@PluginProperty(dynamic = true)
private Map<String, Object> inputs; private Map<String, Object> inputs;
@TimezoneId @TimezoneId
@@ -61,24 +63,31 @@ public class ScheduleOnDates extends AbstractTrigger implements Schedulable, Tri
@NotNull @NotNull
private Property<List<ZonedDateTime>> dates; private Property<List<ZonedDateTime>> dates;
@Schema(
title = "Action to take in the case of missed schedules",
description = "`ALL` will recover all missed schedules, `LAST` will only recovered the last missing one, `NONE` will not recover any missing schedule.\n" +
"The default is `ALL` unless a different value is configured using the global plugin configuration."
)
@PluginProperty
private RecoverMissedSchedules recoverMissedSchedules; private RecoverMissedSchedules recoverMissedSchedules;
@Override @Override
public Optional<Execution> evaluate(ConditionContext conditionContext, TriggerContext triggerContext) throws Exception { public Optional<Execution> evaluate(ConditionContext conditionContext, TriggerContext triggerContext) throws Exception {
RunContext runContext = conditionContext.getRunContext(); RunContext runContext = conditionContext.getRunContext();
ZonedDateTime lastEvaluation = triggerContext.getDate(); ZonedDateTime lastEvaluation = triggerContext.getDate();
Optional<ZonedDateTime> nextDate = nextDate(runContext, date -> date.isEqual(lastEvaluation) || date.isAfter(lastEvaluation)); Optional<ZonedDateTime> nextDate = nextDate(runContext, date -> date.isEqual(lastEvaluation) || date.isAfter(lastEvaluation));
if (nextDate.isPresent()) { if (nextDate.isPresent()) {
log.info("Schedule execution on {}", nextDate.get()); log.info("Schedule execution on {}", nextDate.get());
Execution execution = SchedulableExecutionFactory.createExecution( Execution execution = TriggerService.generateScheduledExecution(
this, this,
conditionContext, conditionContext,
triggerContext, triggerContext,
LabelService.fromTrigger(runContext, conditionContext.getFlow(), this),
this.inputs != null ? runContext.render(this.inputs) : Collections.emptyMap(),
Collections.emptyMap(), Collections.emptyMap(),
nextDate.orElse(null) nextDate
); );
return Optional.of(execution); return Optional.of(execution);
@@ -88,21 +97,29 @@ public class ScheduleOnDates extends AbstractTrigger implements Schedulable, Tri
} }
@Override @Override
public ZonedDateTime nextEvaluationDate(ConditionContext conditionContext, Optional<? extends TriggerContext> triggerContext) { public ZonedDateTime nextEvaluationDate(ConditionContext conditionContext, Optional<? extends TriggerContext> last) {
return triggerContext try {
.map(ctx -> ctx.getBackfill() != null ? ctx.getBackfill().getCurrentDate() : ctx.getDate()) return last
.map(this::withTimeZone) .map(throwFunction(context ->
.or(() -> Optional.of(ZonedDateTime.now())) nextDate(conditionContext.getRunContext(), date -> date.isAfter(context.getDate()))
.flatMap(dt -> { .orElse(ZonedDateTime.now().plusYears(1))
try { ))
return nextDate(conditionContext.getRunContext(), date -> date.isAfter(dt)); .orElse(conditionContext.getRunContext()
} catch (IllegalVariableEvaluationException e) { .render(dates)
log.warn("Failed to evaluate schedule dates for trigger '{}': {}", this.getId(), e.getMessage()); .asList(ZonedDateTime.class)
throw new InvalidTriggerConfigurationException("Failed to evaluate schedule 'dates'. Cause: " + e.getMessage()); .stream()
} .sorted()
}).orElseGet(() -> ZonedDateTime.now().plusYears(1)); .findFirst()
.orElse(ZonedDateTime.now()))
.truncatedTo(ChronoUnit.SECONDS);
} catch (IllegalVariableEvaluationException e) {
log.warn("Failed to evaluate schedule dates for trigger '{}': {}", this.getId(), e.getMessage());
return ZonedDateTime.now().plusYears(1);
}
} }
@Override @Override
public ZonedDateTime nextEvaluationDate() { public ZonedDateTime nextEvaluationDate() {
// TODO this may be the next date from now? // TODO this may be the next date from now?
@@ -122,17 +139,9 @@ public class ScheduleOnDates extends AbstractTrigger implements Schedulable, Tri
return previousDates.isEmpty() ? ZonedDateTime.now() : previousDates.getFirst(); return previousDates.isEmpty() ? ZonedDateTime.now() : previousDates.getFirst();
} }
private ZonedDateTime withTimeZone(ZonedDateTime date) { private Optional<ZonedDateTime> nextDate(RunContext runContext, Predicate<ZonedDateTime> filter) throws IllegalVariableEvaluationException {
if (this.timezone == null) { return runContext.render(dates).asList(ZonedDateTime.class).stream().sorted()
return date; .filter(date -> filter.test(date))
}
return date.withZoneSameInstant(ZoneId.of(this.timezone));
}
private Optional<ZonedDateTime> nextDate(RunContext runContext, Predicate<ZonedDateTime> predicate) throws IllegalVariableEvaluationException {
return runContext.render(dates)
.asList(ZonedDateTime.class).stream().sorted()
.filter(predicate)
.map(throwFunction(date -> timezone == null ? date : date.withZoneSameInstant(ZoneId.of(runContext.render(timezone))))) .map(throwFunction(date -> timezone == null ? date : date.withZoneSameInstant(ZoneId.of(runContext.render(timezone)))))
.findFirst() .findFirst()
.map(date -> date.truncatedTo(ChronoUnit.SECONDS)); .map(date -> date.truncatedTo(ChronoUnit.SECONDS));

View File

@@ -9,14 +9,10 @@
<property name="pattern" value="%date{HH:mm:ss}.%ms %highlight(%-5.5level) %magenta(%-12.36thread) %cyan(%-12.36logger{36}) %msg%n" /> <property name="pattern" value="%date{HH:mm:ss}.%ms %highlight(%-5.5level) %magenta(%-12.36thread) %cyan(%-12.36logger{36}) %msg%n" />
<logger name="io.kestra" level="INFO" /> <logger name="io.kestra" level="INFO" />
<logger name="flow" level="INFO" />
<!-- Flow execution logs - disabled by default --> <logger name="task" level="INFO" />
<logger name="flow" level="OFF" /> <logger name="execution" level="INFO" />
<logger name="trigger" level="INFO" />
<!-- Server loggers -->
<logger name="worker" level="INFO" />
<logger name="executor" level="INFO" />
<logger name="scheduler" level="INFO" />
<logger name="io.kestra.ee.runner.kafka.services.KafkaConsumerService" level="WARN" /> <logger name="io.kestra.ee.runner.kafka.services.KafkaConsumerService" level="WARN" />
<logger name="io.kestra.ee.runner.kafka.services.KafkaProducerService" level="WARN" /> <logger name="io.kestra.ee.runner.kafka.services.KafkaProducerService" level="WARN" />

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.chart
name: "chart"
title: "Chart"
description: "Tasks that render dashboard charts from Kestra data sources."
body: "Use these chart widgets to visualize metrics, executions, or flow trends in dashboards; pair them with dashboard data queries and configure aggregations, groupings, and chart options for Bar, Pie, Time Series, KPI, or Table outputs."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.condition
name: "condition"
title: "Condition"
description: "Tasks that evaluate conditions to control flow execution or triggers."
body: "Use these predicates to gate tasks or triggers based on time windows, calendars, execution metadata, labels, namespaces, retries, or custom expressions; configure required parameters such as allowed states, namespaces, date ranges, or JEXL expressions to return a true/false result."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.data
name: "data"
title: "Data"
description: "Tasks that fetch Kestra executions, flows, logs, metrics, and triggers as datasets for dashboards."
body: "These data providers query Kestra repositories with filters and aggregations to feed dashboard charts; configure columns and fields (such as namespace, state, timestamp, or labels) plus any filters to shape the returned dataset for visualization."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.debug
name: "debug"
title: "Debug"
description: "Tasks that emit debug output while you develop a flow."
body: "Echo and Return help inspect variables and payloads or short-circuit execution during testing; provide the message or value to output so downstream tasks can see exactly what is being passed around."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.execution
name: "execution"
title: "Execution"
description: "Tasks that manage the lifecycle and context of a running execution."
body: "Use these tasks to assert expectations, set or unset variables, add labels, fail, exit, resume, or purge executions; supply required properties such as variable maps, label key/values, or retention rules before altering execution state."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.flow
name: "flow"
title: "Flow"
description: "Tasks that orchestrate control flow within a Kestra pipeline."
body: "Sequence, branch, loop, parallelize, or nest subflows/templates using these primitives; define embedded task lists, values for switches, iteration collections, working directories, and loop exit criteria to structure complex workflows cleanly."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.http
name: "http"
title: "HTTP"
description: "Tasks that interact with HTTP endpoints."
body: "Perform requests, downloads, or webhook triggers with configurable methods, headers, authentication, and payloads; provide the target URI plus any body or query parameters, and use response handling options to store results for downstream tasks."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core
name: "core"
title: "Core Plugins and tasks"
description: "Tasks that provide Kestra's built-in orchestration, I/O, and observability capabilities."
body: "Core plugins cover control-flow, execution management, triggers, storage, HTTP, metrics, logging, templating, and dashboard widgets; combine these foundational tasks to build reliable workflows without adding external dependencies."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.kv
name: "kv"
title: "KV"
description: "Tasks that manage key-value pairs in Kestra's KV store."
body: "Set, get, list, version, and delete namespaced keys to share state across flows; specify the key path, value for writes, and optional namespace or TTL to control how data is stored, retrieved, and purged."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.log
name: "log"
title: "Log"
description: "Tasks that write, fetch, or purge Kestra logs."
body: "Emit structured log messages, retrieve stored logs, or clean up log storage; provide message content or log query filters and consider namespace or execution scoping when purging."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.metric
name: "metric"
title: "Metric"
description: "Tasks that publish custom metrics from flows."
body: "Send counters, gauges, and timing metrics to Kestra's metric store for dashboards and alerts; define the metric name, type, value, labels, and optional timestamp to record meaningful telemetry."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.namespace
name: "namespace"
title: "Namespace"
description: "Tasks that manage namespace files and versions."
body: "Upload, download, delete, purge, or version files stored in a namespace—useful for shipping assets or configs with flows; set the target namespace, paths or glob patterns, and purge behavior to control stored artifacts."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.output
name: "output"
title: "Output"
description: "Tasks that expose outputs from a flow."
body: "Use OutputValues to publish key-value outputs for downstream tasks or subflows; declare the output map and data types that consuming tasks should read."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.runner
name: "runner"
title: "Runner"
description: "Tasks that execute commands on the Kestra worker."
body: "Run shell processes with configurable command, environment, working directory, and input/output handling; ensure commands are idempotent and set expected exit codes or resource needs when invoking external binaries."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.storage
name: "storage"
title: "Storage"
description: "Tasks that manipulate files in Kestra's internal storage."
body: "Write, delete, concatenate, split, deduplicate, filter, reverse, size, or list files used by executions; provide source and target storage URIs and any encoding or line-handling options to transform stored data safely."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.templating
name: "templating"
title: "Templating"
description: "Tasks that render dynamic task specifications from templates."
body: "TemplatedTask lets you supply a Pebble-rendered YAML spec that is parsed and executed at runtime; provide the `spec` property with a valid runnable task definition and avoid recursive templating when composing dynamic tasks."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.trigger
name: "trigger"
title: "Trigger"
description: "Tasks that start flows from schedules or events."
body: "Define cron-based schedules, specific date triggers, webhooks, namespace flow triggers, or toggles; set required properties like cron expressions, webhook secrets, and target flow references to control when executions fire."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,14 +1,16 @@
package io.kestra.core.contexts; package io.kestra.core.contexts;
import io.micronaut.test.extensions.junit5.annotation.MicronautTest; import io.kestra.core.junit.annotations.KestraTest;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject; import jakarta.inject.Inject;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import java.util.Map;
import java.util.Optional; import java.util.Optional;
import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThat;
@MicronautTest @KestraTest
class KestraContextTest { class KestraContextTest {
@Inject @Inject

View File

@@ -1,7 +1,8 @@
package io.kestra.core.contexts; package io.kestra.core.contexts;
import io.micronaut.test.extensions.junit5.annotation.MicronautTest; import io.kestra.core.junit.annotations.KestraTest;
import jakarta.inject.Inject; import jakarta.inject.Inject;
import org.hamcrest.Matchers;
import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
@@ -9,7 +10,7 @@ import java.util.List;
import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThat;
@MicronautTest(environments = "maven") @KestraTest(environments = "maven")
class MavenPluginRepositoryConfigTest { class MavenPluginRepositoryConfigTest {
@Inject @Inject

View File

@@ -10,7 +10,7 @@ import io.kestra.plugin.core.debug.Return;
import io.kestra.plugin.core.flow.Dag; import io.kestra.plugin.core.flow.Dag;
import io.kestra.plugin.core.flow.Subflow; import io.kestra.plugin.core.flow.Subflow;
import io.kestra.plugin.core.state.Set; import io.kestra.plugin.core.state.Set;
import io.micronaut.test.extensions.junit5.annotation.MicronautTest; import io.kestra.core.junit.annotations.KestraTest;
import jakarta.inject.Inject; import jakarta.inject.Inject;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
@@ -26,7 +26,7 @@ import org.junit.jupiter.api.parallel.ExecutionMode;
import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThat;
@MicronautTest @KestraTest
@Execution(ExecutionMode.SAME_THREAD) @Execution(ExecutionMode.SAME_THREAD)
class DocumentationGeneratorTest { class DocumentationGeneratorTest {
@Inject @Inject

View File

@@ -170,11 +170,10 @@ class JsonSchemaGeneratorTest {
Map<String, Object> jsonSchema = jsonSchemaGenerator.generate(AbstractTrigger.class, AbstractTrigger.class); Map<String, Object> jsonSchema = jsonSchemaGenerator.generate(AbstractTrigger.class, AbstractTrigger.class);
assertThat((Map<String, Object>) jsonSchema.get("properties"), allOf( assertThat((Map<String, Object>) jsonSchema.get("properties"), allOf(
Matchers.aMapWithSize(4), Matchers.aMapWithSize(3),
hasKey("conditions"), hasKey("conditions"),
hasKey("stopAfter"), hasKey("stopAfter"),
hasKey("type"), hasKey("type")
hasKey("allowConcurrent")
)); ));
}); });
} }

View File

@@ -2,7 +2,6 @@ package io.kestra.core.models;
import io.kestra.core.junit.annotations.KestraTest; import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.models.validations.ModelValidator; import io.kestra.core.models.validations.ModelValidator;
import io.micronaut.test.extensions.junit5.annotation.MicronautTest;
import jakarta.inject.Inject; import jakarta.inject.Inject;
import jakarta.validation.ConstraintViolationException; import jakarta.validation.ConstraintViolationException;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
@@ -13,7 +12,7 @@ import java.util.Optional;
import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThat;
@MicronautTest @KestraTest
class LabelTest { class LabelTest {
@Inject @Inject
private ModelValidator modelValidator; private ModelValidator modelValidator;
@@ -135,47 +134,4 @@ class LabelTest {
Optional<ConstraintViolationException> emptyKeyLabelResult = modelValidator.isValid(new Label("", "bar")); Optional<ConstraintViolationException> emptyKeyLabelResult = modelValidator.isValid(new Label("", "bar"));
assertThat(emptyKeyLabelResult.isPresent()).isTrue(); assertThat(emptyKeyLabelResult.isPresent()).isTrue();
} }
@Test
void shouldValidateValidLabelKeys() {
// Valid keys: start with lowercase; may contain letters, numbers, hyphens, underscores, periods
assertThat(modelValidator.isValid(new Label("foo", "bar")).isPresent()).isFalse();
assertThat(modelValidator.isValid(new Label("foo-bar", "value")).isPresent()).isFalse();
assertThat(modelValidator.isValid(new Label("foo_bar", "value")).isPresent()).isFalse();
assertThat(modelValidator.isValid(new Label("foo123", "value")).isPresent()).isFalse();
assertThat(modelValidator.isValid(new Label("foo-bar_baz123", "value")).isPresent()).isFalse();
assertThat(modelValidator.isValid(new Label("a", "value")).isPresent()).isFalse();
assertThat(modelValidator.isValid(new Label("foo.bar", "value")).isPresent()).isFalse(); // dot is allowed
}
@Test
void shouldRejectInvalidLabelKeys() {
Optional<ConstraintViolationException> spaceResult = modelValidator.isValid(new Label("foo bar", "value"));
assertThat(spaceResult.isPresent()).isTrue();
Optional<ConstraintViolationException> uppercaseResult = modelValidator.isValid(new Label("Foo", "value"));
assertThat(uppercaseResult.isPresent()).isTrue();
Optional<ConstraintViolationException> emojiResult = modelValidator.isValid(new Label("💩", "value"));
assertThat(emojiResult.isPresent()).isTrue();
Optional<ConstraintViolationException> atSignResult = modelValidator.isValid(new Label("foo@bar", "value"));
assertThat(atSignResult.isPresent()).isTrue();
Optional<ConstraintViolationException> colonResult = modelValidator.isValid(new Label("foo:bar", "value"));
assertThat(colonResult.isPresent()).isTrue();
Optional<ConstraintViolationException> hyphenStartResult = modelValidator.isValid(new Label("-foo", "value"));
assertThat(hyphenStartResult.isPresent()).isTrue();
Optional<ConstraintViolationException> underscoreStartResult = modelValidator.isValid(new Label("_foo", "value"));
assertThat(underscoreStartResult.isPresent()).isTrue();
Optional<ConstraintViolationException> zeroResult = modelValidator.isValid(new Label("0", "value"));
assertThat(zeroResult.isPresent()).isTrue();
Optional<ConstraintViolationException> digitStartResult = modelValidator.isValid(new Label("9test", "value"));
assertThat(digitStartResult.isPresent()).isTrue();
}
} }

View File

@@ -61,9 +61,6 @@ public class QueryFilterTest {
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.ENDS_WITH).build(), Resource.EXECUTION), Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.ENDS_WITH).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.CONTAINS).build(), Resource.EXECUTION), Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.CONTAINS).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.REGEX).build(), Resource.EXECUTION), Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.REGEX).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.IN).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.NOT_IN).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.PREFIX).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.START_DATE).operation(Op.EQUALS).build(), Resource.EXECUTION), Arguments.of(QueryFilter.builder().field(Field.START_DATE).operation(Op.EQUALS).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.START_DATE).operation(Op.NOT_EQUALS).build(), Resource.EXECUTION), Arguments.of(QueryFilter.builder().field(Field.START_DATE).operation(Op.NOT_EQUALS).build(), Resource.EXECUTION),
@@ -171,6 +168,9 @@ public class QueryFilterTest {
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.LESS_THAN).build(), Resource.EXECUTION), Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.LESS_THAN).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.GREATER_THAN_OR_EQUAL_TO).build(), Resource.EXECUTION), Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.GREATER_THAN_OR_EQUAL_TO).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.LESS_THAN_OR_EQUAL_TO).build(), Resource.EXECUTION), Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.LESS_THAN_OR_EQUAL_TO).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.IN).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.NOT_IN).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.PREFIX).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.START_DATE).operation(Op.IN).build(), Resource.EXECUTION), Arguments.of(QueryFilter.builder().field(Field.START_DATE).operation(Op.IN).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.START_DATE).operation(Op.NOT_IN).build(), Resource.EXECUTION), Arguments.of(QueryFilter.builder().field(Field.START_DATE).operation(Op.NOT_IN).build(), Resource.EXECUTION),

View File

@@ -185,21 +185,4 @@ class FlowTest {
return YamlParser.parse(file, Flow.class); return YamlParser.parse(file, Flow.class);
} }
@Test
void illegalNamespaceUpdate() {
Flow original = Flow.builder()
.id("my-flow")
.namespace("io.kestra.prod")
.tasks(List.of(Log.builder().id("log").type(Log.class.getName()).message("hello").build()))
.build();
Flow updated = original.toBuilder()
.namespace("io.kestra.dev")
.build();
Optional<ConstraintViolationException> validate = original.validateUpdate(updated);
assertThat(validate.isPresent()).isTrue();
assertThat(validate.get().getMessage()).contains("Illegal namespace update");
}
} }

View File

@@ -1,20 +1,14 @@
package io.kestra.core.models.property; package io.kestra.core.models.property;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.core.JsonProcessingException;
import io.kestra.core.context.TestRunContextFactory; import io.kestra.core.context.TestRunContextFactory;
import io.kestra.core.exceptions.IllegalVariableEvaluationException; import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.serializers.FileSerde; import io.kestra.core.serializers.FileSerde;
import io.kestra.core.serializers.JacksonMapper;
import io.kestra.core.storages.StorageInterface; import io.kestra.core.storages.StorageInterface;
import io.micronaut.core.annotation.Introspected;
import io.micronaut.test.extensions.junit5.annotation.MicronautTest;
import jakarta.inject.Inject; import jakarta.inject.Inject;
import jakarta.validation.ConstraintViolationException; import jakarta.validation.ConstraintViolationException;
import lombok.Builder; import lombok.Builder;
import lombok.Getter; import lombok.Getter;
import lombok.NoArgsConstructor;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.slf4j.event.Level; import org.slf4j.event.Level;
import reactor.core.publisher.Flux; import reactor.core.publisher.Flux;
@@ -25,14 +19,13 @@ import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Optional;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT; import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import static java.util.Map.entry; import static java.util.Map.entry;
import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertThrows;
@MicronautTest @KestraTest
class PropertyTest { class PropertyTest {
@Inject @Inject
@@ -369,43 +362,10 @@ class PropertyTest {
assertThat(output.getMessages().getFirst().getValue()).isEqualTo("value1"); assertThat(output.getMessages().getFirst().getValue()).isEqualTo("value1");
} }
@Test
void jsonSubtype() throws JsonProcessingException, IllegalVariableEvaluationException {
Optional<WithSubtype> rendered = runContextFactory.of().render(
Property.<WithSubtype>ofExpression(JacksonMapper.ofJson().writeValueAsString(new MySubtype()))
).as(WithSubtype.class);
assertThat(rendered).isPresent();
assertThat(rendered.get()).isInstanceOf(MySubtype.class);
List<WithSubtype> renderedList = runContextFactory.of().render(
Property.<List<WithSubtype>>ofExpression(JacksonMapper.ofJson().writeValueAsString(List.of(new MySubtype())))
).asList(WithSubtype.class);
assertThat(renderedList).hasSize(1);
assertThat(renderedList.get(0)).isInstanceOf(MySubtype.class);
}
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", visible = true, include = JsonTypeInfo.As.EXISTING_PROPERTY)
@JsonSubTypes({
@JsonSubTypes.Type(value = MySubtype.class, name = "mySubtype")
})
@Getter
@NoArgsConstructor
@Introspected
public abstract static class WithSubtype {
abstract public String getType();
}
@Getter
public static class MySubtype extends WithSubtype {
private final String type = "mySubtype";
}
@Builder @Builder
@Getter @Getter
private static class TestObj { private static class TestObj {
private String key; private String key;
private String value; private String value;
} }
} }

View File

@@ -1,11 +1,12 @@
package io.kestra.core.models.property; package io.kestra.core.models.property;
import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.runners.*; import io.kestra.core.runners.*;
import io.kestra.core.storages.Namespace; import io.kestra.core.storages.Namespace;
import io.kestra.core.storages.NamespaceFactory; import io.kestra.core.storages.NamespaceFactory;
import io.kestra.core.storages.StorageContext;
import io.kestra.core.storages.StorageInterface; import io.kestra.core.storages.StorageInterface;
import io.kestra.core.utils.IdUtils; import io.kestra.core.utils.IdUtils;
import io.micronaut.test.extensions.junit5.annotation.MicronautTest;
import jakarta.inject.Inject; import jakarta.inject.Inject;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.mockito.Mockito; import org.mockito.Mockito;
@@ -27,7 +28,7 @@ import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertThrows;
@MicronautTest @KestraTest
class URIFetcherTest { class URIFetcherTest {
@Inject @Inject
private StorageInterface storage; private StorageInterface storage;

Some files were not shown because too many files have changed in this diff Show More