Compare commits

..

4 Commits

Author SHA1 Message Date
brian.mulier
811af5c5a1 WIP 2025-12-22 12:10:41 +01:00
brian.mulier
1b73ddd097 WIP 2025-12-22 12:10:41 +01:00
brian.mulier
9803ecc6d0 fix(core): close FlowListeners properly 2025-12-22 12:10:41 +01:00
brian.mulier
b4871fcb15 fix(tests): remove test UncaughtExceptionHandler 2025-12-22 12:10:41 +01:00
145 changed files with 2414 additions and 4359 deletions

View File

@@ -63,9 +63,9 @@ You can also build it from a terminal using `./gradlew build`, the Gradle wrappe
- Configure the following environment variables:
- `MICRONAUT_ENVIRONMENTS`: can be set to any string and will load a custom configuration file in `cli/src/main/resources/application-{env}.yml`.
- `KESTRA_PLUGINS_PATH`: is the path where you will save plugins as Jar and will be load on startup.
- See the screenshot below for an example: ![Intellij IDEA Configuration ](./assets/run-app.png)
- See the screenshot below for an example: ![Intellij IDEA Configuration ](run-app.png)
- If you encounter **JavaScript memory heap out** error during startup, configure `NODE_OPTIONS` environment variable with some large value.
- Example `NODE_OPTIONS: --max-old-space-size=4096` or `NODE_OPTIONS: --max-old-space-size=8192` ![Intellij IDEA Configuration ](./assets/node_option_env_var.png)
- Example `NODE_OPTIONS: --max-old-space-size=4096` or `NODE_OPTIONS: --max-old-space-size=8192` ![Intellij IDEA Configuration ](node_option_env_var.png)
- The server starts by default on port 8080 and is reachable on `http://localhost:8080`
If you want to launch all tests, you need Python and some packages installed on your machine, on Ubuntu you can install them with:

View File

Before

Width:  |  Height:  |  Size: 130 KiB

After

Width:  |  Height:  |  Size: 130 KiB

View File

Before

Width:  |  Height:  |  Size: 210 KiB

After

Width:  |  Height:  |  Size: 210 KiB

View File

@@ -21,7 +21,7 @@ plugins {
// test
id "com.adarshr.test-logger" version "4.0.0"
id "org.sonarqube" version "7.2.2.6593"
id "org.sonarqube" version "7.2.1.6560"
id 'jacoco-report-aggregation'
// helper
@@ -171,23 +171,13 @@ allprojects {
subprojects {subProj ->
if (subProj.name != 'platform' && subProj.name != 'jmh-benchmarks') {
apply plugin: "com.adarshr.test-logger"
apply plugin: 'jacoco'
java {
sourceCompatibility = targetJavaVersion
targetCompatibility = targetJavaVersion
}
configurations {
agent {
canBeResolved = true
canBeConsumed = true
}
mockitoAgent
}
dependencies {
// Platform
testAnnotationProcessor enforcedPlatform(project(":platform"))
@@ -206,9 +196,6 @@ subprojects {subProj ->
testImplementation "org.junit.jupiter:junit-jupiter-params"
testImplementation "org.junit-pioneer:junit-pioneer"
testImplementation 'org.mockito:mockito-junit-jupiter'
mockitoAgent("org.mockito:mockito-core:5.21.0") {
transitive = false // just the core
}
// hamcrest
testImplementation 'org.hamcrest:hamcrest'
@@ -217,17 +204,9 @@ subprojects {subProj ->
//assertj
testImplementation 'org.assertj:assertj-core'
agent "org.aspectj:aspectjweaver:1.9.25.1"
testImplementation platform("io.qameta.allure:allure-bom")
testImplementation "io.qameta.allure:allure-junit5"
}
def commonTestConfig = { Test t ->
t.ignoreFailures = true
t.finalizedBy jacocoTestReport
// set Xmx for test workers
t.maxHeapSize = '4g'
@@ -253,52 +232,6 @@ subprojects {subProj ->
// }
}
tasks.register('integrationTest', Test) { Test t ->
description = 'Runs integration tests'
group = 'verification'
useJUnitPlatform {
includeTags 'integration'
}
testClassesDirs = sourceSets.test.output.classesDirs
classpath = sourceSets.test.runtimeClasspath
reports {
junitXml.required = true
junitXml.outputPerTestCase = true
junitXml.mergeReruns = true
junitXml.includeSystemErrLog = true
junitXml.outputLocation = layout.buildDirectory.dir("test-results/test")
}
// Integration tests typically not parallel (but you can enable)
maxParallelForks = 1
commonTestConfig(t)
}
tasks.register('unitTest', Test) { Test t ->
description = 'Runs unit tests'
group = 'verification'
useJUnitPlatform {
excludeTags 'flaky', 'integration'
}
testClassesDirs = sourceSets.test.output.classesDirs
classpath = sourceSets.test.runtimeClasspath
reports {
junitXml.required = true
junitXml.outputPerTestCase = true
junitXml.mergeReruns = true
junitXml.includeSystemErrLog = true
junitXml.outputLocation = layout.buildDirectory.dir("test-results/test")
}
commonTestConfig(t)
}
tasks.register('flakyTest', Test) { Test t ->
group = 'verification'
description = 'Runs tests tagged @Flaky but does not fail the build.'
@@ -306,6 +239,7 @@ subprojects {subProj ->
useJUnitPlatform {
includeTags 'flaky'
}
ignoreFailures = true
reports {
junitXml.required = true
@@ -315,13 +249,10 @@ subprojects {subProj ->
junitXml.outputLocation = layout.buildDirectory.dir("test-results/flakyTest")
}
commonTestConfig(t)
}
// test task (default)
tasks.named('test', Test) { Test t ->
group = 'verification'
description = 'Runs all non-flaky tests.'
test {
useJUnitPlatform {
excludeTags 'flaky'
}
@@ -332,15 +263,10 @@ subprojects {subProj ->
junitXml.includeSystemErrLog = true
junitXml.outputLocation = layout.buildDirectory.dir("test-results/test")
}
commonTestConfig(t)
jvmArgs = [
"-javaagent:${configurations.agent.singleFile}",
"-javaagent:${configurations.mockitoAgent.singleFile}"
]
}
commonTestConfig(it)
tasks.named('check') {
dependsOn(tasks.named('test'))// default behaviour
finalizedBy(tasks.named('flakyTest'))
}
testlogger {
@@ -356,25 +282,83 @@ subprojects {subProj ->
}
}
/**********************************************************************************************************************\
* End-to-End Tests
**********************************************************************************************************************/
def e2eTestsCheck = tasks.register('e2eTestsCheck') {
group = 'verification'
description = "Runs the 'check' task for all e2e-tests modules"
doFirst {
project.ext.set("e2e-tests", true)
}
}
subprojects {
// Add e2e-tests modules check tasks to e2eTestsCheck
if (project.name.startsWith("e2e-tests")) {
test {
onlyIf {
project.hasProperty("e2e-tests")
}
}
}
afterEvaluate {
// Add e2e-tests modules check tasks to e2eTestsCheck
if (project.name.startsWith("e2e-tests")) {
e2eTestsCheck.configure {
finalizedBy(check)
}
}
}
}
/**********************************************************************************************************************\
* Allure Reports
**********************************************************************************************************************/
subprojects {
if (it.name != 'platform' && it.name != 'jmh-benchmarks') {
dependencies {
testImplementation platform("io.qameta.allure:allure-bom")
testImplementation "io.qameta.allure:allure-junit5"
}
configurations {
agent {
canBeResolved = true
canBeConsumed = true
}
}
dependencies {
agent "org.aspectj:aspectjweaver:1.9.25.1"
}
test {
jvmArgs = ["-javaagent:${configurations.agent.singleFile}"]
}
}
}
/**********************************************************************************************************************\
* Jacoco
**********************************************************************************************************************/
subprojects {
if (it.name != 'platform' && it.name != 'jmh-benchmarks') {
apply plugin: 'jacoco'
test {
finalizedBy jacocoTestReport
}
jacocoTestReport {
dependsOn test
}
}
}
tasks.named('check') {
dependsOn tasks.named('testCodeCoverageReport', JacocoReport)
finalizedBy jacocoTestReport
}
tasks.register('unitTest') {
// No jacocoTestReport here, because it depends by default on :test,
// and that would make :test being run twice in our CI.
// In practice the report will be generated later in the CI by :check.
}
tasks.register('integrationTest') {
dependsOn tasks.named('testCodeCoverageReport', JacocoReport)
finalizedBy jacocoTestReport
}
tasks.register('flakyTest') {
dependsOn tasks.named('testCodeCoverageReport', JacocoReport)
finalizedBy jacocoTestReport
}
tasks.named('testCodeCoverageReport') {

View File

@@ -81,7 +81,7 @@ public class MetadataMigrationService {
}));
}
public void nsFilesMigration(boolean verbose) throws IOException {
public void nsFilesMigration() throws IOException {
this.namespacesPerTenant().entrySet().stream()
.flatMap(namespacesForTenant -> namespacesForTenant.getValue().stream().map(namespace -> Map.entry(namespacesForTenant.getKey(), namespace)))
.flatMap(throwFunction(namespaceForTenant -> {
@@ -92,9 +92,6 @@ public class MetadataMigrationService {
.forEach(throwConsumer(nsFileMetadata -> {
if (namespaceFileMetadataRepository.findByPath(nsFileMetadata.getTenantId(), nsFileMetadata.getNamespace(), nsFileMetadata.getPath()).isEmpty()) {
namespaceFileMetadataRepository.save(nsFileMetadata);
if (verbose) {
System.out.println("Migrated namespace file metadata: " + nsFileMetadata.getNamespace() + " - " + nsFileMetadata.getPath());
}
}
}));
}

View File

@@ -15,14 +15,11 @@ public class NsFilesMetadataMigrationCommand extends AbstractCommand {
@Inject
private Provider<MetadataMigrationService> metadataMigrationServiceProvider;
@CommandLine.Option(names = {"-lm", "--log-migrations"}, description = "Log all files that are migrated", defaultValue = "false")
public boolean logMigrations = false;
@Override
public Integer call() throws Exception {
super.call();
try {
metadataMigrationServiceProvider.get().nsFilesMigration(logMigrations);
metadataMigrationServiceProvider.get().nsFilesMigration();
} catch (Exception e) {
System.err.println("❌ Namespace Files Metadata migration failed: " + e.getMessage());
e.printStackTrace();

View File

@@ -4,6 +4,7 @@ import io.micronaut.configuration.picocli.PicocliRunner;
import io.micronaut.context.ApplicationContext;
import io.micronaut.context.env.Environment;
import org.apache.commons.io.FileUtils;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import java.io.File;
@@ -14,6 +15,7 @@ import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
import static org.assertj.core.api.Assertions.assertThat;
@@ -23,8 +25,7 @@ class PluginDocCommandTest {
@Test
void run() throws IOException, URISyntaxException {
var testDirectoryName = PluginListCommandTest.class.getSimpleName();
Path pluginsPath = Files.createTempDirectory(testDirectoryName + "_pluginsPath_");
Path pluginsPath = Files.createTempDirectory(PluginListCommandTest.class.getSimpleName());
pluginsPath.toFile().deleteOnExit();
FileUtils.copyFile(
@@ -33,7 +34,7 @@ class PluginDocCommandTest {
new File(URI.create("file://" + pluginsPath.toAbsolutePath() + "/" + PLUGIN_TEMPLATE_TEST))
);
Path docPath = Files.createTempDirectory(testDirectoryName + "_docPath_");
Path docPath = Files.createTempDirectory(PluginInstallCommandTest.class.getSimpleName());
docPath.toFile().deleteOnExit();
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
@@ -42,9 +43,9 @@ class PluginDocCommandTest {
List<Path> files = Files.list(docPath).toList();
assertThat(files.stream().map(path -> path.getFileName().toString())).contains("plugin-template-test");
// don't know why, but sometimes there is an addition "plugin-notifications" directory present
var directory = files.stream().filter(path -> "plugin-template-test".equals(path.getFileName().toString())).findFirst().get().toFile();
assertThat(files.size()).isEqualTo(1);
assertThat(files.getFirst().getFileName().toString()).isEqualTo("plugin-template-test");
var directory = files.getFirst().toFile();
assertThat(directory.isDirectory()).isTrue();
assertThat(directory.listFiles().length).isEqualTo(3);

View File

@@ -25,7 +25,7 @@ dependencies {
api "io.projectreactor:reactor-core"
// awaitility
api "org.awaitility:awaitility"
api 'org.awaitility:awaitility'
// micronaut
api "io.micronaut.data:micronaut-data-model"

View File

@@ -1,25 +0,0 @@
package io.kestra.core.assets;
import io.kestra.core.models.assets.Asset;
import io.kestra.core.runners.AssetEmitter;
import jakarta.inject.Singleton;
import java.util.ArrayList;
import java.util.List;
@Singleton
public class AssetManagerFactory {
public AssetEmitter of(boolean enabled) {
return new AssetEmitter() {
@Override
public void upsert(Asset asset) {
throw new UnsupportedOperationException();
}
@Override
public List<Asset> outputs() {
return new ArrayList<>();
}
};
}
}

View File

@@ -1,31 +0,0 @@
package io.kestra.core.assets;
import io.kestra.core.models.assets.Asset;
import io.kestra.core.models.assets.AssetIdentifier;
import io.kestra.core.models.assets.AssetUser;
import io.kestra.core.queues.QueueException;
import io.micronaut.context.annotation.Secondary;
import jakarta.inject.Singleton;
import java.util.List;
public interface AssetService {
void asyncUpsert(AssetUser assetUser, Asset asset) throws QueueException;
void assetLineage(AssetUser assetUser, List<AssetIdentifier> inputs, List<AssetIdentifier> outputs) throws QueueException;
@Singleton
@Secondary
class NoopAssetService implements AssetService {
@Override
public void asyncUpsert(AssetUser assetUser, Asset asset) throws QueueException {
// no-op
}
@Override
public void assetLineage(AssetUser assetUser, List<AssetIdentifier> inputs, List<AssetIdentifier> outputs) {
// no-op
}
}
}

View File

@@ -22,7 +22,6 @@ import com.github.victools.jsonschema.module.swagger2.Swagger2Module;
import com.google.common.collect.ImmutableMap;
import io.kestra.core.models.annotations.Plugin;
import io.kestra.core.models.annotations.PluginProperty;
import io.kestra.core.models.assets.Asset;
import io.kestra.core.models.conditions.Condition;
import io.kestra.core.models.conditions.ScheduleCondition;
import io.kestra.core.models.dashboards.DataFilter;
@@ -64,7 +63,7 @@ import static io.kestra.core.serializers.JacksonMapper.MAP_TYPE_REFERENCE;
@Singleton
@Slf4j
public class JsonSchemaGenerator {
private static final List<Class<?>> TYPES_RESOLVED_AS_STRING = List.of(Duration.class, LocalTime.class, LocalDate.class, LocalDateTime.class, ZonedDateTime.class, OffsetDateTime.class, OffsetTime.class);
private static final List<Class<?>> SUBTYPE_RESOLUTION_EXCLUSION_FOR_PLUGIN_SCHEMA = List.of(Task.class, AbstractTrigger.class);
@@ -277,10 +276,10 @@ public class JsonSchemaGenerator {
.with(Option.DEFINITION_FOR_MAIN_SCHEMA)
.with(Option.PLAIN_DEFINITION_KEYS)
.with(Option.ALLOF_CLEANUP_AT_THE_END);
// HACK: Registered a custom JsonUnwrappedDefinitionProvider prior to the JacksonModule
// HACK: Registered a custom JsonUnwrappedDefinitionProvider prior to the JacksonModule
// to be able to return an CustomDefinition with an empty node when the ResolvedType can't be found.
builder.forTypesInGeneral().withCustomDefinitionProvider(new JsonUnwrappedDefinitionProvider() {
builder.forTypesInGeneral().withCustomDefinitionProvider(new JsonUnwrappedDefinitionProvider(){
@Override
public CustomDefinition provideCustomSchemaDefinition(ResolvedType javaType, SchemaGenerationContext context) {
try {
@@ -322,7 +321,7 @@ public class JsonSchemaGenerator {
// inline some type
builder.forTypesInGeneral()
.withCustomDefinitionProvider(new CustomDefinitionProviderV2() {
@Override
public CustomDefinition provideCustomSchemaDefinition(ResolvedType javaType, SchemaGenerationContext context) {
if (javaType.isInstanceOf(Map.class) || javaType.isInstanceOf(Enum.class)) {
@@ -590,8 +589,7 @@ public class JsonSchemaGenerator {
// The `const` property is used by editors for auto-completion based on that schema.
builder.forTypesInGeneral().withTypeAttributeOverride((collectedTypeAttributes, scope, context) -> {
final Class<?> pluginType = scope.getType().getErasedType();
Plugin pluginAnnotation = pluginType.getAnnotation(Plugin.class);
if (pluginAnnotation != null) {
if (pluginType.getAnnotation(Plugin.class) != null) {
ObjectNode properties = (ObjectNode) collectedTypeAttributes.get("properties");
if (properties != null) {
properties.set("type", context.getGeneratorConfig().createObjectNode()
@@ -766,14 +764,6 @@ public class JsonSchemaGenerator {
consumer.accept(typeContext.resolve(clz));
}
}).toList();
} else if (declaredType.getErasedType() == Asset.class) {
return getRegisteredPlugins()
.stream()
.flatMap(registeredPlugin -> registeredPlugin.getAssets().stream())
.filter(p -> allowedPluginTypes.isEmpty() || allowedPluginTypes.contains(p.getName()))
.filter(Predicate.not(io.kestra.core.models.Plugin::isInternal))
.map(typeContext::resolve)
.toList();
}
return null;

View File

@@ -103,48 +103,12 @@ public record QueryFilter(
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.IN, Op.NOT_IN, Op.CONTAINS);
}
},
METADATA("metadata") {
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.IN, Op.NOT_IN, Op.CONTAINS);
}
},
FLOW_ID("flowId") {
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.REGEX, Op.IN, Op.NOT_IN, Op.PREFIX);
}
},
FLOW_REVISION("flowRevision") {
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.IN, Op.NOT_IN);
}
},
ID("id") {
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.REGEX);
}
},
ASSET_ID("assetId") {
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.REGEX);
}
},
TYPE("type") {
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.REGEX);
}
},
CREATED("created") {
@Override
public List<Op> supportedOp() {
return List.of(Op.GREATER_THAN_OR_EQUAL_TO, Op.GREATER_THAN, Op.LESS_THAN_OR_EQUAL_TO, Op.LESS_THAN, Op.EQUALS, Op.NOT_EQUALS);
}
},
UPDATED("updated") {
@Override
public List<Op> supportedOp() {
@@ -187,30 +151,12 @@ public record QueryFilter(
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.IN, Op.NOT_IN);
}
},
TRIGGER_STATE("triggerState"){
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS, Op.NOT_EQUALS);
}
},
EXECUTION_ID("executionId") {
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.IN, Op.NOT_IN);
}
},
TASK_ID("taskId") {
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.IN, Op.NOT_IN);
}
},
TASK_RUN_ID("taskRunId") {
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.IN, Op.NOT_IN);
}
},
CHILD_FILTER("childFilter") {
@Override
public List<Op> supportedOp() {
@@ -325,7 +271,7 @@ public record QueryFilter(
@Override
public List<Field> supportedField() {
return List.of(Field.QUERY, Field.SCOPE, Field.NAMESPACE, Field.WORKER_ID, Field.FLOW_ID,
Field.START_DATE, Field.END_DATE, Field.TRIGGER_ID, Field.TRIGGER_STATE
Field.START_DATE, Field.END_DATE, Field.TRIGGER_ID
);
}
},
@@ -360,34 +306,6 @@ public record QueryFilter(
Field.UPDATED
);
}
},
ASSET {
@Override
public List<Field> supportedField() {
return List.of(
Field.QUERY,
Field.ID,
Field.TYPE,
Field.NAMESPACE,
Field.METADATA,
Field.UPDATED
);
}
},
ASSET_USAGE {
@Override
public List<Field> supportedField() {
return List.of(
Field.ASSET_ID,
Field.NAMESPACE,
Field.FLOW_ID,
Field.FLOW_REVISION,
Field.EXECUTION_ID,
Field.TASK_ID,
Field.TASK_RUN_ID,
Field.CREATED
);
}
};
public abstract List<Field> supportedField();

View File

@@ -1,111 +0,0 @@
package io.kestra.core.models.assets;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import io.kestra.core.models.DeletedInterface;
import io.kestra.core.models.HasUID;
import io.kestra.core.models.Plugin;
import io.kestra.core.utils.IdUtils;
import io.swagger.v3.oas.annotations.Hidden;
import jakarta.annotation.Nullable;
import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.Pattern;
import jakarta.validation.constraints.Size;
import lombok.Getter;
import lombok.NoArgsConstructor;
import java.time.Instant;
import java.util.*;
@Getter
@NoArgsConstructor
public abstract class Asset implements HasUID, DeletedInterface, Plugin {
@Hidden
@Pattern(regexp = "^[a-z0-9][a-z0-9_-]*")
protected String tenantId;
@Pattern(regexp = "^[a-z0-9][a-z0-9._-]*")
@Size(min = 1, max = 150)
protected String namespace;
@NotBlank
@Pattern(regexp = "^[a-zA-Z0-9][a-zA-Z0-9._-]*")
@Size(min = 1, max = 150)
protected String id;
@NotBlank
protected String type;
protected String displayName;
protected String description;
protected Map<String, Object> metadata;
@Nullable
@Hidden
private Instant created;
@Nullable
@Hidden
private Instant updated;
@Hidden
private boolean deleted;
public Asset(
String tenantId,
String namespace,
String id,
String type,
String displayName,
String description,
Map<String, Object> metadata,
Instant created,
Instant updated,
boolean deleted
) {
this.tenantId = tenantId;
this.namespace = namespace;
this.id = id;
this.type = type;
this.displayName = displayName;
this.description = description;
this.metadata = Optional.ofNullable(metadata).map(HashMap::new).orElse(new HashMap<>());
Instant now = Instant.now();
this.created = Optional.ofNullable(created).orElse(now);
this.updated = Optional.ofNullable(updated).orElse(now);
this.deleted = deleted;
}
public <T extends Asset> T toUpdated() {
if (this.created == null) {
this.created = Instant.now();
}
this.updated = Instant.now();
return (T) this;
}
public Asset toDeleted() {
this.deleted = true;
return this;
}
@JsonAnySetter
public void setMetadata(String name, Object value) {
metadata.put(name, value);
}
@Override
public String uid() {
return Asset.uid(tenantId, id);
}
public static String uid(String tenantId, String id) {
return IdUtils.fromParts(tenantId, id);
}
public Asset withTenantId(String tenantId) {
this.tenantId = tenantId;
return this;
}
}

View File

@@ -1,19 +0,0 @@
package io.kestra.core.models.assets;
import io.kestra.core.utils.IdUtils;
import io.swagger.v3.oas.annotations.Hidden;
public record AssetIdentifier(@Hidden String tenantId, @Hidden String namespace, String id){
public AssetIdentifier withTenantId(String tenantId) {
return new AssetIdentifier(tenantId, this.namespace, this.id);
}
public String uid() {
return IdUtils.fromParts(tenantId, id);
}
public static AssetIdentifier of(Asset asset) {
return new AssetIdentifier(asset.getTenantId(), asset.getNamespace(), asset.getId());
}
}

View File

@@ -1,18 +0,0 @@
package io.kestra.core.models.assets;
import io.kestra.core.models.HasUID;
import io.kestra.core.models.flows.FlowId;
import io.kestra.core.utils.IdUtils;
/**
* Represents an entity that used an asset
*/
public record AssetUser(String tenantId, String namespace, String flowId, Integer flowRevision, String executionId, String taskId, String taskRunId) implements HasUID {
public String uid() {
return IdUtils.fromParts(tenantId, namespace, flowId, String.valueOf(flowRevision), executionId, taskRunId);
}
public FlowId toFlowId() {
return FlowId.of(tenantId, namespace, flowId, flowRevision);
}
}

View File

@@ -1,22 +0,0 @@
package io.kestra.core.models.assets;
import com.fasterxml.jackson.annotation.JsonCreator;
import io.micronaut.core.annotation.Introspected;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import java.util.List;
import java.util.Optional;
@Getter
public class AssetsDeclaration extends AssetsInOut {
private boolean enableAuto;
@JsonCreator
public AssetsDeclaration(Boolean enableAuto, List<AssetIdentifier> inputs, List<Asset> outputs) {
super(inputs, outputs);
this.enableAuto = Optional.ofNullable(enableAuto).orElse(false);
}
}

View File

@@ -1,21 +0,0 @@
package io.kestra.core.models.assets;
import com.fasterxml.jackson.annotation.JsonCreator;
import lombok.Getter;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
@Getter
public class AssetsInOut {
private List<AssetIdentifier> inputs;
private List<Asset> outputs;
@JsonCreator
public AssetsInOut(List<AssetIdentifier> inputs, List<Asset> outputs) {
this.inputs = Optional.ofNullable(inputs).orElse(Collections.emptyList());
this.outputs = Optional.ofNullable(outputs).orElse(Collections.emptyList());
}
}

View File

@@ -1,32 +0,0 @@
package io.kestra.core.models.assets;
import com.fasterxml.jackson.annotation.JsonCreator;
import io.kestra.core.models.annotations.Plugin;
import io.swagger.v3.oas.annotations.Hidden;
import lombok.Builder;
import lombok.NoArgsConstructor;
import java.time.Instant;
import java.util.Map;
@NoArgsConstructor
@Plugin
@Hidden
public class Custom extends Asset {
@Builder
@JsonCreator
public Custom(
String tenantId,
String namespace,
String id,
String type,
String displayName,
String description,
Map<String, Object> metadata,
Instant created,
Instant updated,
boolean deleted
) {
super(tenantId, namespace, id, type, displayName, description, metadata, created, updated, deleted);
}
}

View File

@@ -1,32 +0,0 @@
package io.kestra.core.models.assets;
import com.fasterxml.jackson.annotation.JsonCreator;
import io.kestra.core.models.annotations.Plugin;
import io.swagger.v3.oas.annotations.Hidden;
import lombok.Builder;
import lombok.NoArgsConstructor;
import java.time.Instant;
import java.util.Map;
@NoArgsConstructor
@Plugin
public class External extends Asset {
public static final String ASSET_TYPE = External.class.getName();
@Builder
@JsonCreator
public External(
String tenantId,
String namespace,
String id,
String displayName,
String description,
Map<String, Object> metadata,
Instant created,
Instant updated,
boolean deleted
) {
super(tenantId, namespace, id, ASSET_TYPE, displayName, description, metadata, created, updated, deleted);
}
}

View File

@@ -2,7 +2,6 @@ package io.kestra.core.models.executions;
import com.fasterxml.jackson.annotation.JsonInclude;
import io.kestra.core.models.TenantInterface;
import io.kestra.core.models.assets.AssetsInOut;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.tasks.ResolvedTask;
import io.kestra.core.models.tasks.retrys.AbstractRetry;
@@ -58,10 +57,6 @@ public class TaskRun implements TenantInterface {
@Schema(implementation = Object.class)
Variables outputs;
@With
@Nullable
AssetsInOut assets;
@NotNull
State state;
@@ -92,7 +87,6 @@ public class TaskRun implements TenantInterface {
this.value,
this.attempts,
this.outputs,
this.assets,
this.state.withState(state),
this.iteration,
this.dynamic,
@@ -120,7 +114,6 @@ public class TaskRun implements TenantInterface {
this.value,
newAttempts,
this.outputs,
this.assets,
this.state.withState(state),
this.iteration,
this.dynamic,
@@ -144,7 +137,6 @@ public class TaskRun implements TenantInterface {
this.value,
newAttempts,
this.outputs,
this.assets,
this.state.withState(State.Type.FAILED),
this.iteration,
this.dynamic,
@@ -164,7 +156,6 @@ public class TaskRun implements TenantInterface {
.value(this.getValue())
.attempts(this.getAttempts())
.outputs(this.getOutputs())
.assets(this.getAssets())
.state(state == null ? this.getState() : state)
.iteration(this.getIteration())
.build();
@@ -194,11 +185,15 @@ public class TaskRun implements TenantInterface {
}
public TaskRunAttempt lastAttempt() {
if (this.attempts == null || this.attempts.isEmpty()) {
if (this.attempts == null) {
return null;
}
return this.attempts.getLast();
return this
.attempts
.stream()
.reduce((a, b) -> b)
.orElse(null);
}
public TaskRun onRunningResend() {
@@ -247,7 +242,6 @@ public class TaskRun implements TenantInterface {
", parentTaskRunId=" + this.getParentTaskRunId() +
", state=" + this.getState().getCurrent().toString() +
", outputs=" + this.getOutputs() +
", assets=" + this.getAssets() +
", attempts=" + this.getAttempts() +
")";
}
@@ -270,7 +264,8 @@ public class TaskRun implements TenantInterface {
* @return The next retry date, null if maxAttempt || maxDuration is reached
*/
public Instant nextRetryDate(AbstractRetry retry, Execution execution) {
if (this.attempts == null || this.attempts.isEmpty() || retry.getMaxAttempts() != null && execution.getMetadata().getAttemptNumber() >= retry.getMaxAttempts()) {
if (retry.getMaxAttempts() != null && execution.getMetadata().getAttemptNumber() >= retry.getMaxAttempts()) {
return null;
}
Instant base = this.lastAttempt().getState().maxDate();

View File

@@ -148,11 +148,6 @@ public class State {
return this.current.isTerminated();
}
@JsonIgnore
public boolean canBeRestarted() {
return this.current.isTerminated() || this.current.isPaused();
}
@JsonIgnore
public boolean isTerminatedNoFail() {
return this.current.isTerminatedNoFail();

View File

@@ -5,13 +5,11 @@ import com.fasterxml.jackson.annotation.JsonInclude;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.models.annotations.Plugin;
import io.kestra.core.models.annotations.PluginProperty;
import io.kestra.core.models.assets.AssetsDeclaration;
import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.property.Property;
import io.kestra.core.models.tasks.retrys.AbstractRetry;
import io.kestra.core.runners.RunContext;
import io.kestra.plugin.core.flow.WorkingDirectory;
import jakarta.annotation.Nullable;
import jakarta.validation.Valid;
import jakarta.validation.constraints.Size;
import lombok.Builder;
@@ -80,11 +78,6 @@ abstract public class Task implements TaskInterface {
@Valid
private Cache taskCache;
@PluginProperty(hidden = true, group = PluginProperty.CORE_GROUP)
@Valid
@Nullable
private Property<AssetsDeclaration> assets;
public Optional<Task> findById(String id) {
if (this.getId().equals(id)) {
return Optional.of(this);

View File

@@ -1,13 +1,10 @@
package io.kestra.core.models.tasks.runners;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.models.assets.Asset;
import io.kestra.core.models.executions.AbstractMetricEntry;
import io.kestra.core.queues.QueueException;
import io.kestra.core.runners.AssetEmitter;
import io.kestra.core.runners.RunContext;
import io.kestra.core.serializers.JacksonMapper;
import jakarta.inject.Singleton;
import org.slf4j.Logger;
import org.slf4j.event.Level;
import org.slf4j.spi.LoggingEventBuilder;
@@ -21,7 +18,6 @@ import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static io.kestra.core.runners.RunContextLogger.ORIGINAL_TIMESTAMP_KEY;
import static io.kestra.core.utils.Rethrow.throwConsumer;
/**
* Service for matching and capturing structured data from task execution logs.
@@ -80,18 +76,6 @@ public class TaskLogLineMatcher {
}
});
}
if (match.assets() != null) {
try {
AssetEmitter assetEmitter = runContext.assets();
match.assets().forEach(throwConsumer(assetEmitter::upsert));
} catch (IllegalVariableEvaluationException e) {
logger.warn("Unable to get asset emitter for log '{}'", data, e);
} catch (QueueException e) {
logger.warn("Unable to emit asset for log '{}'", data, e);
}
}
return match;
}
@@ -110,9 +94,8 @@ public class TaskLogLineMatcher {
public record TaskLogMatch(
Map<String, Object> outputs,
List<AbstractMetricEntry<?>> metrics,
List<LogLine> logs,
List<Asset> assets
) {
List<LogLine> logs
) {
@Override
public Map<String, Object> outputs() {
return Optional.ofNullable(outputs).orElse(Map.of());

View File

@@ -6,10 +6,8 @@ import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import io.kestra.core.models.Label;
import io.kestra.core.models.annotations.Plugin;
import io.kestra.core.models.annotations.PluginProperty;
import io.kestra.core.models.assets.AssetsDeclaration;
import io.kestra.core.models.conditions.Condition;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.property.Property;
import io.kestra.core.models.tasks.WorkerGroup;
import io.kestra.core.serializers.ListOrMapOfLabelDeserializer;
import io.kestra.core.serializers.ListOrMapOfLabelSerializer;
@@ -90,9 +88,6 @@ abstract public class AbstractTrigger implements TriggerInterface {
)
private boolean allowConcurrent = false;
@PluginProperty(hidden = true, group = PluginProperty.CORE_GROUP)
private Property<AssetsDeclaration> assets;
/**
* For backward compatibility: we rename minLogLevel to logLevel.
* @deprecated use {@link #logLevel} instead

View File

@@ -1,7 +1,6 @@
package io.kestra.core.plugins;
import io.kestra.core.models.Plugin;
import io.kestra.core.models.assets.Asset;
import jakarta.annotation.Nullable;
import jakarta.validation.constraints.NotNull;
import org.slf4j.Logger;

View File

@@ -2,7 +2,6 @@ package io.kestra.core.plugins;
import com.fasterxml.jackson.databind.module.SimpleModule;
import io.kestra.core.app.AppPluginInterface;
import io.kestra.core.models.assets.Asset;
import io.kestra.core.models.conditions.Condition;
import io.kestra.core.models.dashboards.DataFilter;
import io.kestra.core.models.dashboards.DataFilterKPI;
@@ -12,7 +11,6 @@ import io.kestra.core.models.tasks.Task;
import io.kestra.core.models.tasks.logs.LogExporter;
import io.kestra.core.models.tasks.runners.TaskRunner;
import io.kestra.core.models.triggers.AbstractTrigger;
import io.kestra.core.plugins.serdes.AssetDeserializer;
import io.kestra.core.plugins.serdes.PluginDeserializer;
import io.kestra.core.secret.SecretPluginInterface;
import io.kestra.core.storages.StorageInterface;
@@ -47,6 +45,5 @@ public class PluginModule extends SimpleModule {
addDeserializer(SecretPluginInterface.class, new PluginDeserializer<>());
addDeserializer(AppPluginInterface.class, new PluginDeserializer<>());
addDeserializer(LogExporter.class, new PluginDeserializer<>());
addDeserializer(Asset.class, new AssetDeserializer());
}
}

View File

@@ -3,7 +3,6 @@ package io.kestra.core.plugins;
import io.kestra.core.app.AppBlockInterface;
import io.kestra.core.app.AppPluginInterface;
import io.kestra.core.models.Plugin;
import io.kestra.core.models.assets.Asset;
import io.kestra.core.models.conditions.Condition;
import io.kestra.core.models.dashboards.DataFilter;
import io.kestra.core.models.dashboards.DataFilterKPI;
@@ -109,7 +108,6 @@ public class PluginScanner {
List<Class<? extends StorageInterface>> storages = new ArrayList<>();
List<Class<? extends SecretPluginInterface>> secrets = new ArrayList<>();
List<Class<? extends TaskRunner<?>>> taskRunners = new ArrayList<>();
List<Class<? extends Asset>> assets = new ArrayList<>();
List<Class<? extends AppPluginInterface>> apps = new ArrayList<>();
List<Class<? extends AppBlockInterface>> appBlocks = new ArrayList<>();
List<Class<? extends Chart<?>>> charts = new ArrayList<>();
@@ -157,10 +155,6 @@ public class PluginScanner {
//noinspection unchecked
taskRunners.add((Class<? extends TaskRunner<?>>) runner.getClass());
}
case Asset asset -> {
log.debug("Loading Asset plugin: '{}'", plugin.getClass());
assets.add(asset.getClass());
}
case AppPluginInterface app -> {
log.debug("Loading App plugin: '{}'", plugin.getClass());
apps.add(app.getClass());
@@ -229,7 +223,6 @@ public class PluginScanner {
.conditions(conditions)
.storages(storages)
.secrets(secrets)
.assets(assets)
.apps(apps)
.appBlocks(appBlocks)
.taskRunners(taskRunners)

View File

@@ -3,7 +3,6 @@ package io.kestra.core.plugins;
import io.kestra.core.app.AppBlockInterface;
import io.kestra.core.app.AppPluginInterface;
import io.kestra.core.models.annotations.PluginSubGroup;
import io.kestra.core.models.assets.Asset;
import io.kestra.core.models.conditions.Condition;
import io.kestra.core.models.dashboards.DataFilter;
import io.kestra.core.models.dashboards.DataFilterKPI;
@@ -40,7 +39,6 @@ public class RegisteredPlugin {
public static final String STORAGES_GROUP_NAME = "storages";
public static final String SECRETS_GROUP_NAME = "secrets";
public static final String TASK_RUNNERS_GROUP_NAME = "task-runners";
public static final String ASSETS_GROUP_NAME = "assets";
public static final String APPS_GROUP_NAME = "apps";
public static final String APP_BLOCKS_GROUP_NAME = "app-blocks";
public static final String CHARTS_GROUP_NAME = "charts";
@@ -58,7 +56,6 @@ public class RegisteredPlugin {
private final List<Class<? extends StorageInterface>> storages;
private final List<Class<? extends SecretPluginInterface>> secrets;
private final List<Class<? extends TaskRunner<?>>> taskRunners;
private final List<Class<? extends Asset>> assets;
private final List<Class<? extends AppPluginInterface>> apps;
private final List<Class<? extends AppBlockInterface>> appBlocks;
private final List<Class<? extends Chart<?>>> charts;
@@ -77,7 +74,6 @@ public class RegisteredPlugin {
!storages.isEmpty() ||
!secrets.isEmpty() ||
!taskRunners.isEmpty() ||
!assets.isEmpty() ||
!apps.isEmpty() ||
!appBlocks.isEmpty() ||
!charts.isEmpty() ||
@@ -149,10 +145,6 @@ public class RegisteredPlugin {
return AppPluginInterface.class;
}
if (this.getAssets().stream().anyMatch(r -> r.getName().equals(cls))) {
return Asset.class;
}
if (this.getLogExporters().stream().anyMatch(r -> r.getName().equals(cls))) {
return LogExporter.class;
}
@@ -188,7 +180,6 @@ public class RegisteredPlugin {
result.put(STORAGES_GROUP_NAME, Arrays.asList(this.getStorages().toArray(Class[]::new)));
result.put(SECRETS_GROUP_NAME, Arrays.asList(this.getSecrets().toArray(Class[]::new)));
result.put(TASK_RUNNERS_GROUP_NAME, Arrays.asList(this.getTaskRunners().toArray(Class[]::new)));
result.put(ASSETS_GROUP_NAME, Arrays.asList(this.getAssets().toArray(Class[]::new)));
result.put(APPS_GROUP_NAME, Arrays.asList(this.getApps().toArray(Class[]::new)));
result.put(APP_BLOCKS_GROUP_NAME, Arrays.asList(this.getAppBlocks().toArray(Class[]::new)));
result.put(CHARTS_GROUP_NAME, Arrays.asList(this.getCharts().toArray(Class[]::new)));
@@ -368,12 +359,6 @@ public class RegisteredPlugin {
b.append("] ");
}
if (!this.getAssets().isEmpty()) {
b.append("[Assets: ");
b.append(this.getAssets().stream().map(Class::getName).collect(Collectors.joining(", ")));
b.append("] ");
}
if (!this.getApps().isEmpty()) {
b.append("[Apps: ");
b.append(this.getApps().stream().map(Class::getName).collect(Collectors.joining(", ")));

View File

@@ -1,25 +0,0 @@
package io.kestra.core.plugins.notifications;
import io.kestra.core.models.property.Property;
import io.swagger.v3.oas.annotations.media.Schema;
import java.util.Map;
public interface ExecutionInterface {
@Schema(
title = "The execution id to use",
description = "Default is the current execution, " +
"change it to {{ trigger.executionId }} if you use this task with a Flow trigger to use the original execution."
)
Property<String> getExecutionId();
@Schema(
title = "Custom fields to be added on notification"
)
Property<Map<String, Object>> getCustomFields();
@Schema(
title = "Custom message to be added on notification"
)
Property<String> getCustomMessage();
}

View File

@@ -1,140 +0,0 @@
package io.kestra.core.plugins.notifications;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.property.Property;
import io.kestra.core.models.tasks.retrys.Exponential;
import io.kestra.core.repositories.ExecutionRepositoryInterface;
import io.kestra.core.runners.DefaultRunContext;
import io.kestra.core.runners.RunContext;
import io.kestra.core.serializers.JacksonMapper;
import io.kestra.core.utils.ListUtils;
import io.kestra.core.utils.RetryUtils;
import io.kestra.core.utils.UriProvider;
import java.time.Duration;
import java.util.*;
public final class ExecutionService {
private ExecutionService() {}
public static Execution findExecution(RunContext runContext, Property<String> executionId) throws IllegalVariableEvaluationException, NoSuchElementException {
ExecutionRepositoryInterface executionRepository = ((DefaultRunContext) runContext).getApplicationContext().getBean(ExecutionRepositoryInterface.class);
RetryUtils.Instance<Execution, NoSuchElementException> retryInstance = RetryUtils
.of(Exponential.builder()
.delayFactor(2.0)
.interval(Duration.ofSeconds(1))
.maxInterval(Duration.ofSeconds(15))
.maxAttempts(-1)
.maxDuration(Duration.ofMinutes(10))
.build(),
runContext.logger()
);
var executionRendererId = runContext.render(executionId).as(String.class).orElse(null);
var flowTriggerExecutionState = getOptionalFlowTriggerExecutionState(runContext);
var flowVars = (Map<String, String>) runContext.getVariables().get("flow");
var isCurrentExecution = isCurrentExecution(runContext, executionRendererId);
if (isCurrentExecution) {
runContext.logger().info("Loading execution data for the current execution.");
}
return retryInstance.run(
NoSuchElementException.class,
() -> executionRepository.findById(flowVars.get("tenantId"), executionRendererId)
.filter(foundExecution -> isExecutionInTheWantedState(foundExecution, isCurrentExecution, flowTriggerExecutionState))
.orElseThrow(() -> new NoSuchElementException("Unable to find execution '" + executionRendererId + "'"))
);
}
/**
* ExecutionRepository can be out of sync in ElasticSearch stack, with this filter we try to mitigate that
*
* @param execution the Execution we fetched from ExecutionRepository
* @param isCurrentExecution true if this *Execution Task is configured to send a notification for the current Execution
* @param flowTriggerExecutionState the Execution State that triggered the Flow trigger, if any
* @return true if we think we fetched the right Execution data for our usecase
*/
public static boolean isExecutionInTheWantedState(Execution execution, boolean isCurrentExecution, Optional<String> flowTriggerExecutionState) {
if (isCurrentExecution) {
// we don't wait for current execution to be terminated as it could not be possible as long as this task is running
return true;
}
if (flowTriggerExecutionState.isPresent()) {
// we were triggered by a Flow trigger that can be, for example: PAUSED
if (flowTriggerExecutionState.get().equals(State.Type.RUNNING.toString())) {
// RUNNING special case: we take the first state we got
return true;
} else {
// to handle the case where the ExecutionRepository is out of sync in ElasticSearch stack,
// we try to match an Execution with the same state
return execution.getState().getCurrent().name().equals(flowTriggerExecutionState.get());
}
} else {
return execution.getState().getCurrent().isTerminated();
}
}
public static Map<String, Object> executionMap(RunContext runContext, ExecutionInterface executionInterface) throws IllegalVariableEvaluationException {
Execution execution = findExecution(runContext, executionInterface.getExecutionId());
UriProvider uriProvider = ((DefaultRunContext) runContext).getApplicationContext().getBean(UriProvider.class);
Map<String, Object> templateRenderMap = new HashMap<>();
templateRenderMap.put("duration", execution.getState().humanDuration());
templateRenderMap.put("startDate", execution.getState().getStartDate());
templateRenderMap.put("link", uriProvider.executionUrl(execution));
templateRenderMap.put("execution", JacksonMapper.toMap(execution));
runContext.render(executionInterface.getCustomMessage())
.as(String.class)
.ifPresent(s -> templateRenderMap.put("customMessage", s));
final Map<String, Object> renderedCustomFields = runContext.render(executionInterface.getCustomFields()).asMap(String.class, Object.class);
if (!renderedCustomFields.isEmpty()) {
templateRenderMap.put("customFields", renderedCustomFields);
}
var isCurrentExecution = isCurrentExecution(runContext, execution.getId());
List<TaskRun> taskRuns;
if (isCurrentExecution) {
taskRuns = execution.getTaskRunList();
} else {
taskRuns = execution.getTaskRunList().stream()
.filter(t -> (execution.hasFailed() ? State.Type.FAILED : State.Type.SUCCESS).equals(t.getState().getCurrent()))
.toList();
}
if (!ListUtils.isEmpty(taskRuns)) {
TaskRun lastTaskRun = taskRuns.getLast();
templateRenderMap.put("firstFailed", State.Type.FAILED.equals(lastTaskRun.getState().getCurrent()) ? lastTaskRun : false);
templateRenderMap.put("lastTask", lastTaskRun);
}
return templateRenderMap;
}
/**
* if there is a state, we assume this is a Flow trigger with type: {@link io.kestra.plugin.core.trigger.Flow.Output}
*
* @return the state of the execution that triggered the Flow trigger, or empty if another usecase/trigger
*/
private static Optional<String> getOptionalFlowTriggerExecutionState(RunContext runContext) {
var triggerVar = Optional.ofNullable(
runContext.getVariables().get("trigger")
);
return triggerVar.map(trigger -> ((Map<String, String>) trigger).get("state"));
}
private static boolean isCurrentExecution(RunContext runContext, String executionId) {
var executionVars = (Map<String, String>) runContext.getVariables().get("execution");
return executionId.equals(executionVars.get("id"));
}
}

View File

@@ -1,16 +0,0 @@
package io.kestra.core.plugins.serdes;
import com.fasterxml.jackson.databind.JsonDeserializer;
import io.kestra.core.models.Plugin;
import io.kestra.core.models.assets.Asset;
import io.kestra.core.models.assets.Custom;
/**
* Specific {@link JsonDeserializer} for deserializing {@link Asset}.
*/
public final class AssetDeserializer extends PluginDeserializer<Asset> {
@Override
protected Class<? extends Plugin> fallbackClass() {
return Custom.class;
}
}

View File

@@ -12,7 +12,6 @@ import io.kestra.core.models.dashboards.charts.DataChart;
import io.kestra.core.plugins.DefaultPluginRegistry;
import io.kestra.core.plugins.PluginRegistry;
import io.kestra.core.serializers.JacksonMapper;
import io.micronaut.context.exceptions.NoSuchBeanException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -29,7 +28,7 @@ import java.util.Optional;
* The {@link PluginDeserializer} uses the {@link PluginRegistry} to found the plugin class corresponding to
* a plugin type.
*/
public class PluginDeserializer<T extends Plugin> extends JsonDeserializer<T> {
public final class PluginDeserializer<T extends Plugin> extends JsonDeserializer<T> {
private static final Logger log = LoggerFactory.getLogger(PluginDeserializer.class);
@@ -73,7 +72,7 @@ public class PluginDeserializer<T extends Plugin> extends JsonDeserializer<T> {
// By default, if no plugin-registry is configured retrieve
// the one configured from the static Kestra's context.
pluginRegistry = KestraContext.getContext().getPluginRegistry();
} catch (IllegalStateException | NoSuchBeanException ignore) {
} catch (IllegalStateException ignore) {
// This error can only happen if the KestraContext is not initialized (i.e. in unit tests).
log.error("No plugin registry was initialized. Use default implementation.");
pluginRegistry = DefaultPluginRegistry.getOrCreate();
@@ -93,10 +92,6 @@ public class PluginDeserializer<T extends Plugin> extends JsonDeserializer<T> {
identifier
);
pluginType = pluginRegistry.findClassByIdentifier(identifier);
if (pluginType == null) {
pluginType = fallbackClass();
}
}
if (pluginType == null) {
@@ -157,8 +152,4 @@ public class PluginDeserializer<T extends Plugin> extends JsonDeserializer<T> {
return isVersioningSupported && version != null && !version.isEmpty() ? type + ":" + version : type;
}
protected Class<? extends Plugin> fallbackClass() {
return null;
}
}

View File

@@ -1,12 +0,0 @@
package io.kestra.core.runners;
import io.kestra.core.models.assets.Asset;
import io.kestra.core.queues.QueueException;
import java.util.List;
public interface AssetEmitter {
void upsert(Asset asset) throws QueueException;
List<Asset> outputs();
}

View File

@@ -6,13 +6,11 @@ import com.google.common.base.CaseFormat;
import com.google.common.collect.ImmutableMap;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.metrics.MetricRegistry;
import io.kestra.core.models.assets.AssetsDeclaration;
import io.kestra.core.models.Plugin;
import io.kestra.core.models.executions.AbstractMetricEntry;
import io.kestra.core.models.property.Property;
import io.kestra.core.models.tasks.Task;
import io.kestra.core.models.triggers.AbstractTrigger;
import io.kestra.core.assets.AssetManagerFactory;
import io.kestra.core.plugins.PluginConfigurations;
import io.kestra.core.services.KVStoreService;
import io.kestra.core.storages.Storage;
@@ -56,7 +54,6 @@ public class DefaultRunContext extends RunContext {
private MetricRegistry meterRegistry;
private VersionProvider version;
private KVStoreService kvStoreService;
private AssetManagerFactory assetManagerFactory;
private Optional<String> secretKey;
private WorkingDir workingDir;
private Validator validator;
@@ -76,8 +73,6 @@ public class DefaultRunContext extends RunContext {
private Task task;
private AbstractTrigger trigger;
private volatile AssetEmitter assetEmitter;
private final AtomicBoolean isInitialized = new AtomicBoolean(false);
@@ -166,7 +161,6 @@ public class DefaultRunContext extends RunContext {
this.secretKey = applicationContext.getProperty("kestra.encryption.secret-key", String.class);
this.validator = applicationContext.getBean(Validator.class);
this.localPath = applicationContext.getBean(LocalPathFactory.class).createLocalPath(this);
this.assetManagerFactory = applicationContext.getBean(AssetManagerFactory.class);
}
}
@@ -543,23 +537,6 @@ public class DefaultRunContext extends RunContext {
return flow != null ? flow.get("tenantId") : null;
}
/**
* {@inheritDoc}
*/
@Override
public TaskRunInfo taskRunInfo() {
Optional<Map<String, Object>> maybeTaskRunMap = Optional.ofNullable(this.getVariables().get("taskrun"))
.map(Map.class::cast);
return new TaskRunInfo(
(String) this.getVariables().get("executionId"),
(String) this.getVariables().get("taskId"),
maybeTaskRunMap.map(m -> (String) m.get("id"))
.orElse(null),
maybeTaskRunMap.map(m -> (String) m.get("value"))
.orElse(null)
);
}
/**
* {@inheritDoc}
*/
@@ -568,7 +545,12 @@ public class DefaultRunContext extends RunContext {
public FlowInfo flowInfo() {
Map<String, Object> flow = (Map<String, Object>) this.getVariables().get("flow");
// normally only tests should not have the flow variable
return flow == null ? new FlowInfo(null, null, null, null) : FlowInfo.from(flow);
return flow == null ? new FlowInfo(null, null, null, null) : new FlowInfo(
(String) flow.get("tenantId"),
(String) flow.get("namespace"),
(String) flow.get("id"),
(Integer) flow.get("revision")
);
}
/**
@@ -612,25 +594,6 @@ public class DefaultRunContext extends RunContext {
return new AclCheckerImpl(this.applicationContext, flowInfo());
}
@Override
public AssetEmitter assets() throws IllegalVariableEvaluationException {
if (this.assetEmitter == null) {
synchronized (this) {
if (this.assetEmitter == null) {
this.assetEmitter = assetManagerFactory.of(
Optional.ofNullable(task).map(Task::getAssets)
.or(() -> Optional.ofNullable(trigger).map(AbstractTrigger::getAssets))
.flatMap(throwFunction(asset -> this.render(asset).as(AssetsDeclaration.class)))
.map(AssetsDeclaration::isEnableAuto)
.orElse(false)
);
}
}
}
return this.assetEmitter;
}
@Override
public LocalPath localPath() {
return localPath;

View File

@@ -5,6 +5,8 @@ import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithException;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.services.PluginDefaultService;
import io.micronaut.context.annotation.Bean;
import jakarta.annotation.PreDestroy;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import io.kestra.core.queues.QueueFactoryInterface;
@@ -15,7 +17,6 @@ import io.kestra.core.services.FlowListenersInterface;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
@@ -26,15 +27,14 @@ import jakarta.inject.Singleton;
@Singleton
@Slf4j
public class FlowListeners implements FlowListenersInterface {
private final AtomicBoolean isStarted = new AtomicBoolean(false);
private final QueueInterface<FlowInterface> flowQueue;
private final List<FlowWithSource> flows;
private final List<Consumer<List<FlowWithSource>>> consumers = new ArrayList<>();
private final List<BiConsumer<FlowWithSource, FlowWithSource>> consumersEach = new ArrayList<>();
private final PluginDefaultService pluginDefaultService;
private Runnable queueListenerCancellation;
@Inject
public FlowListeners(
FlowRepositoryInterface flowRepository,
@@ -49,8 +49,9 @@ public class FlowListeners implements FlowListenersInterface {
@Override
public void run() {
synchronized (this) {
if (this.isStarted.compareAndSet(false, true)) {
this.flowQueue.receive(either -> {
if (queueListenerCancellation == null) {
log.info("STARTING FLOW LISTENER: {}", this);
queueListenerCancellation = this.flowQueue.receive(either -> {
FlowWithSource flow;
if (either.isRight()) {
flow = FlowWithException.from(either.getRight().getRecord(), either.getRight(), log).orElse(null);
@@ -154,4 +155,18 @@ public class FlowListeners implements FlowListenersInterface {
// we forced a deep clone to avoid concurrency where instance are changed during iteration (especially scheduler).
return new ArrayList<>(this.flows);
}
@PreDestroy
@Override
public void close() throws Exception {
synchronized (this) {
boolean b = queueListenerCancellation != null;
log.info("THREAD STACKTRACE: {}", (Object) Thread.currentThread().getStackTrace());
log.info("LISTENER NOT NULL : {}", b);
log.info("LISTENER THIS : {}", this);
if (b) {
queueListenerCancellation.run();
}
}
}
}

View File

@@ -143,8 +143,6 @@ public abstract class RunContext implements PropertyContext {
@Deprecated(forRemoval = true)
public abstract String tenantId();
public abstract TaskRunInfo taskRunInfo();
public abstract FlowInfo flowInfo();
/**
@@ -192,19 +190,7 @@ public abstract class RunContext implements PropertyContext {
*/
public abstract LocalPath localPath();
public record TaskRunInfo(String executionId, String taskId, String taskRunId, Object value) {
}
public record FlowInfo(String tenantId, String namespace, String id, Integer revision) {
public static FlowInfo from(Map<String, Object> flowInfoMap) {
return new FlowInfo(
(String) flowInfoMap.get("tenantId"),
(String) flowInfoMap.get("namespace"),
(String) flowInfoMap.get("id"),
(Integer) flowInfoMap.get("revision")
);
}
}
/**
@@ -220,11 +206,6 @@ public abstract class RunContext implements PropertyContext {
*/
public abstract AclChecker acl();
/**
* Get access to the Assets handler.
*/
public abstract AssetEmitter assets() throws IllegalVariableEvaluationException;
/**
* Clone this run context for a specific plugin.
* @return a new run context with the plugin configuration of the given plugin.

View File

@@ -21,7 +21,8 @@ public class WorkerTaskResult implements HasUID {
List<TaskRun> dynamicTaskRuns;
public WorkerTaskResult(TaskRun taskRun) {
this(taskRun, new ArrayList<>(1)); // there are usually very few dynamic task runs, so we init the list with a capacity of 1
this.taskRun = taskRun;
this.dynamicTaskRuns = new ArrayList<>();
}
/**

View File

@@ -1,6 +1,5 @@
package io.kestra.core.runners.pebble.functions;
import com.cronutils.utils.VisibleForTesting;
import io.kestra.core.runners.LocalPath;
import io.kestra.core.runners.LocalPathFactory;
import io.kestra.core.services.NamespaceService;
@@ -156,11 +155,31 @@ abstract class AbstractFileFunction implements Function {
}
private String checkIfFileFromAllowedNamespaceAndReturnIt(URI path, String tenantId, String fromNamespace) {
// Extract namespace from the path, it should be of the form: kestra:///({tenantId}/){namespace}/{flowId}/executions/{executionId}/tasks/{taskId}/{taskRunId}/{fileName}'
// To extract the namespace, we must do it step by step as tenantId, namespace and taskId can contain the words 'executions' and 'tasks'
String namespace = path.toString().substring(KESTRA_SCHEME.length());
if (!EXECUTION_FILE.matcher(namespace).matches()) {
throw new IllegalArgumentException("Unable to read the file '" + path + "' as it is not an execution file");
}
// 1. remove the tenantId if existing
if (tenantId != null) {
namespace = namespace.substring(tenantId.length() + 1);
}
// 2. remove everything after tasks
namespace = namespace.substring(0, namespace.lastIndexOf("/tasks/"));
// 3. remove everything after executions
namespace = namespace.substring(0, namespace.lastIndexOf("/executions/"));
// 4. remove the flowId
namespace = namespace.substring(0, namespace.lastIndexOf('/'));
// 5. replace '/' with '.'
namespace = namespace.replace("/", ".");
String namespace = extractNamespace(path);
namespaceService.checkAllowedNamespace(tenantId, namespace, tenantId, fromNamespace);
return namespace;
}
private String checkEnabledLocalFileAndReturnNamespace(Map<String, Object> args, Map<String, String> flow) {
if (!enableFileProtocol) {
throw new SecurityException("The file:// protocol has been disabled inside the Kestra configuration.");
@@ -181,24 +200,4 @@ abstract class AbstractFileFunction implements Function {
}
return Optional.ofNullable(customNs).orElse(flow.get(NAMESPACE));
}
@VisibleForTesting
String extractNamespace( URI path){
// Extract namespace from the path, it should be of the form: kestra:///{namespace}/{flowId}/executions/{executionId}/tasks/{taskId}/{taskRunId}/{fileName}'
// To extract the namespace, we must do it step by step as namespace and taskId can contain the words 'executions' and 'tasks'
String namespace = path.toString().substring(KESTRA_SCHEME.length());
if (!EXECUTION_FILE.matcher(namespace).matches()) {
throw new IllegalArgumentException("Unable to read the file '" + path + "' as it is not an execution file");
}
// 1. remove everything after tasks
namespace = namespace.substring(0, namespace.lastIndexOf("/tasks/"));
// 2. remove everything after executions
namespace = namespace.substring(0, namespace.lastIndexOf("/executions/"));
// 3. remove the flowId
namespace = namespace.substring(0, namespace.lastIndexOf('/'));
// 4. replace '/' with '.'
namespace = namespace.replace("/", ".");
return namespace;
}
}

View File

@@ -187,7 +187,7 @@ public class ExecutionService {
}
public Execution restart(final Execution execution, @Nullable Integer revision) throws Exception {
if (!execution.getState().canBeRestarted()) {
if (!(execution.getState().isTerminated() || execution.getState().isPaused())) {
throw new IllegalStateException("Execution must be terminated to be restarted, " +
"current state is '" + execution.getState().getCurrent() + "' !"
);

View File

@@ -6,7 +6,7 @@ import java.util.List;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
public interface FlowListenersInterface {
public interface FlowListenersInterface extends AutoCloseable {
void run();
void listen(Consumer<List<FlowWithSource>> consumer);

View File

@@ -1,6 +1,5 @@
package io.kestra.core.test.flow;
import io.kestra.core.models.assets.Asset;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.property.Property;
import jakarta.validation.constraints.NotNull;
@@ -9,7 +8,6 @@ import lombok.Builder;
import lombok.Getter;
import lombok.NoArgsConstructor;
import java.util.List;
import java.util.Map;
@Getter
@@ -27,7 +25,5 @@ public class TaskFixture {
private Map<String, Object> outputs;
private List<Asset> assets;
private Property<String> description;
}

View File

@@ -11,21 +11,17 @@ public final class ThreadUncaughtExceptionHandler implements UncaughtExceptionHa
@Override
public void uncaughtException(Thread t, Throwable e) {
boolean isTest = KestraContext.getContext().getEnvironments().contains("test");
try {
// cannot use FormattingLogger due to a dependency loop
log.error("Caught an exception in {}. {}", t, isTest ? "Keeping it running for test." : "Shutting down.", e);
log.error("Caught an exception in {}. Shutting down.", t, e);
} catch (Throwable errorInLogging) {
// If logging fails, e.g. due to missing memory, at least try to log the
// message and the cause for the failed logging.
System.err.println(e.getMessage());
System.err.println(errorInLogging.getMessage());
} finally {
if (!isTest) {
KestraContext.getContext().shutdown();
Runtime.getRuntime().exit(1);
}
KestraContext.getContext().shutdown();
Runtime.getRuntime().exit(1);
}
}
}

View File

@@ -19,7 +19,15 @@ import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import java.lang.reflect.Field;
import java.util.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
@@ -147,8 +155,6 @@ public class FlowValidator implements ConstraintValidator<FlowValidation, Flow>
.map(task -> task.getId())
.collect(Collectors.toList());
violations.addAll(assetsViolations(allTasks));
if (!invalidTasks.isEmpty()) {
violations.add("Invalid output reference: use outputs[key-name] instead of outputs.key-name — keys with dashes require bracket notation, offending tasks:" +
" [" + String.join(", ", invalidTasks) + "]");
@@ -175,12 +181,6 @@ public class FlowValidator implements ConstraintValidator<FlowValidation, Flow>
}
}
protected List<String> assetsViolations(List<Task> allTasks) {
return allTasks.stream().filter(task -> task.getAssets() != null)
.map(taskWithAssets -> "Task '" + taskWithAssets.getId() + "' can't have any `assets` because assets are only available in Enterprise Edition.")
.toList();
}
private static boolean checkObjectFieldsWithPatterns(Object object, List<Pattern> patterns) {
if (object == null) {
return true;

View File

@@ -15,7 +15,6 @@ import lombok.experimental.SuperBuilder;
import java.util.List;
import java.util.Map;
import java.util.Collections;
@SuperBuilder
@ToString
@@ -63,24 +62,14 @@ public class SetVariables extends Task implements ExecutionUpdatableTask {
public Execution update(Execution execution, RunContext runContext) throws Exception {
Map<String, Object> renderedVars = runContext.render(this.variables).asMap(String.class, Object.class);
boolean renderedOverwrite = runContext.render(overwrite).as(Boolean.class).orElseThrow();
Map<String, Object> currentVariables =
execution.getVariables() == null ? Collections.emptyMap() : execution.getVariables();
if (!renderedOverwrite) {
// check that none of the new variables already exist
List<String> duplicated = renderedVars.keySet().stream()
.filter(currentVariables::containsKey)
.toList();
List<String> duplicated = renderedVars.keySet().stream().filter(key -> execution.getVariables().containsKey(key)).toList();
if (!duplicated.isEmpty()) {
throw new IllegalArgumentException(
"`overwrite` is set to false and the following variables already exist: " +
String.join(",", duplicated)
);
throw new IllegalArgumentException("`overwrite` is set to false and the following variables already exist: " + String.join(",", duplicated));
}
}
return execution.withVariables(MapUtils.deepMerge(currentVariables, renderedVars));
return execution.withVariables(MapUtils.deepMerge(execution.getVariables(), renderedVars));
}
}

View File

@@ -183,7 +183,6 @@ public class Webhook extends AbstractTrigger implements TriggerOutput<Webhook.Ou
.flowId(flow.getId())
.flowRevision(flow.getRevision())
.inputs(inputs)
.variables(flow.getVariables())
.state(new State())
.trigger(ExecutionTrigger.of(
this,

View File

@@ -103,7 +103,7 @@ class ClassPluginDocumentationTest {
PluginClassAndMetadata<AbstractTrigger> metadata = PluginClassAndMetadata.create(scan, Schedule.class, AbstractTrigger.class, null);
ClassPluginDocumentation<? extends AbstractTrigger> doc = ClassPluginDocumentation.of(jsonSchemaGenerator, metadata, scan.version(), true);
assertThat(doc.getDefs()).hasSize(23);
assertThat(doc.getDefs().size()).isEqualTo(20);
assertThat(doc.getDocLicense()).isNull();
assertThat(((Map<String, Object>) doc.getDefs().get("io.kestra.core.models.tasks.WorkerGroup")).get("type")).isEqualTo("object");
@@ -142,9 +142,9 @@ class ClassPluginDocumentationTest {
ClassPluginDocumentation<? extends DynamicPropertyExampleTask> doc = ClassPluginDocumentation.of(jsonSchemaGenerator, metadata, scan.version(), true);
assertThat(doc.getCls()).isEqualTo("io.kestra.core.models.property.DynamicPropertyExampleTask");
assertThat(doc.getDefs()).hasSize(9);
assertThat(doc.getDefs()).hasSize(6);
Map<String, Object> properties = (Map<String, Object>) doc.getPropertiesSchema().get("properties");
assertThat(properties).hasSize(22);
assertThat(properties).hasSize(21);
Map<String, Object> number = (Map<String, Object>) properties.get("number");
assertThat(number.get("anyOf")).isNotNull();

View File

@@ -47,6 +47,7 @@ import static org.hamcrest.Matchers.*;
@KestraTest
class JsonSchemaGeneratorTest {
@Inject
JsonSchemaGenerator jsonSchemaGenerator;
@@ -345,7 +346,7 @@ class JsonSchemaGeneratorTest {
void pluginSchemaShouldNotResolveTaskAndTriggerSubtypes() {
Map<String, Object> generate = jsonSchemaGenerator.properties(null, TaskWithSubTaskAndSubTrigger.class);
var definitions = (Map<String, Map<String, Object>>) generate.get("$defs");
assertThat(definitions.size(), is(30));
assertThat(definitions.size(), is(27));
}
@SuppressWarnings("unchecked")

View File

@@ -15,11 +15,13 @@ import jakarta.inject.Inject;
import jakarta.validation.constraints.NotNull;
import lombok.*;
import lombok.experimental.SuperBuilder;
import org.hamcrest.MatcherAssert;
import org.junit.jupiter.api.Test;
import java.util.Map;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.Matchers.is;
@KestraTest(startRunner = true)
class AdditionalPluginTest {
@@ -41,7 +43,7 @@ class AdditionalPluginTest {
void shouldResolveAdditionalPluginSubtypes() {
Map<String, Object> generate = jsonSchemaGenerator.properties(null, AdditionalPluginTest.AdditionalPluginTestTask.class);
var definitions = (Map<String, Map<String, Object>>) generate.get("$defs");
assertThat(definitions).hasSize(10);
assertThat(definitions).hasSize(7);
assertThat(definitions).containsKey("io.kestra.core.plugins.AdditionalPluginTest-AdditionalPluginTest1");
assertThat(definitions).containsKey("io.kestra.core.plugins.AdditionalPluginTest-AdditionalPluginTest2");
}

View File

@@ -95,4 +95,4 @@ class PluginDeserializerTest {
public record TestPlugin(String type) implements Plugin {
}
}
}

View File

@@ -558,4 +558,4 @@ public abstract class AbstractRunnerTest {
public void shouldCallTasksAfterListener(Execution execution) {
afterExecutionTestCase.shouldCallTasksAfterListener(execution);
}
}
}

View File

@@ -1,25 +1,25 @@
package io.kestra.core.runners;
import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.models.property.Property;
import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.utils.Await;
import io.kestra.core.utils.TestsUtils;
import java.time.Duration;
import java.util.List;
import java.util.concurrent.TimeoutException;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.services.FlowListenersInterface;
import io.kestra.plugin.core.debug.Return;
import io.kestra.core.utils.Await;
import io.kestra.core.utils.IdUtils;
import java.util.Collections;
import java.util.concurrent.atomic.AtomicInteger;
import io.kestra.core.utils.TestsUtils;
import io.kestra.plugin.core.debug.Return;
import jakarta.inject.Inject;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.time.Duration;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import static org.assertj.core.api.Assertions.assertThat;
@KestraTest
@@ -27,6 +27,9 @@ abstract public class FlowListenersTest {
@Inject
protected FlowRepositoryInterface flowRepository;
@Inject
protected FlowListenersInterface flowListenersService;
protected static FlowWithSource create(String tenantId, String flowId, String taskId) {
FlowWithSource flow = FlowWithSource.builder()
.id(flowId)
@@ -44,60 +47,56 @@ abstract public class FlowListenersTest {
private static final Logger LOG = LoggerFactory.getLogger(FlowListenersTest.class);
public void suite(FlowListenersInterface flowListenersService) throws TimeoutException {
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
flowListenersService.run();
@Test
public void all() throws Exception {
FlowListenersInterface finalFlowListenersService = flowListenersService;
try (finalFlowListenersService) {
finalFlowListenersService.run();
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
AtomicInteger count = new AtomicInteger();
AtomicInteger count = new AtomicInteger();
flowListenersService.listen(flows -> count.set(getFlowsForTenant(flowListenersService, tenant).size()));
flowListenersService.listen(flows -> count.set(getFlowsForTenant(flowListenersService, tenant).size()));
// initial state
LOG.info("-----------> wait for zero");
Await.until(() -> count.get() == 0, Duration.ofMillis(10), Duration.ofSeconds(5));
assertThat(getFlowsForTenant(flowListenersService, tenant).size()).isZero();
// resend on startup done for kafka
LOG.info("-----------> wait for zero kafka");
if (flowListenersService.getClass().getName().equals("io.kestra.ee.runner.kafka.KafkaFlowListeners")) {
// initial state
LOG.info("-----------> wait for zero");
Await.until(() -> count.get() == 0, Duration.ofMillis(10), Duration.ofSeconds(5));
assertThat(getFlowsForTenant(flowListenersService, tenant).size()).isZero();
// create first
LOG.info("-----------> create first flow");
FlowWithSource first = create(tenant, "first_" + IdUtils.create(), "test");
FlowWithSource firstUpdated = create(tenant, first.getId(), "test2");
flowRepository.create(GenericFlow.of(first));
Await.until(() -> "Expected to have 1 flow but got " + count.get(), () -> count.get() == 1, Duration.ofMillis(10), Duration.ofSeconds(5));
assertThat(getFlowsForTenant(flowListenersService, tenant).size()).isEqualTo(1);
// create the same id than first, no additional flows
first = flowRepository.update(GenericFlow.of(firstUpdated), first);
Await.until(() -> count.get() == 1, Duration.ofMillis(10), Duration.ofSeconds(5));
assertThat(getFlowsForTenant(flowListenersService, tenant).size()).isEqualTo(1);
FlowWithSource second = create(tenant, "second_" + IdUtils.create(), "test");
// create a new one
flowRepository.create(GenericFlow.of(second));
Await.until(() -> count.get() == 2, Duration.ofMillis(10), Duration.ofSeconds(5));
assertThat(getFlowsForTenant(flowListenersService, tenant).size()).isEqualTo(2);
// delete first
FlowWithSource deleted = flowRepository.delete(first);
Await.until(() -> count.get() == 1, Duration.ofMillis(10), Duration.ofSeconds(5));
assertThat(getFlowsForTenant(flowListenersService, tenant).size()).isEqualTo(1);
            // restore must work
flowRepository.create(GenericFlow.of(first));
Await.until(() -> count.get() == 2, Duration.ofMillis(10), Duration.ofSeconds(5));
assertThat(getFlowsForTenant(flowListenersService, tenant).size()).isEqualTo(2);
}
// create first
        LOG.info("-----------> create first flow");
FlowWithSource first = create(tenant, "first_" + IdUtils.create(), "test");
FlowWithSource firstUpdated = create(tenant, first.getId(), "test2");
flowRepository.create(GenericFlow.of(first));
Await.until(() -> count.get() == 1, Duration.ofMillis(10), Duration.ofSeconds(5));
assertThat(getFlowsForTenant(flowListenersService, tenant).size()).isEqualTo(1);
// create the same id than first, no additional flows
first = flowRepository.update(GenericFlow.of(firstUpdated), first);
Await.until(() -> count.get() == 1, Duration.ofMillis(10), Duration.ofSeconds(5));
assertThat(getFlowsForTenant(flowListenersService, tenant).size()).isEqualTo(1);
FlowWithSource second = create(tenant, "second_" + IdUtils.create(), "test");
// create a new one
flowRepository.create(GenericFlow.of(second));
Await.until(() -> count.get() == 2, Duration.ofMillis(10), Duration.ofSeconds(5));
assertThat(getFlowsForTenant(flowListenersService, tenant).size()).isEqualTo(2);
// delete first
FlowWithSource deleted = flowRepository.delete(first);
Await.until(() -> count.get() == 1, Duration.ofMillis(10), Duration.ofSeconds(5));
assertThat(getFlowsForTenant(flowListenersService, tenant).size()).isEqualTo(1);
        // restore must work
flowRepository.create(GenericFlow.of(first));
Await.until(() -> count.get() == 2, Duration.ofMillis(10), Duration.ofSeconds(5));
assertThat(getFlowsForTenant(flowListenersService, tenant).size()).isEqualTo(2);
}
public List<FlowWithSource> getFlowsForTenant(FlowListenersInterface flowListenersService, String tenantId){
public List<FlowWithSource> getFlowsForTenant(FlowListenersInterface flowListenersService, String tenantId) {
return flowListenersService.flows().stream()
.filter(f -> tenantId.equals(f.getTenantId()))
.toList();

View File

@@ -1,28 +0,0 @@
package io.kestra.core.runners.pebble.functions;
import io.kestra.core.junit.annotations.KestraTest;
import jakarta.inject.Inject;
import org.junit.jupiter.api.Test;
import java.net.URI;
import static org.assertj.core.api.AssertionsForClassTypes.assertThat;
import static org.junit.jupiter.api.Assertions.assertThrows;
@KestraTest
public class AbstractFileFunctionTest {
@Inject
ReadFileFunction readFileFunction;
@Test
void namespaceFromURI(){
String namespace1 = readFileFunction.extractNamespace(URI.create("kestra:///demo/simple-write-oss/executions/4Tnd2zrWGoHGrufwyt738j/tasks/write/2FOeylkRr5tktwIQqFh56w/18316959863401460785.txt"));
assertThat(namespace1).isEqualTo("demo");
String namespace2 = readFileFunction.extractNamespace(URI.create("kestra:///io/kestra/tests/simple-write-oss/executions/4Tnd2zrWGoHGrufwyt738j/tasks/write/2FOeylkRr5tktwIQqFh56w/18316959863401460785.txt"));
assertThat(namespace2).isEqualTo("io.kestra.tests");
assertThrows(IllegalArgumentException.class, () ->readFileFunction.extractNamespace(URI.create("kestra:///simple-write-oss/executions/4Tnd2zrWGoHGrufwyt738j/tasks/write/2FOeylkRr5tktwIQqFh56w/18316959863401460785.txt")));
assertThrows(IllegalArgumentException.class, () ->readFileFunction.extractNamespace(URI.create("kestra:///executions/4Tnd2zrWGoHGrufwyt738j/tasks/write/2FOeylkRr5tktwIQqFh56w/18316959863401460785.txt")));
}
}

View File

@@ -1,29 +0,0 @@
package io.kestra.core.runners.test;
import io.kestra.core.models.annotations.Plugin;
import io.kestra.core.models.annotations.PluginProperty;
import io.kestra.core.models.assets.Asset;
import io.kestra.core.models.tasks.*;
import io.kestra.core.runners.RunContext;
import jakarta.validation.constraints.NotNull;
import lombok.*;
import lombok.experimental.SuperBuilder;
@SuperBuilder
@ToString
@EqualsAndHashCode
@Getter
@NoArgsConstructor
@Plugin
public class AssetEmitter extends Task implements RunnableTask<VoidOutput> {
@NotNull
@PluginProperty
private Asset assetToEmit;
@Override
public VoidOutput run(RunContext runContext) throws Exception {
runContext.assets().upsert(assetToEmit);
return null;
}
}

View File

@@ -44,6 +44,6 @@ public class SleepTrigger extends AbstractTrigger implements PollingTriggerInter
@Override
public Duration getInterval() {
return null;
return Duration.ofSeconds(1);
}
}

View File

@@ -1,7 +1,5 @@
package io.kestra.core.validations;
import io.kestra.core.models.assets.AssetIdentifier;
import io.kestra.core.models.assets.AssetsDeclaration;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.models.validations.ModelValidator;
@@ -9,9 +7,7 @@ import io.kestra.core.serializers.YamlParser;
import io.kestra.core.tenant.TenantService;
import io.kestra.core.utils.TestsUtils;
import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.plugin.core.log.Log;
import jakarta.inject.Inject;
import jakarta.validation.ConstraintViolation;
import org.junit.jupiter.api.Test;
import io.kestra.core.models.validations.ValidateConstraintViolation;
import io.kestra.core.services.FlowService;
@@ -233,31 +229,6 @@ class FlowValidationTest {
assertThat(validate.get().getMessage()).contains("Duplicate preconditions with id [flows]");
}
@Test
void eeAllowsDefiningAssets() {
Flow flow = Flow.builder()
.id(TestsUtils.randomString())
.namespace(TestsUtils.randomNamespace())
.tasks(List.of(
Log.builder()
.id("log")
.type(Log.class.getName())
.message("any")
.assets(io.kestra.core.models.property.Property.ofValue(
new AssetsDeclaration(true, List.of(new AssetIdentifier(null, null, "anyId")), null))
)
.build()
))
.build();
Optional<ConstraintViolationException> violations = modelValidator.isValid(flow);
assertThat(violations.isPresent()).isEqualTo(true);
assertThat(violations.get().getConstraintViolations().stream().map(ConstraintViolation::getMessage)).satisfiesExactly(
message -> assertThat(message).contains("Task 'log' can't have any `assets` because assets are only available in Enterprise Edition.")
);
};
private Flow parse(String path) {
URL resource = TestsUtils.class.getClassLoader().getResource(path);
assert resource != null;
@@ -266,4 +237,4 @@ class FlowValidationTest {
return YamlParser.parse(file, Flow.class);
}
}
}

View File

@@ -10,7 +10,7 @@ tasks:
message: "{{ task.id }}"
- id: pause
type: io.kestra.plugin.core.flow.Pause
pauseDuration: PT1S
delay: PT1S
tasks:
- id: c
type: io.kestra.plugin.core.log.Log

View File

@@ -1,14 +1,9 @@
package io.kestra.executor;
import io.kestra.core.assets.AssetService;
import io.kestra.core.debug.Breakpoint;
import io.kestra.core.exceptions.InternalException;
import io.kestra.core.metrics.MetricRegistry;
import io.kestra.core.models.Label;
import io.kestra.core.models.assets.AssetIdentifier;
import io.kestra.core.models.assets.AssetUser;
import io.kestra.core.models.assets.AssetsDeclaration;
import io.kestra.core.models.assets.AssetsInOut;
import io.kestra.core.models.executions.*;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithSource;
@@ -101,12 +96,6 @@ public class ExecutorService {
@Named(QueueFactoryInterface.WORKERTASKLOG_NAMED)
private QueueInterface<LogEntry> logQueue;
@Inject
private AssetService assetService;
@Inject
private RunContextInitializer runContextInitializer;
protected FlowMetaStoreInterface flowExecutorInterface() {
// bean is injected late, so we need to wait
if (this.flowExecutorInterface == null) {
@@ -907,35 +896,21 @@ public class ExecutorService {
boolean hasMockedWorkerTask = false;
record FixtureAndTaskRun(TaskFixture fixture, TaskRun taskRun) {}
if (executor.getExecution().getFixtures() != null) {
RunContext runContext = runContextInitializer.forExecutor((DefaultRunContext) runContextFactory.of(
executor.getFlow(),
executor.getExecution()
));
RunContext runContext = runContextFactory.of(executor.getFlow(), executor.getExecution());
List<WorkerTaskResult> workerTaskResults = executor.getExecution()
.getTaskRunList()
.stream()
.filter(taskRun -> taskRun.getState().getCurrent().isCreated())
.flatMap(taskRun -> executor.getExecution().getFixtureForTaskRun(taskRun).stream().map(fixture -> new FixtureAndTaskRun(fixture, taskRun)))
.map(throwFunction(fixtureAndTaskRun -> {
Optional<AssetsDeclaration> renderedAssetsDeclaration = runContext.render(executor.getFlow().findTaskByTaskId(fixtureAndTaskRun.taskRun.getTaskId()).getAssets()).as(AssetsDeclaration.class);
return WorkerTaskResult.builder()
.taskRun(fixtureAndTaskRun.taskRun()
.withState(Optional.ofNullable(fixtureAndTaskRun.fixture().getState()).orElse(State.Type.SUCCESS))
.withOutputs(
variablesService.of(StorageContext.forTask(fixtureAndTaskRun.taskRun),
fixtureAndTaskRun.fixture().getOutputs() == null ? null : runContext.render(fixtureAndTaskRun.fixture().getOutputs()))
)
.withAssets(new AssetsInOut(
renderedAssetsDeclaration.map(AssetsDeclaration::getInputs).orElse(Collections.emptyList()).stream()
.map(assetIdentifier -> assetIdentifier.withTenantId(executor.getFlow().getTenantId()))
.toList(),
fixtureAndTaskRun.fixture().getAssets() == null ? null : fixtureAndTaskRun.fixture().getAssets().stream()
.map(asset -> asset.withTenantId(executor.getFlow().getTenantId()))
.toList()
))
)
.build();
}
.map(throwFunction(fixtureAndTaskRun -> WorkerTaskResult.builder()
.taskRun(fixtureAndTaskRun.taskRun()
.withState(Optional.ofNullable(fixtureAndTaskRun.fixture().getState()).orElse(State.Type.SUCCESS))
.withOutputs(
variablesService.of(StorageContext.forTask(fixtureAndTaskRun.taskRun),
fixtureAndTaskRun.fixture().getOutputs() == null ? null : runContext.render(fixtureAndTaskRun.fixture().getOutputs()))
)
)
.build()
))
.toList();
@@ -1197,47 +1172,6 @@ public class ExecutorService {
metricRegistry.tags(workerTaskResult)
)
.record(taskRun.getState().getDurationOrComputeIt());
if (
!taskRun.getState().isFailed()
&& taskRun.getAssets() != null &&
(!taskRun.getAssets().getInputs().isEmpty() || !taskRun.getAssets().getOutputs().isEmpty())
) {
AssetUser assetUser = new AssetUser(
taskRun.getTenantId(),
taskRun.getNamespace(),
taskRun.getFlowId(),
newExecution.getFlowRevision(),
taskRun.getExecutionId(),
taskRun.getTaskId(),
taskRun.getId()
);
List<AssetIdentifier> outputIdentifiers = taskRun.getAssets().getOutputs().stream()
.map(asset -> asset.withTenantId(taskRun.getTenantId()))
.map(AssetIdentifier::of)
.toList();
List<AssetIdentifier> inputAssets = taskRun.getAssets().getInputs().stream()
.map(assetIdentifier -> assetIdentifier.withTenantId(taskRun.getTenantId()))
.toList();
try {
assetService.assetLineage(
assetUser,
inputAssets,
outputIdentifiers
);
} catch (QueueException e) {
log.warn("Unable to submit asset lineage event for {} -> {}", inputAssets, outputIdentifiers, e);
}
taskRun.getAssets().getOutputs().forEach(asset -> {
try {
assetService.asyncUpsert(assetUser, asset);
} catch (QueueException e) {
log.warn("Unable to submit asset upsert event for asset {}", asset.getId(), e);
}
});
}
}
}

View File

@@ -1,3 +0,0 @@
ALTER TABLE triggers
ADD COLUMN "disabled" BOOL
GENERATED ALWAYS AS (JQ_BOOLEAN("value", '.disabled')) NOT NULL;

View File

@@ -1,22 +0,0 @@
ALTER TABLE queues ALTER COLUMN "type" ENUM(
'io.kestra.core.models.executions.Execution',
'io.kestra.core.models.templates.Template',
'io.kestra.core.models.executions.ExecutionKilled',
'io.kestra.core.runners.WorkerJob',
'io.kestra.core.runners.WorkerTaskResult',
'io.kestra.core.runners.WorkerInstance',
'io.kestra.core.runners.WorkerTaskRunning',
'io.kestra.core.models.executions.LogEntry',
'io.kestra.core.models.triggers.Trigger',
'io.kestra.ee.models.audits.AuditLog',
'io.kestra.core.models.executions.MetricEntry',
'io.kestra.core.runners.WorkerTriggerResult',
'io.kestra.core.runners.SubflowExecutionResult',
'io.kestra.core.server.ClusterEvent',
'io.kestra.core.runners.SubflowExecutionEnd',
'io.kestra.core.models.flows.FlowInterface',
'io.kestra.core.runners.MultipleConditionEvent',
'io.kestra.ee.assets.AssetLineageEvent',
'io.kestra.ee.assets.AssetUpsertCommand',
'io.kestra.ee.assets.AssetStateEvent'
) NOT NULL

View File

@@ -1,32 +1,6 @@
package io.kestra.runner.h2;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.runners.FlowListeners;
import io.kestra.core.runners.FlowListenersTest;
import io.kestra.core.services.PluginDefaultService;
import jakarta.inject.Inject;
import jakarta.inject.Named;
import java.util.concurrent.TimeoutException;
import org.junit.jupiter.api.Test;
class H2FlowListenersTest extends FlowListenersTest {
@Inject
FlowRepositoryInterface flowRepository;
@Inject
@Named(QueueFactoryInterface.FLOW_NAMED)
QueueInterface<FlowInterface> flowQueue;
@Inject
PluginDefaultService pluginDefaultService;
@Test
public void all() throws TimeoutException {
// we don't inject FlowListeners to remove a flaky test
this.suite(new FlowListeners(flowRepository, flowQueue, pluginDefaultService));
}
}
}

View File

@@ -1,3 +0,0 @@
ALTER TABLE triggers
ADD COLUMN `disabled` BOOL
GENERATED ALWAYS AS (value ->> '$.disabled' = 'true') STORED NOT NULL

View File

@@ -1,22 +0,0 @@
ALTER TABLE queues MODIFY COLUMN `type` ENUM(
'io.kestra.core.models.executions.Execution',
'io.kestra.core.models.templates.Template',
'io.kestra.core.models.executions.ExecutionKilled',
'io.kestra.core.runners.WorkerJob',
'io.kestra.core.runners.WorkerTaskResult',
'io.kestra.core.runners.WorkerInstance',
'io.kestra.core.runners.WorkerTaskRunning',
'io.kestra.core.models.executions.LogEntry',
'io.kestra.core.models.triggers.Trigger',
'io.kestra.ee.models.audits.AuditLog',
'io.kestra.core.models.executions.MetricEntry',
'io.kestra.core.runners.WorkerTriggerResult',
'io.kestra.core.runners.SubflowExecutionResult',
'io.kestra.core.server.ClusterEvent',
'io.kestra.core.runners.SubflowExecutionEnd',
'io.kestra.core.models.flows.FlowInterface',
'io.kestra.core.runners.MultipleConditionEvent',
'io.kestra.ee.assets.AssetLineageEvent',
'io.kestra.ee.assets.AssetUpsertCommand',
'io.kestra.ee.assets.AssetStateEvent'
) NOT NULL;

View File

@@ -1,17 +1,6 @@
package io.kestra.runner.mysql;
import io.kestra.core.runners.FlowListeners;
import io.kestra.core.runners.FlowListenersTest;
import jakarta.inject.Inject;
import java.util.concurrent.TimeoutException;
import org.junit.jupiter.api.Test;
class MysqlFlowListenersTest extends FlowListenersTest {
@Inject
FlowListeners flowListenersService;
@Test
public void all() throws TimeoutException {
this.suite(flowListenersService);
}
}
}

View File

@@ -1,17 +1,6 @@
package io.kestra.schedulers.mysql;
import io.kestra.core.runners.FlowListeners;
import io.kestra.jdbc.runner.JdbcScheduler;
import io.kestra.scheduler.AbstractScheduler;
import io.kestra.scheduler.SchedulerExecutionStateInterface;
import io.kestra.scheduler.SchedulerScheduleTest;
class MysqlSchedulerScheduleTest extends SchedulerScheduleTest {
@Override
protected AbstractScheduler scheduler(FlowListeners flowListenersServiceSpy, SchedulerExecutionStateInterface executionStateSpy) {
return new JdbcScheduler(
applicationContext,
flowListenersServiceSpy
);
}
}

View File

@@ -1,4 +0,0 @@
ALTER TABLE triggers
ADD COLUMN "disabled" BOOL
GENERATED ALWAYS AS (CAST(value ->> 'disabled' AS BOOL)) STORED NOT NULL;

View File

@@ -1,3 +0,0 @@
ALTER TYPE queue_type ADD VALUE IF NOT EXISTS 'io.kestra.ee.assets.AssetLineageEvent';
ALTER TYPE queue_type ADD VALUE IF NOT EXISTS 'io.kestra.ee.assets.AssetUpsertCommand';
ALTER TYPE queue_type ADD VALUE IF NOT EXISTS 'io.kestra.ee.assets.AssetStateEvent';

View File

@@ -1,17 +1,6 @@
package io.kestra.runner.postgres;
import io.kestra.core.runners.FlowListeners;
import io.kestra.core.runners.FlowListenersTest;
import jakarta.inject.Inject;
import java.util.concurrent.TimeoutException;
import org.junit.jupiter.api.Test;
class PostgresFlowListenersTest extends FlowListenersTest {
@Inject
FlowListeners flowListenersService;
@Test
public void all() throws TimeoutException {
this.suite(flowListenersService);
}
}
}

View File

@@ -1,17 +1,6 @@
package io.kestra.schedulers.postgres;
import io.kestra.core.runners.FlowListeners;
import io.kestra.jdbc.runner.JdbcScheduler;
import io.kestra.scheduler.AbstractScheduler;
import io.kestra.scheduler.SchedulerExecutionStateInterface;
import io.kestra.scheduler.SchedulerScheduleTest;
class PostgresSchedulerScheduleTest extends SchedulerScheduleTest {
@Override
protected AbstractScheduler scheduler(FlowListeners flowListenersServiceSpy, SchedulerExecutionStateInterface executionStateSpy) {
return new JdbcScheduler(
applicationContext,
flowListenersServiceSpy
);
}
}

View File

@@ -324,14 +324,6 @@ public abstract class AbstractJdbcRepository {
}
}
if(field == QueryFilter.Field.TRIGGER_STATE){
return applyTriggerStateCondition(value, operation);
}
if (field.equals(QueryFilter.Field.METADATA)) {
return findMetadataCondition((Map<?, ?>) value, operation);
}
// Convert the field name to lowercase and quote it
Name columnName = getColumnName(field);
@@ -349,7 +341,7 @@ public abstract class AbstractJdbcRepository {
case CONTAINS -> DSL.field(columnName).like("%" + value + "%");
case REGEX -> DSL.field(columnName).likeRegex((String) value);
case PREFIX -> DSL.field(columnName).like(value + "%")
.or(DSL.field(columnName).eq(value));
.or(DSL.field(columnName).eq(value));
default -> throw new InvalidQueryFiltersException("Unsupported operation: " + operation);
};
}
@@ -384,10 +376,6 @@ public abstract class AbstractJdbcRepository {
throw new InvalidQueryFiltersException("Unsupported operation: " + operation);
}
protected Condition findMetadataCondition(Map<?, ?> metadata, QueryFilter.Op operation) {
throw new InvalidQueryFiltersException("Unsupported operation: " + operation);
}
// Generate the condition for Field.STATE
@SuppressWarnings("unchecked")
private Condition generateStateCondition(Object value, QueryFilter.Op operation) {
@@ -481,23 +469,6 @@ public abstract class AbstractJdbcRepository {
};
}
private Condition applyTriggerStateCondition(Object value, QueryFilter.Op operation) {
String triggerState = value.toString();
Boolean isDisabled = switch (triggerState) {
case "disabled" -> true;
case "enabled" -> false;
default -> null;
};
if (isDisabled == null) {
return DSL.noCondition();
}
return switch (operation) {
case EQUALS -> field("disabled").eq(isDisabled);
case NOT_EQUALS -> field("disabled").ne(isDisabled);
default -> throw new InvalidQueryFiltersException("Unsupported operation for Trigger State: " + operation);
};
}
protected Field<Date> formatDateField(String dateField, DateUtils.GroupType groupType) {
throw new UnsupportedOperationException("formatDateField() not implemented");
}

View File

@@ -297,23 +297,10 @@ public class JdbcExecutor implements ExecutorInterface {
this.receiveCancellations.addFirst(((JdbcQueue<Execution>) this.executionQueue).receiveBatch(
Executor.class,
executions -> {
                // process execution messages grouped by executionId to avoid concurrency at the execution level
List<CompletableFuture<Void>> perExecutionFutures = executions.stream()
.filter(Either::isLeft)
.collect(Collectors.groupingBy(either -> either.getLeft().getId()))
.values()
.stream()
.map(eithers -> CompletableFuture.runAsync(() -> {
eithers.forEach(this::executionQueue);
}, executionExecutorService))
List<CompletableFuture<Void>> futures = executions.stream()
.map(execution -> CompletableFuture.runAsync(() -> executionQueue(execution), executionExecutorService))
.toList();
// directly process deserialization issues as most of the time there will be none
executions.stream()
.filter(Either::isRight)
.forEach(either -> executionQueue(either));
CompletableFuture.allOf(perExecutionFutures.toArray(CompletableFuture[]::new)).join();
CompletableFuture.allOf(futures.toArray(new CompletableFuture[0])).join();
}
));
this.receiveCancellations.addFirst(((JdbcQueue<WorkerTaskResult>) this.workerTaskResultQueue).receiveBatch(
@@ -1069,7 +1056,9 @@ public class JdbcExecutor implements ExecutorInterface {
log.error("Unable to kill the execution {}", killedExecution.getExecutionId(), e);
}
log.error("KILLING FROM EXECUTOR");
Executor executor = killingOrAfterKillState(killedExecution.getExecutionId(), Optional.ofNullable(killedExecution.getExecutionState()));
log.error("KILLED FROM EXECUTOR");
// Check whether kill event should be propagated to downstream executions.
// By default, always propagate the ExecutionKill to sub-flows (for backward compatibility).
@@ -1096,20 +1085,24 @@ public class JdbcExecutor implements ExecutorInterface {
}
private Executor killingOrAfterKillState(final String executionId, Optional<State.Type> afterKillState) {
return executionRepository.lock(executionId, pair -> {
Execution currentExecution = pair.getLeft();
FlowInterface flow = flowMetaStore.findByExecution(currentExecution).orElseThrow();
try {
return executionRepository.lock(executionId, pair -> {
Execution currentExecution = pair.getLeft();
FlowInterface flow = flowMetaStore.findByExecution(currentExecution).orElseThrow();
// remove it from the queued store if it was queued so it would not be restarted
if (currentExecution.getState().isQueued()) {
executionQueuedStorage.remove(currentExecution);
}
// remove it from the queued store if it was queued so it would not be restarted
if (currentExecution.getState().isQueued()) {
executionQueuedStorage.remove(currentExecution);
}
Execution killing = executionService.kill(currentExecution, flow, afterKillState);
Executor current = new Executor(currentExecution, null)
.withExecution(killing, "joinKillingExecution");
return Pair.of(current, pair.getRight());
});
Execution killing = executionService.kill(currentExecution, flow, afterKillState);
Executor current = new Executor(currentExecution, null)
.withExecution(killing, "joinKillingExecution");
return Pair.of(current, pair.getRight());
});
} catch (Exception e) {
throw new RuntimeException("ISSUE WHILE KILLING EXECUTION " + executionId, e);
}
}
private void toExecution(Executor executor) {

View File

@@ -23,6 +23,7 @@ import io.micronaut.context.annotation.Value;
import io.micronaut.transaction.exceptions.CannotCreateTransactionException;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.awaitility.Awaitility;
import org.jooq.*;
import org.jooq.Record;
import org.jooq.exception.DataException;
@@ -301,20 +302,27 @@ public abstract class JdbcQueue<T> implements QueueInterface<T> {
Timer timer = this.metricRegistry
.timer(MetricRegistry.METRIC_QUEUE_RECEIVE_DURATION, MetricRegistry.METRIC_QUEUE_RECEIVE_DURATION_DESCRIPTION, tags);
StackTraceElement[] parentStackTrace = Thread.currentThread().getStackTrace();
return this.poll(() -> timer.record(() -> {
Result<Record> fetch = dslContextWrapper.transactionResult(configuration -> {
DSLContext ctx = DSL.using(configuration);
Result<Record> fetch;
try {
fetch = dslContextWrapper.transactionResult(configuration -> {
DSLContext ctx = DSL.using(configuration);
Result<Record> result = this.receiveFetch(ctx, consumerGroup, maxOffset.get(), forUpdate);
Result<Record> result = this.receiveFetch(ctx, consumerGroup, maxOffset.get(), forUpdate);
if (!result.isEmpty()) {
List<Integer> offsets = result.map(record -> record.get("offset", Integer.class));
if (!result.isEmpty()) {
List<Integer> offsets = result.map(record -> record.get("offset", Integer.class));
maxOffset.set(offsets.getLast());
}
maxOffset.set(offsets.getLast());
}
return result;
});
return result;
});
} catch (Exception e) {
log.error("Error while receiving messages from JDBC queue. Thread stacktrace: {}", parentStackTrace, e);
throw e;
}
this.send(fetch, consumer);
@@ -426,13 +434,14 @@ public abstract class JdbcQueue<T> implements QueueInterface<T> {
@SuppressWarnings("BusyWait")
protected Runnable poll(Supplier<Integer> runnable) {
AtomicBoolean queriedToStop = new AtomicBoolean(false);
AtomicBoolean running = new AtomicBoolean(true);
poolExecutor.execute(() -> {
List<Configuration.Step> steps = configuration.computeSteps();
Duration sleep = configuration.minPollInterval;
ZonedDateTime lastPoll = ZonedDateTime.now();
while (running.get() && !this.isClosed.get()) {
while (!queriedToStop.get() && !this.isClosed.get()) {
if (!this.isPaused.get()) {
try {
Integer count = runnable.get();
@@ -471,9 +480,21 @@ public abstract class JdbcQueue<T> implements QueueInterface<T> {
throw new RuntimeException(e);
}
}
running.set(false);
});
return () -> running.set(false);
return () -> {
queriedToStop.set(true);
try {
Awaitility.await()
.atMost(Duration.ofSeconds(30))
.pollInterval(Duration.ofMillis(10))
.until(() -> !running.get());
} catch (Exception e) {
log.warn("Error while stopping polling", e);
}
};
}
protected List<Either<T, DeserializationException>> map(Result<Record> fetch) {

View File

@@ -19,6 +19,7 @@ import jakarta.inject.Singleton;
import lombok.extern.slf4j.Slf4j;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.List;
import java.util.function.BiConsumer;
@@ -59,7 +60,7 @@ public class JdbcScheduler extends AbstractScheduler {
});
// No-op consumption of the trigger queue, so the events are purged from the queue
this.triggerQueue.receive(Scheduler.class, trigger -> { });
this.receiveCancellations.add(this.triggerQueue.receive(Scheduler.class, trigger -> { }));
}
@Override

View File

@@ -35,7 +35,7 @@ dependencies {
// we define cloud bom here for GCP, Azure and AWS so they are aligned for all plugins that use them (secret, storage, oss and ee plugins)
api platform('com.google.cloud:libraries-bom:26.73.0')
api platform("com.azure:azure-sdk-bom:1.3.3")
api platform('software.amazon.awssdk:bom:2.40.15')
api platform('software.amazon.awssdk:bom:2.40.10')
api platform("dev.langchain4j:langchain4j-bom:$langchain4jVersion")
api platform("dev.langchain4j:langchain4j-community-bom:$langchain4jCommunityVersion")
@@ -98,7 +98,7 @@ dependencies {
api group: 'org.apache.maven.resolver', name: 'maven-resolver-connector-basic', version: mavenResolverVersion
api group: 'org.apache.maven.resolver', name: 'maven-resolver-transport-file', version: mavenResolverVersion
api group: 'org.apache.maven.resolver', name: 'maven-resolver-transport-apache', version: mavenResolverVersion
api 'com.github.oshi:oshi-core:6.9.2'
api 'com.github.oshi:oshi-core:6.9.1'
api 'io.pebbletemplates:pebble:4.1.0'
api group: 'co.elastic.logging', name: 'logback-ecs-encoder', version: '1.7.0'
api group: 'de.focus-shift', name: 'jollyday-core', version: jollydayVersion

View File

@@ -25,6 +25,7 @@ import io.kestra.core.runners.FlowListeners;
import io.kestra.core.utils.Await;
import jakarta.inject.Inject;
import jakarta.inject.Named;
import lombok.SneakyThrows;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
@@ -85,11 +86,18 @@ public class SchedulerScheduleTest extends AbstractSchedulerTest {
.truncatedTo(ChronoUnit.HOURS);
}
protected AbstractScheduler scheduler(FlowListeners flowListenersServiceSpy, SchedulerExecutionStateInterface executionStateSpy) {
protected AbstractScheduler scheduler(FlowListeners flowListenersServiceSpy) {
return new JdbcScheduler(
applicationContext,
flowListenersServiceSpy
);
) {
@SneakyThrows
@Override
public void close() {
super.close();
flowListenersServiceSpy.close();
}
};
}
@BeforeEach
@@ -139,7 +147,7 @@ public class SchedulerScheduleTest extends AbstractSchedulerTest {
triggerState.create(trigger.toBuilder().triggerId("schedule-invalid").flowId(invalid.getId()).build());
// scheduler
try (AbstractScheduler scheduler = scheduler(flowListenersServiceSpy, executionStateSpy)) {
try (AbstractScheduler scheduler = scheduler(flowListenersServiceSpy)) {
// wait for execution
Flux<Execution> receiveExecutions = TestsUtils.receive(executionQueue, throwConsumer(either -> {
Execution execution = either.getLeft();
@@ -200,7 +208,7 @@ public class SchedulerScheduleTest extends AbstractSchedulerTest {
triggerState.create(trigger);
// scheduler
try (AbstractScheduler scheduler = scheduler(flowListenersServiceSpy, executionState)) {
try (AbstractScheduler scheduler = scheduler(flowListenersServiceSpy)) {
scheduler.run();
Await.until(() -> {
@@ -235,7 +243,7 @@ public class SchedulerScheduleTest extends AbstractSchedulerTest {
CountDownLatch queueCount = new CountDownLatch(1);
// scheduler
try (AbstractScheduler scheduler = scheduler(flowListenersServiceSpy, executionState)) {
try (AbstractScheduler scheduler = scheduler(flowListenersServiceSpy)) {
// wait for execution
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
Execution execution = either.getLeft();
@@ -281,7 +289,7 @@ public class SchedulerScheduleTest extends AbstractSchedulerTest {
CountDownLatch queueCount = new CountDownLatch(1);
// scheduler
try (AbstractScheduler scheduler = scheduler(flowListenersServiceSpy, executionState)) {
try (AbstractScheduler scheduler = scheduler(flowListenersServiceSpy)) {
// wait for execution
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
Execution execution = either.getLeft();
@@ -326,7 +334,7 @@ public class SchedulerScheduleTest extends AbstractSchedulerTest {
triggerState.create(lastTrigger);
// scheduler
try (AbstractScheduler scheduler = scheduler(flowListenersServiceSpy, executionState)) {
try (AbstractScheduler scheduler = scheduler(flowListenersServiceSpy)) {
scheduler.run();
Await.until(() -> scheduler.isReady(), Duration.ofMillis(100), Duration.ofSeconds(5));
@@ -358,7 +366,7 @@ public class SchedulerScheduleTest extends AbstractSchedulerTest {
.build();
// scheduler
try (AbstractScheduler scheduler = scheduler(flowListenersServiceSpy, executionState)) {
try (AbstractScheduler scheduler = scheduler(flowListenersServiceSpy)) {
scheduler.run();
Await.until(() -> {
@@ -422,7 +430,7 @@ public class SchedulerScheduleTest extends AbstractSchedulerTest {
triggerState.create(trigger);
// scheduler
try (AbstractScheduler scheduler = scheduler(flowListenersServiceSpy, executionState)) {
try (AbstractScheduler scheduler = scheduler(flowListenersServiceSpy)) {
scheduler.run();
// Wait 3s to see if things happen
@@ -462,7 +470,7 @@ public class SchedulerScheduleTest extends AbstractSchedulerTest {
CountDownLatch queueCount = new CountDownLatch(2);
// scheduler
try (AbstractScheduler scheduler = scheduler(flowListenersServiceSpy, executionState)) {
try (AbstractScheduler scheduler = scheduler(flowListenersServiceSpy)) {
// wait for execution
Flux<Execution> receive = TestsUtils.receive(executionQueue, throwConsumer(either -> {
Execution execution = either.getLeft();
@@ -522,7 +530,7 @@ public class SchedulerScheduleTest extends AbstractSchedulerTest {
CountDownLatch queueCount = new CountDownLatch(1);
// scheduler
try (AbstractScheduler scheduler = scheduler(flowListenersServiceSpy, executionState)) {
try (AbstractScheduler scheduler = scheduler(flowListenersServiceSpy)) {
// wait for execution
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
Execution execution = either.getLeft();
@@ -635,7 +643,7 @@ public class SchedulerScheduleTest extends AbstractSchedulerTest {
CountDownLatch queueCount = new CountDownLatch(1);
// scheduler
try (AbstractScheduler scheduler = scheduler(flowListenersServiceSpy, executionState)) {
try (AbstractScheduler scheduler = scheduler(flowListenersServiceSpy)) {
// wait for execution
Flux<Execution> receive = TestsUtils.receive(executionQueue, throwConsumer(either -> {
Execution execution = either.getLeft();
@@ -708,7 +716,7 @@ public class SchedulerScheduleTest extends AbstractSchedulerTest {
CountDownLatch queueCount = new CountDownLatch(1);
// scheduler
try (AbstractScheduler scheduler = scheduler(flowListenersServiceSpy, executionState)) {
try (AbstractScheduler scheduler = scheduler(flowListenersServiceSpy)) {
// wait for execution
Flux<Execution> receive = TestsUtils.receive(executionQueue, throwConsumer(either -> {
Execution execution = either.getLeft();
@@ -741,4 +749,4 @@ public class SchedulerScheduleTest extends AbstractSchedulerTest {
Await.until(() -> this.triggerState.findLast(trigger).map(t -> t.getNextExecutionDate().isAfter(lastTrigger.getNextExecutionDate().plusSeconds(10))).orElse(false).booleanValue(), Duration.ofMillis(100), Duration.ofSeconds(20));
}
}
}
}

View File

@@ -7,12 +7,10 @@ import io.micronaut.context.annotation.Factory;
import io.micronaut.context.annotation.Requires;
import io.micronaut.test.annotation.TransactionMode;
import io.micronaut.test.condition.TestActiveCondition;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.extension.ExtendWith;
import java.lang.annotation.*;
@Tag("integration")
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.METHOD, ElementType.ANNOTATION_TYPE, ElementType.TYPE})
@ExtendWith(KestraTestExtension.class)

View File

@@ -8,31 +8,25 @@ import io.micronaut.test.extensions.junit5.MicronautJunit5Extension;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.platform.commons.support.AnnotationSupport;
import java.util.Set;
public class KestraTestExtension extends MicronautJunit5Extension {
@Override
protected MicronautTestValue buildMicronautTestValue(Class<?> testClass) {
testProperties.put("kestra.jdbc.executor.thread-count", Runtime.getRuntime().availableProcessors() * 4);
return AnnotationSupport
.findAnnotation(testClass, KestraTest.class)
.map(kestraTestAnnotation -> {
var envsSet = new java.util.HashSet<>(Set.of(kestraTestAnnotation.environments()));
envsSet.add("test");// add test env if not already present
return new MicronautTestValue(
kestraTestAnnotation.application(),
envsSet.toArray(new String[0]),
kestraTestAnnotation.packages(),
kestraTestAnnotation.propertySources(),
kestraTestAnnotation.rollback(),
kestraTestAnnotation.transactional(),
kestraTestAnnotation.rebuildContext(),
kestraTestAnnotation.contextBuilder(),
kestraTestAnnotation.transactionMode(),
kestraTestAnnotation.startApplication(),
kestraTestAnnotation.resolveParameters()
);
})
.map(kestraTestAnnotation -> new MicronautTestValue(
kestraTestAnnotation.application(),
kestraTestAnnotation.environments(),
kestraTestAnnotation.packages(),
kestraTestAnnotation.propertySources(),
kestraTestAnnotation.rollback(),
kestraTestAnnotation.transactional(),
kestraTestAnnotation.rebuildContext(),
kestraTestAnnotation.contextBuilder(),
kestraTestAnnotation.transactionMode(),
kestraTestAnnotation.startApplication(),
kestraTestAnnotation.resolveParameters()
))
.orElse(null);
}

View File

@@ -74,7 +74,7 @@ abstract public class TestsUtils {
* @param prefix
* @return
*/
public static String randomString(String... prefix) {
private static String randomString(String... prefix) {
if (prefix.length == 0) {
prefix = new String[]{String.join("-", stackTraceToParts())};
}

3114
ui/package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -27,7 +27,7 @@
"@kestra-io/ui-libs": "^0.0.268",
"@vue-flow/background": "^1.3.2",
"@vue-flow/controls": "^1.1.2",
"@vue-flow/core": "^1.48.1",
"@vue-flow/core": "^1.48.0",
"@vueuse/core": "^14.1.0",
"ansi-to-html": "^0.7.2",
"axios": "^1.13.2",
@@ -39,7 +39,7 @@
"cytoscape": "^3.33.0",
"dagre": "^0.8.5",
"dotenv": "^17.2.3",
"element-plus": "2.13.0",
"element-plus": "2.12.0",
"humanize-duration": "^3.33.2",
"js-yaml": "^4.1.1",
"lodash": "^4.17.21",
@@ -59,15 +59,15 @@
"path-browserify": "^1.0.1",
"pdfjs-dist": "^5.4.449",
"pinia": "^3.0.4",
"posthog-js": "^1.310.1",
"posthog-js": "^1.308.0",
"rapidoc": "^9.3.8",
"semver": "^7.7.3",
"shiki": "^3.20.0",
"vue": "^3.5.26",
"vue": "^3.5.25",
"vue-axios": "^3.5.2",
"vue-chartjs": "^5.3.3",
"vue-gtag": "^3.6.3",
"vue-i18n": "^11.2.7",
"vue-i18n": "^11.2.2",
"vue-material-design-icons": "^5.3.1",
"vue-router": "^4.6.4",
"vue-sidebar-menu": "^5.9.1",
@@ -97,9 +97,9 @@
"@types/semver": "^7.7.1",
"@types/testing-library__jest-dom": "^6.0.0",
"@types/testing-library__user-event": "^4.2.0",
"@typescript-eslint/parser": "^8.50.1",
"@typescript-eslint/parser": "^8.50.0",
"@vitejs/plugin-vue": "^6.0.3",
"@vitejs/plugin-vue-jsx": "^5.1.3",
"@vitejs/plugin-vue-jsx": "^5.1.2",
"@vitest/browser": "^3.2.4",
"@vitest/coverage-v8": "^3.2.4",
"@vue/eslint-config-prettier": "^10.2.0",
@@ -120,29 +120,29 @@
"playwright": "^1.55.0",
"prettier": "^3.7.4",
"rimraf": "^6.1.2",
"rolldown-vite": "^7.3.0",
"rolldown-vite": "^7.2.11",
"rollup-plugin-copy": "^3.5.0",
"sass": "^1.97.1",
"storybook": "^9.1.17",
"sass": "^1.97.0",
"storybook": "^9.1.16",
"storybook-vue3-router": "^6.0.2",
"ts-node": "^10.9.2",
"typescript": "^5.9.3",
"typescript-eslint": "^8.50.1",
"typescript-eslint": "^8.50.0",
"uuid": "^13.0.0",
"vite": "npm:rolldown-vite@latest",
"vitest": "^3.2.4",
"vue-tsc": "^3.2.1"
"vue-tsc": "^3.1.8"
},
"optionalDependencies": {
"@esbuild/darwin-arm64": "^0.27.2",
"@esbuild/darwin-x64": "^0.27.2",
"@esbuild/linux-x64": "^0.27.2",
"@rollup/rollup-darwin-arm64": "^4.54.0",
"@rollup/rollup-darwin-x64": "^4.54.0",
"@rollup/rollup-linux-x64-gnu": "^4.54.0",
"@swc/core-darwin-arm64": "^1.15.7",
"@swc/core-darwin-x64": "^1.15.7",
"@swc/core-linux-x64-gnu": "^1.15.7"
"@rollup/rollup-darwin-arm64": "^4.53.5",
"@rollup/rollup-darwin-x64": "^4.53.5",
"@rollup/rollup-linux-x64-gnu": "^4.53.5",
"@swc/core-darwin-arm64": "^1.15.5",
"@swc/core-darwin-x64": "^1.15.5",
"@swc/core-linux-x64-gnu": "^1.15.5"
},
"overrides": {
"bootstrap": {

Binary file not shown.

Before

Width:  |  Height:  |  Size: 404 KiB

View File

@@ -43,7 +43,7 @@
REF_PATH_INJECTION_KEY,
ROOT_SCHEMA_INJECTION_KEY,
SCHEMA_DEFINITIONS_INJECTION_KEY,
UPDATE_YAML_FUNCTION_INJECTION_KEY
UPDATE_TASK_FUNCTION_INJECTION_KEY
} from "../../no-code/injectionKeys";
import {NoCodeProps} from "../../flows/noCodeTypes";
import {deepEqual} from "../../../utils/utils";
@@ -68,7 +68,7 @@
dashboardStore.sourceCode = YAML_UTILS.stringify(app);
}
provide(UPDATE_YAML_FUNCTION_INJECTION_KEY, (yaml) => {
provide(UPDATE_TASK_FUNCTION_INJECTION_KEY, (yaml) => {
editorUpdate(yaml)
})

View File

@@ -1,7 +1,7 @@
<template>
<el-dropdown trigger="click" hideOnClick placement="bottom-end">
<el-button :icon="ChartLineVariant" class="selected">
<span v-if="!verticalLayout" class="text-truncate">
<el-button :icon="Menu" class="selected">
<span class="text-truncate">
{{ selected ?? t("dashboards.default") }}
</span>
</el-button>
@@ -75,10 +75,7 @@
import Item from "./Item.vue";
import {useBreakpoints, breakpointsElement} from "@vueuse/core";
const verticalLayout = useBreakpoints(breakpointsElement).smallerOrEqual("sm");
import ChartLineVariant from "vue-material-design-icons/ChartLineVariant.vue";
import Menu from "vue-material-design-icons/Menu.vue";
import Plus from "vue-material-design-icons/Plus.vue";
import Magnify from "vue-material-design-icons/Magnify.vue";

View File

@@ -19,8 +19,7 @@
import {Chart, getDashboard} from "../composables/useDashboards";
import {useChartGenerator} from "../composables/useDashboards";
import {useBreakpoints, breakpointsElement} from "@vueuse/core";
const verticalLayout = useBreakpoints(breakpointsElement).smallerOrEqual("sm");
import {customBarLegend} from "../composables/useLegend";
import {useTheme} from "../../../utils/utils";
@@ -103,7 +102,7 @@
beginAtZero: true,
position: "left",
...DEFAULTS,
display: verticalLayout.value ? false : (props.short ? false : true),
display: props.short ? false : true,
ticks: {
...DEFAULTS.ticks,
callback: value => isDurationAgg() ? Utils.humanDuration(value) : value

View File

@@ -36,9 +36,6 @@
import KestraUtils, {useTheme} from "../../../utils/utils";
import {FilterObject} from "../../../utils/filters";
import {useBreakpoints, breakpointsElement} from "@vueuse/core";
const verticalLayout = useBreakpoints(breakpointsElement).smallerOrEqual("sm");
import {useI18n} from "vue-i18n";
const {t} = useI18n();
@@ -132,7 +129,7 @@
},
position: "left",
...DEFAULTS,
display: verticalLayout.value ? false : (props.short || props.execution ? false : true),
display: props.short || props.execution ? false : true,
ticks: {
...DEFAULTS.ticks,
callback: (value: any) => isDuration(aggregator.value[0]?.[1]?.field) ? KestraUtils.humanDuration(value) : value
@@ -146,7 +143,7 @@
},
position: "right",
...DEFAULTS,
display: verticalLayout.value ? false : (props.short ? false : true),
display: props.short ? false : true,
ticks: {
...DEFAULTS.ticks,
callback: (value: any) => isDuration(aggregator.value[1]?.[1]?.field) ? KestraUtils.humanDuration(value) : value

View File

@@ -1,34 +0,0 @@
<template>
<TopNavBar :title="routeInfo.title" />
<Layout
:title="t(`demos.assets.title`)"
:image="{
source: img,
alt: t(`demos.assets.title`)
}"
:video="{
//TODO: replace with ASSET video
source: 'https://www.youtube.com/embed/jMZ9Cs3xxpo',
}"
>
<template #message>
{{ $t(`demos.assets.message`) }}
</template>
</Layout>
</template>
<script setup lang="ts">
import {computed} from "vue";
import {useI18n} from "vue-i18n";
import img from "../../assets/demo/assets.png";
import useRouteContext from "../../composables/useRouteContext";
import Layout from "./Layout.vue";
import TopNavBar from "../../components/layout/TopNavBar.vue";
const {t} = useI18n();
const routeInfo = computed(() => ({title: t("demos.assets.header")}));
useRouteContext(routeInfo);
</script>

View File

@@ -105,7 +105,7 @@
position: relative;
background: $base-gray-200;
padding: .125rem 0.5rem;
border-radius: 1rem;
border-radius: $border-radius;
display: inline-block;
z-index: 2;
margin: 0 auto;
@@ -175,7 +175,6 @@
line-height: 16px;
font-size: 11px;
text-align: left;
color: var(--ks-content-secondary);
}
.video-container {
@@ -262,7 +261,7 @@
}
p {
font-size: 1rem;
font-size: 14px;
line-height: 22px;
}
}

View File

@@ -42,7 +42,6 @@
:elements="getElements()"
@select="selectNode"
:selected="selectedNodeID"
:subtype="SUBTYPE"
/>
</el-splitter-panel>
</el-splitter>
@@ -55,7 +54,7 @@
import Empty from "../layout/empty/Empty.vue";
import {useDependencies} from "./composables/useDependencies";
import {FLOW, EXECUTION, NAMESPACE, ASSET} from "./utils/types";
import {FLOW, EXECUTION, NAMESPACE} from "./utils/types";
const PANEL = {size: "70%", min: "30%", max: "80%"};
@@ -67,27 +66,13 @@
import SelectionRemove from "vue-material-design-icons/SelectionRemove.vue";
import FitToScreenOutline from "vue-material-design-icons/FitToScreenOutline.vue";
const props = defineProps<{
fetchAssetDependencies?: () => Promise<{
data: any[];
count: number;
}>;
}>();
const SUBTYPE = route.name === "flows/update" ? FLOW : route.name === "namespaces/update" ? NAMESPACE : route.name === "assets/update" ? ASSET : EXECUTION;
const SUBTYPE = route.name === "flows/update" ? FLOW : route.name === "namespaces/update" ? NAMESPACE : EXECUTION;
const container = ref(null);
const initialNodeID: string = SUBTYPE === FLOW || SUBTYPE === NAMESPACE || SUBTYPE === ASSET ? String(route.params.id || route.params.assetId) : String(route.params.flowId);
const initialNodeID: string = SUBTYPE === FLOW || SUBTYPE === NAMESPACE ? String(route.params.id) : String(route.params.flowId);
const TESTING = false; // When true, bypasses API data fetching and uses mock/test data.
const {
getElements,
isLoading,
isRendering,
selectedNodeID,
selectNode,
handlers,
} = useDependencies(container, SUBTYPE, initialNodeID, route.params, TESTING, props.fetchAssetDependencies);
const {getElements, isLoading, isRendering, selectedNodeID, selectNode, handlers} = useDependencies(container, SUBTYPE, initialNodeID, route.params, TESTING);
</script>
<style scoped lang="scss">
@@ -110,7 +95,7 @@
& .controls {
position: absolute;
bottom: 16px;
bottom: 10px;
left: 10px;
display: flex;
flex-direction: column;

View File

@@ -9,32 +9,26 @@
<script setup lang="ts">
import {computed} from "vue";
import {FLOW, EXECUTION, NAMESPACE, ASSET, type Node} from "../utils/types";
import {FLOW, EXECUTION, NAMESPACE, type Node} from "../utils/types";
const props = defineProps<{
node: Node;
subtype: typeof FLOW | typeof EXECUTION | typeof NAMESPACE | typeof ASSET;
subtype: typeof FLOW | typeof EXECUTION | typeof NAMESPACE;
}>();
const to = computed(() => {
const base = {namespace: props.node.namespace};
if (props.subtype === ASSET) {
return {
name: "assets/update",
params: {...base, assetId: props.node.flow},
};
} else if ("id" in props.node.metadata && props.node.metadata.id) {
if ("id" in props.node.metadata && props.node.metadata.id)
return {
name: "executions/update",
params: {...base, flowId: props.node.flow, id: props.node.metadata.id},
};
} else {
else
return {
name: "flows/update",
params: {...base, id: props.node.flow},
};
}
});
</script>

View File

@@ -2,7 +2,7 @@
<section id="input">
<el-input
v-model="search"
:placeholder="$t(props.subtype === ASSET ? 'dependency.search.asset_placeholder' : 'dependency.search.placeholder')"
:placeholder="$t('dependency.search.placeholder')"
clearable
/>
</section>
@@ -38,13 +38,10 @@
size="small"
/>
<RouterLink
v-if="[FLOW, NAMESPACE, ASSET].includes(row.data.metadata.subtype)"
v-if="[FLOW, NAMESPACE].includes(row.data.metadata.subtype)"
:to="{
name: row.data.metadata.subtype === ASSET ? 'assets/update' : 'flows/update',
params: row.data.metadata.subtype === ASSET
? {namespace: row.data.namespace, assetId: row.data.flow}
: {namespace: row.data.namespace, id: row.data.flow}
}"
name: 'flows/update',
params: {namespace: row.data.namespace, id: row.data.flow}}"
>
<el-icon :size="16">
<OpenInNew />
@@ -67,13 +64,12 @@
import OpenInNew from "vue-material-design-icons/OpenInNew.vue";
import {NODE, FLOW, EXECUTION, NAMESPACE, ASSET, type Node} from "../utils/types";
import {NODE, FLOW, EXECUTION, NAMESPACE, type Node} from "../utils/types";
const emits = defineEmits<{ (e: "select", id: Node["id"]): void }>();
const props = defineProps<{
elements: cytoscape.ElementDefinition[];
selected: Node["id"] | undefined;
subtype?: typeof FLOW | typeof EXECUTION | typeof NAMESPACE | typeof ASSET;
}>();
const focusSelectedRow = () => {
@@ -181,10 +177,6 @@ section#row {
& section#right {
flex-shrink: 0;
margin-left: 0.5rem;
:deep(a:hover .el-icon) {
color: var(--ks-content-link-hover);
}
}
}
</style>

View File

@@ -163,7 +163,7 @@ const setExecutionEdgeColors = throttle(
* @param classes - An array of class names to remove from all elements.
* Defaults to [`selected`, `faded`, `hovered`, `executions`].
*/
export function clearClasses(cy: cytoscape.Core, subtype: typeof FLOW | typeof EXECUTION | typeof NAMESPACE | typeof ASSET, classes: string[] = [SELECTED, FADED, HOVERED, EXECUTIONS]): void {
export function clearClasses(cy: cytoscape.Core, subtype: typeof FLOW | typeof EXECUTION | typeof NAMESPACE, classes: string[] = [SELECTED, FADED, HOVERED, EXECUTIONS]): void {
cy.elements().removeClass(classes.join(" "));
if (subtype === EXECUTION) cy.edges().style(edgeColors());
}
@@ -197,7 +197,7 @@ export function fit(cy: cytoscape.Core, padding: number = 50): void {
* @param subtype - Determines how connected elements are highlighted (`FLOW`, `EXECUTION` or `NAMESPACE`).
* @param id - Optional explicit ID to assign to the ref (defaults to the nodes own ID).
*/
function selectHandler(cy: cytoscape.Core, node: cytoscape.NodeSingular, selected: Ref<Node["id"] | undefined>, subtype: typeof FLOW | typeof EXECUTION | typeof NAMESPACE | typeof ASSET, id?: Node["id"]): void {
function selectHandler(cy: cytoscape.Core, node: cytoscape.NodeSingular, selected: Ref<Node["id"] | undefined>, subtype: typeof FLOW | typeof EXECUTION | typeof NAMESPACE, id?: Node["id"]): void {
// Clear all existing classes
clearClasses(cy, subtype);
@@ -263,17 +263,7 @@ function hoverHandler(cy: cytoscape.Core): void {
* @returns An object with element getters, loading state, rendering state, selected node ID,
* selection helpers, and control handlers.
*/
export function useDependencies(
container: Ref<HTMLElement | null>,
subtype: typeof FLOW | typeof EXECUTION | typeof NAMESPACE | typeof ASSET = FLOW,
initialNodeID: string,
params: RouteParams,
isTesting = false,
fetchAssetDependencies?: () => Promise<{
data: Element[];
count: number;
}>
) {
export function useDependencies(container: Ref<HTMLElement | null>, subtype: typeof FLOW | typeof EXECUTION | typeof NAMESPACE = FLOW, initialNodeID: string, params: RouteParams, isTesting = false) {
const coreStore = useCoreStore();
const flowStore = useFlowStore();
const executionsStore = useExecutionsStore();
@@ -311,13 +301,7 @@ export function useDependencies(
}
};
const elements = ref<{
data: cytoscape.ElementDefinition[];
count: number;
}>({
data: [],
count: 0,
});
const elements = ref<{ data: cytoscape.ElementDefinition[]; count: number; }>({data: [], count: 0});
onMounted(async () => {
if (isTesting) {
if (!container.value) {
@@ -329,32 +313,13 @@ export function useDependencies(
isLoading.value = false;
} else {
try {
if (fetchAssetDependencies) {
const result = await fetchAssetDependencies();
elements.value = {
data: result.data,
count: result.count
};
isLoading.value = false;
} else if (subtype === NAMESPACE) {
const {data} = await namespacesStore.loadDependencies({
namespace: params.id as string,
});
if (subtype === NAMESPACE) {
const {data} = await namespacesStore.loadDependencies({namespace: params.id as string});
const nodes = data.nodes ?? [];
elements.value = {
data: transformResponse(data, NAMESPACE),
count: new Set(nodes.map((r: { uid: string }) => r.uid)).size,
};
elements.value = {data: transformResponse(data, NAMESPACE), count: new Set(nodes.map((r: { uid: string }) => r.uid)).size};
isLoading.value = false;
} else {
const result = await flowStore.loadDependencies(
{
id: (subtype === FLOW ? params.id : params.flowId) as string,
namespace: params.namespace as string,
subtype,
},
false
);
const result = await flowStore.loadDependencies({id: (subtype === FLOW ? params.id : params.flowId) as string, namespace: params.namespace as string, subtype}, false);
elements.value = {data: result.data ?? [], count: result.count};
isLoading.value = false;
}
@@ -483,16 +448,8 @@ export function useDependencies(
selectedNodeID,
selectNode,
handlers: {
zoomIn: () =>
cy.zoom({
level: cy.zoom() + 0.1,
renderedPosition: cy.getElementById(selectedNodeID.value!).renderedPosition(),
}),
zoomOut: () =>
cy.zoom({
level: cy.zoom() - 0.1,
renderedPosition: cy.getElementById(selectedNodeID.value!).renderedPosition(),
}),
zoomIn: () => cy.zoom({level: cy.zoom() + 0.1, renderedPosition: cy.getElementById(selectedNodeID.value!).renderedPosition()}),
zoomOut: () => cy.zoom({level: cy.zoom() - 0.1, renderedPosition: cy.getElementById(selectedNodeID.value!).renderedPosition()}),
clearSelection: () => {
clearClasses(cy, subtype);
selectedNodeID.value = undefined;
@@ -511,23 +468,9 @@ export function useDependencies(
* @param subtype - The node subtype, either `FLOW`, `EXECUTION`, or `NAMESPACE`.
* @returns An array of cytoscape elements with correctly typed nodes and edges.
*/
export function transformResponse(response: {nodes: { uid: string; namespace: string; id: string }[]; edges: { source: string; target: string }[];}, subtype: typeof FLOW | typeof EXECUTION | typeof NAMESPACE): Element[] {
const nodes: Node[] = response.nodes.map((node) => ({
id: node.uid,
type: NODE,
flow: node.id,
namespace: node.namespace,
metadata: {subtype},
}));
const edges: Edge[] = response.edges.map((edge) => ({
id: uuid(),
type: EDGE,
source: edge.source,
target: edge.target,
}));
export function transformResponse(response: {nodes: { uid: string; namespace: string; id: string }[]; edges: { source: string; target: string }[]; }, subtype: typeof FLOW | typeof EXECUTION | typeof NAMESPACE): Element[] {
const nodes: Node[] = response.nodes.map((node) => ({id: node.uid, type: NODE, flow: node.id, namespace: node.namespace, metadata: {subtype}}));
const edges: Edge[] = response.edges.map((edge) => ({id: uuid(), type: EDGE, source: edge.source, target: edge.target}));
return [
...nodes.map((node) => ({data: node}) as Element),
...edges.map((edge) => ({data: edge}) as Element),
];
return [...nodes.map((node) => ({data: node}) as Element), ...edges.map((edge) => ({data: edge}) as Element)];
}

View File

@@ -4,7 +4,6 @@ export const EDGE = "EDGE" as const;
export const FLOW = "FLOW" as const;
export const EXECUTION = "EXECUTION" as const;
export const NAMESPACE = "NAMESPACE" as const;
export const ASSET = "ASSET" as const;
type Flow = {
subtype: typeof FLOW;
@@ -20,16 +19,12 @@ type Namespace = {
subtype: typeof NAMESPACE;
};
type Asset = {
subtype: typeof ASSET;
};
export type Node = {
id: string;
type: "NODE";
flow: string;
namespace: string;
metadata: Flow | Execution | Namespace | Asset;
metadata: Flow | Execution | Namespace;
};
export type Edge = {

View File

@@ -25,6 +25,21 @@
<Row :rows="general" />
</div>
<el-divider />
<div class="labels">
<Row :rows="[{icon: LabelMultiple, label: $t('labels')}]">
<template #action>
<SetLabels :execution />
</template>
</Row>
<Labels :labels="execution.labels || []" />
</div>
<el-divider />
<div class="metadata">
<Row :rows="metadata" />
</div>
<el-divider />
<div class="actions">
<Row
@@ -45,21 +60,6 @@
</el-col>
</el-row>
</div>
<el-divider />
<div class="metadata">
<Row :rows="metadata" />
</div>
<el-divider />
<div class="labels">
<Row :rows="[{icon: LabelMultiple, label: $t('labels')}]">
<template #action>
<SetLabels :execution />
</template>
</Row>
<Labels :labels="execution.labels || []" />
</div>
</div>
</el-splitter-panel>
@@ -192,7 +192,7 @@
import ErrorAlert from "./components/main/ErrorAlert.vue";
import Id from "../../Id.vue";
import Cascader, {type Element} from "./components/main/cascaders/Cascader.vue";
import Cascader from "./components/main/cascaders/Cascader.vue";
import TimeSeries from "../../dashboard/sections/TimeSeries.vue";
import PrevNext from "./components/main/PrevNext.vue";
@@ -406,7 +406,7 @@
);
};
const cascaders: Element[] = [
const cascaders = [
{
title: t("variables"),
empty: t("no_variables"),

View File

@@ -83,20 +83,13 @@
children?: Node[];
}
type DebugTypes = "outputs" | "trigger";
export type Element = {
const props = defineProps<{
title: string;
empty: string;
elements?: Record<string, any>;
includeDebug?: DebugTypes | undefined;
}
const props = defineProps<
Element & {
execution: Execution;
}
>();
includeDebug?: "outputs" | "trigger";
execution: Execution;
}>();
const path = ref<string>("");

View File

@@ -152,8 +152,6 @@
font-size: 12px;
color: var(--ks-content-primary);
white-space: nowrap;
display: flex;
align-items: center;
}
.value {
font-weight: 700;

View File

@@ -86,7 +86,7 @@
);
const isKVPairFilter = computed(() =>
props.filterKey?.valueType === "key-value"
props.filterKey?.valueType === "key-value" || (props.filterKey?.key === "labels" && KV_COMPARATORS.includes(state.selectedComparator))
);
const valueComponent = computed(() => {

View File

@@ -14,6 +14,7 @@ import {
COMPARATOR_LABELS,
Comparators,
TEXT_COMPARATORS,
KV_COMPARATORS
} from "../utils/filterTypes";
import {usePreAppliedFilters} from "./usePreAppliedFilters";
import {useDefaultFilter} from "./useDefaultFilter";
@@ -66,11 +67,11 @@ export function useFilters(
};
const clearLegacyParams = (query: Record<string, any>) => {
configuration.keys?.forEach(({key, valueType}) => {
configuration.keys?.forEach(({key}) => {
delete query[key];
if (valueType === "key-value") {
if (key === "details") {
Object.keys(query).forEach(queryKey => {
if (queryKey.startsWith(`${key}.`)) delete query[queryKey];
if (queryKey.startsWith("details.")) delete query[queryKey];
});
}
});
@@ -84,10 +85,10 @@ export function useFilters(
*/
const buildLegacyQuery = (query: Record<string, any>) => {
getUniqueFilters(appliedFilters.value.filter(isValidFilter)).forEach(filter => {
if (configuration.keys?.find(k => k.key === filter.key)?.valueType === "key-value") {
if (filter.key === "details") {
(filter.value as string[]).forEach(item => {
const [k, v] = item.split(":");
query[`${filter.key}.${k}`] = v;
query[`details.${k}`] = v;
});
} else if (Array.isArray(filter.value)) {
filter.value.forEach(item =>
@@ -107,6 +108,8 @@ export function useFilters(
const query = {...route.query};
clearFilterQueryParams(query);
delete query.page;
if (legacyQuery) {
clearLegacyParams(query);
buildLegacyQuery(query);
@@ -116,15 +119,6 @@ export function useFilters(
}
updateSearchQuery(query);
if (
(appliedFilters.value.some(f => Array.isArray(f.value) && f.value.length > 0)
|| searchQuery.value.trim())
&& parseInt(String(query.page ?? "1")) > 1
) {
delete query.page;
}
router.push({query});
};
@@ -151,13 +145,14 @@ export function useFilters(
value: string | string[]
): AppliedFilter => {
const comparator = (config?.comparators?.[0] as Comparators) ?? Comparators.EQUALS;
return createAppliedFilter(key, config, comparator, value,
config?.valueType === "key-value" && Array.isArray(value)
? value.length > 1 ? `${value[0]} +${value.length - 1}` : value[0] ?? ""
const valueLabel = Array.isArray(value)
? key === "details" && value.length > 1
? `${value[0]} +${value.length - 1}`
: Array.isArray(value)
? value.join(", ")
: value as string
, "EQUALS");
: value[0]
: (value as string);
return createAppliedFilter(key, config, comparator, value, valueLabel, "EQUALS");
};
const createTimeRangeFilter = (
@@ -166,13 +161,14 @@ export function useFilters(
endDate: Date,
comparator = Comparators.EQUALS
): AppliedFilter => {
const valueLabel = `${startDate.toLocaleDateString()} - ${endDate.toLocaleDateString()}`;
return {
...createAppliedFilter(
"timeRange",
config,
comparator,
{startDate, endDate},
`${startDate.toLocaleDateString()} - ${endDate.toLocaleDateString()}`,
valueLabel,
keyOfComparator(comparator)
),
comparatorLabel: "Is Between"
@@ -185,36 +181,34 @@ export function useFilters(
*/
const parseLegacyFilters = (): AppliedFilter[] => {
const filtersMap = new Map<string, AppliedFilter>();
const keyValueFilters: Record<string, string[]> = {};
const details: string[] = [];
Object.entries(route.query).forEach(([key, value]) => {
if (["q", "search", "filters[q][EQUALS]"].includes(key)) return;
const kvConfig = configuration.keys?.find(k => key.startsWith(`${k.key}.`) && k.valueType === "key-value");
if (kvConfig) {
if (!keyValueFilters[kvConfig.key]) keyValueFilters[kvConfig.key] = [];
keyValueFilters[kvConfig.key].push(`${key.split(".")[1]}:${value}`);
if (key.startsWith("details.")) {
details.push(`${key.split(".")[1]}:${value}`);
return;
}
const config = configuration.keys?.find(k => k.key === key);
if (!config) return;
filtersMap.set(key, createFilter(key, config,
Array.isArray(value)
? (value as string[]).filter(v => v !== null)
: config?.valueType === "multi-select"
? ((value as string) ?? "").split(",")
: ((value as string) ?? "")
));
const processedValue = Array.isArray(value)
? (value as string[]).filter(v => v !== null)
: config?.valueType === "multi-select"
? ((value as string) ?? "").split(",")
: ((value as string) ?? "");
filtersMap.set(key, createFilter(key, config, processedValue));
});
Object.entries(keyValueFilters).forEach(([key, values]) => {
const config = configuration.keys?.find(k => k.key === key);
if (details.length > 0) {
const config = configuration.keys?.find(k => k.key === "details");
if (config) {
filtersMap.set(key, createFilter(key, config, values));
filtersMap.set("details", createFilter("details", config, details));
}
});
}
if (route.query.startDate && route.query.endDate) {
const timeRangeConfig = configuration.keys?.find(k => k.key === "timeRange");
@@ -233,10 +227,13 @@ export function useFilters(
return Array.from(filtersMap.values());
};
const processFieldValue = (config: any, params: any[], _field: string, comparator: Comparators) => {
const isKVFilter = (field: string, comparator: Comparators) =>
field === "details" || (field === "labels" && KV_COMPARATORS.includes(comparator));
const processFieldValue = (config: any, params: any[], field: string, comparator: Comparators) => {
const isTextOp = TEXT_COMPARATORS.includes(comparator);
if (config?.valueType === "key-value") {
if (isKVFilter(field, comparator)) {
const combinedValue = params.map(p => p?.value as string);
return {
value: combinedValue,
@@ -256,9 +253,10 @@ export function useFilters(
};
}
let value = Array.isArray(params[0]?.value)
? params[0].value[0]
: (params[0]?.value as string);
const param = params[0];
let value = Array.isArray(param?.value)
? param.value[0]
: (param?.value as string);
if (config?.valueType === "date" && typeof value === "string") {
value = new Date(value);

View File

@@ -103,10 +103,6 @@ export function useValues(label: string | undefined, t?: ReturnType<typeof useI1
STATUSES: buildFromArray(["PENDING", "ACCEPTED", "EXPIRED"]),
AGGREGATIONS: buildFromArray(["SUM", "AVG", "MIN", "MAX"]),
RELATIVE_DATE,
TRIGGER_STATES:[
{label: t("filter.triggerState.enabled"), value: "enabled"},
{label: t("filter.triggerState.disabled"), value: "disabled"}
]
};
return {VALUES, getRelativeDateLabel};

View File

@@ -127,7 +127,7 @@ export const useExecutionFilter = (): ComputedRef<FilterConfiguration> => {
label: t("filter.labels_execution.label"),
description: t("filter.labels_execution.description"),
comparators: [Comparators.EQUALS, Comparators.NOT_EQUALS],
valueType: "key-value",
valueType: "text",
},
{
key: "triggerExecutionId",

View File

@@ -74,7 +74,7 @@ export const useFlowExecutionFilter = (): ComputedRef<FilterConfiguration> => {
label: t("filter.labels_execution.label"),
description: t("filter.labels_execution.description"),
comparators: [Comparators.EQUALS, Comparators.NOT_EQUALS],
valueType: "key-value",
valueType: "text",
},
{
key: "triggerExecutionId",

View File

@@ -67,7 +67,7 @@ export const useFlowFilter = (): ComputedRef<FilterConfiguration> => {
label: t("filter.labels_flow.label"),
description: t("filter.labels_flow.description"),
comparators: [Comparators.EQUALS, Comparators.NOT_EQUALS],
valueType: "key-value",
valueType: "text",
},
]
};

Some files were not shown because too many files have changed in this diff Show More