refactor: add GenericFlow to support un-typed flow deserialization

Add new FlowId, FlowInterface and GenericFlow classes to support
deserialization of flows with un-typed plugins (i.e., tasks and triggers),
so that plugin defaults can be injected prior to strongly-typed deserialization.
Florian Hussonnois
2025-03-14 14:33:21 +01:00
committed by Florian Hussonnois
parent fc8732f96e
commit 8f29a72df7
124 changed files with 2420 additions and 1609 deletions
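
A minimal sketch (not part of this change-set) of the two-step deserialization this commit enables: GenericFlow.fromYaml(...) and the FlowInterface helpers are introduced below in this diff, while the tenant id, the YAML content and the defaults-injection call are illustrative assumptions only.

// Sketch only: intended flow of data after this commit.
import io.kestra.core.models.flows.GenericFlow;

public class GenericFlowSketch {
    public static void main(String[] args) {
        String source = """
            id: hello
            namespace: company.team
            tasks:
              - id: log
                type: io.kestra.plugin.core.log.Log
                message: Hello World
            """;

        // 1. Un-typed parse: task and trigger properties are kept as generic maps,
        //    so unknown plugin properties cannot fail deserialization at this stage.
        GenericFlow generic = GenericFlow.fromYaml("my-tenant", source);
        System.out.println(generic.uid());

        // 2. Plugin defaults can then be injected before the strongly-typed parse, e.g.
        //    pluginDefaultService.parseFlowWithAllDefaults("my-tenant", source, false)
        //    as used by FileChangedEventListener further down in this diff.
    }
}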

View File

@@ -74,10 +74,9 @@ public abstract class AbstractValidateCommand extends AbstractApiCommand {
}
}
-// bug in micronaut, we can't inject YamlFlowParser & ModelValidator, so we inject from implementation
+// bug in micronaut, we can't inject ModelValidator, so we inject from implementation
public Integer call(
Class<?> cls,
-YamlParser yamlParser,
ModelValidator modelValidator,
Function<Object, String> identity,
Function<Object, List<String>> warningsFunction,
@@ -94,7 +93,7 @@ public abstract class AbstractValidateCommand extends AbstractApiCommand {
.filter(YamlParser::isValidExtension)
.forEach(path -> {
try {
-Object parse = yamlParser.parse(path.toFile(), cls);
+Object parse = YamlParser.parse(path.toFile(), cls);
modelValidator.validate(parse);
stdOut("@|green \u2713|@ - " + identity.apply(parse));
List<String> warnings = warningsFunction.apply(parse);

View File

@@ -29,8 +29,7 @@ public class FlowDotCommand extends AbstractCommand {
public Integer call() throws Exception {
super.call();
-YamlParser parser = applicationContext.getBean(YamlParser.class);
-Flow flow = parser.parse(file.toFile(), Flow.class);
+Flow flow = YamlParser.parse(file.toFile(), Flow.class);
GraphCluster graph = GraphUtils.of(flow, null);

View File

@@ -20,9 +20,6 @@ public class FlowExpandCommand extends AbstractCommand {
@CommandLine.Parameters(index = "0", description = "The flow file to expand")
private Path file;
-@Inject
-private YamlParser yamlParser;
@Inject
private ModelValidator modelValidator;
@@ -31,7 +28,7 @@ public class FlowExpandCommand extends AbstractCommand {
super.call();
stdErr("Warning, this functionality is deprecated and will be removed at some point.");
String content = IncludeHelperExpander.expand(Files.readString(file), file.getParent());
-Flow flow = yamlParser.parse(content, Flow.class);
+Flow flow = YamlParser.parse(content, Flow.class);
modelValidator.validate(flow);
stdOut(content);
return 0;

View File

@@ -1,9 +1,8 @@
package io.kestra.cli.commands.flows;
import io.kestra.cli.AbstractValidateCommand;
-import io.kestra.core.models.flows.Flow;
+import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.validations.ModelValidator;
-import io.kestra.core.serializers.YamlParser;
import io.kestra.core.services.FlowService;
import jakarta.inject.Inject;
import picocli.CommandLine;
@@ -16,8 +15,6 @@ import java.util.List;
description = "Validate a flow"
)
public class FlowValidateCommand extends AbstractValidateCommand {
-@Inject
-private YamlParser yamlParser;
@Inject
private ModelValidator modelValidator;
@@ -28,23 +25,22 @@ public class FlowValidateCommand extends AbstractValidateCommand {
@Override
public Integer call() throws Exception {
return this.call(
-Flow.class,
+FlowWithSource.class,
-yamlParser,
modelValidator,
(Object object) -> {
-Flow flow = (Flow) object;
+FlowWithSource flow = (FlowWithSource) object;
return flow.getNamespace() + " / " + flow.getId();
},
(Object object) -> {
-Flow flow = (Flow) object;
+FlowWithSource flow = (FlowWithSource) object;
List<String> warnings = new ArrayList<>();
warnings.addAll(flowService.deprecationPaths(flow).stream().map(deprecation -> deprecation + " is deprecated").toList());
warnings.addAll(flowService.warnings(flow, this.tenantId));
return warnings;
},
(Object object) -> {
-Flow flow = (Flow) object;
+FlowWithSource flow = (FlowWithSource) object;
-return flowService.relocations(flow.generateSource()).stream().map(relocation -> relocation.from() + " is replaced by " + relocation.to()).toList();
+return flowService.relocations(flow.sourceOrGenerateIfNull()).stream().map(relocation -> relocation.from() + " is replaced by " + relocation.to()).toList();
}
);
}

View File

@@ -10,7 +10,6 @@ import io.micronaut.http.MediaType;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.netty.DefaultHttpClient;
-import jakarta.inject.Inject;
import jakarta.validation.ConstraintViolationException;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
@@ -27,8 +26,6 @@ import java.util.List;
)
@Slf4j
public class FlowNamespaceUpdateCommand extends AbstractServiceNamespaceUpdateCommand {
-@Inject
-public YamlParser yamlParser;
@CommandLine.Option(names = {"--override-namespaces"}, negatable = true, description = "Replace namespace of all flows by the one provided")
public boolean override = false;

View File

@@ -2,6 +2,7 @@ package io.kestra.cli.commands.sys;
import io.kestra.cli.AbstractCommand;
import io.kestra.core.models.flows.Flow;
+import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject;
@@ -9,6 +10,7 @@ import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
import java.util.List;
+import java.util.Objects;
@CommandLine.Command(
name = "reindex",
@@ -33,8 +35,8 @@ public class ReindexCommand extends AbstractCommand {
List<Flow> allFlow = flowRepository.findAllForAllTenants();
allFlow.stream()
.map(flow -> flowRepository.findByIdWithSource(flow.getTenantId(), flow.getNamespace(), flow.getId()).orElse(null))
-.filter(flow -> flow != null)
+.filter(Objects::nonNull)
-.forEach(flow -> flowRepository.update(flow.toFlow(), flow.toFlow(), flow.getSource(), flow.toFlow()));
+.forEach(flow -> flowRepository.update(GenericFlow.of(flow), flow));
stdOut("Successfully reindex " + allFlow.size() + " flow(s).");
}

View File

@@ -4,7 +4,6 @@ import io.kestra.cli.AbstractValidateCommand;
import io.kestra.core.models.templates.Template;
import io.kestra.core.models.templates.TemplateEnabled;
import io.kestra.core.models.validations.ModelValidator;
-import io.kestra.core.serializers.YamlParser;
import jakarta.inject.Inject;
import picocli.CommandLine;
@@ -16,8 +15,6 @@ import java.util.Collections;
)
@TemplateEnabled
public class TemplateValidateCommand extends AbstractValidateCommand {
-@Inject
-private YamlParser yamlParser;
@Inject
private ModelValidator modelValidator;
@@ -26,7 +23,6 @@ public class TemplateValidateCommand extends AbstractValidateCommand {
public Integer call() throws Exception {
return this.call(
Template.class,
-yamlParser,
modelValidator,
(Object object) -> {
Template template = (Template) object;

View File

@@ -10,7 +10,6 @@ import io.micronaut.http.HttpRequest;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.netty.DefaultHttpClient;
-import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
@@ -27,8 +26,6 @@ import jakarta.validation.ConstraintViolationException;
@Slf4j
@TemplateEnabled
public class TemplateNamespaceUpdateCommand extends AbstractServiceNamespaceUpdateCommand {
-@Inject
-public YamlParser yamlParser;
@Override
public Integer call() throws Exception {
@@ -38,7 +35,7 @@ public class TemplateNamespaceUpdateCommand extends AbstractServiceNamespaceUpda
List<Template> templates = files
.filter(Files::isRegularFile)
.filter(YamlParser::isValidExtension)
-.map(path -> yamlParser.parse(path.toFile(), Template.class))
+.map(path -> YamlParser.parse(path.toFile(), Template.class))
.toList();
if (templates.isEmpty()) {

View File

@@ -1,11 +1,12 @@
package io.kestra.cli.services;
+import io.kestra.core.exceptions.DeserializationException;
-import io.kestra.core.models.flows.Flow;
+import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithPath;
import io.kestra.core.models.flows.FlowWithSource;
+import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.models.validations.ModelValidator;
import io.kestra.core.repositories.FlowRepositoryInterface;
-import io.kestra.core.serializers.YamlParser;
import io.kestra.core.services.FlowListenersInterface;
import io.kestra.core.services.PluginDefaultService;
import io.micronaut.context.annotation.Requires;
@@ -40,9 +41,6 @@ public class FileChangedEventListener {
@Inject
private PluginDefaultService pluginDefaultService;
-@Inject
-private YamlParser yamlParser;
@Inject
private ModelValidator modelValidator;
@@ -59,7 +57,6 @@ public class FileChangedEventListener {
private boolean isStarted = false;
@Inject
public FileChangedEventListener(@Nullable FileWatchConfiguration fileWatchConfiguration, @Nullable WatchService watchService) {
this.fileWatchConfiguration = fileWatchConfiguration;
@@ -68,7 +65,7 @@ public class FileChangedEventListener {
public void startListeningFromConfig() throws IOException, InterruptedException {
if (fileWatchConfiguration != null && fileWatchConfiguration.isEnabled()) {
-this.flowFilesManager = new LocalFlowFileWatcher(flowRepositoryInterface, pluginDefaultService);
+this.flowFilesManager = new LocalFlowFileWatcher(flowRepositoryInterface);
List<Path> paths = fileWatchConfiguration.getPaths();
this.setup(paths);
@@ -76,7 +73,7 @@ public class FileChangedEventListener {
// Init existing flows not already in files
flowListeners.listen(flows -> {
if (!isStarted) {
-for (FlowWithSource flow : flows) {
+for (FlowInterface flow : flows) {
if (this.flows.stream().noneMatch(flowWithPath -> flowWithPath.uidWithoutRevision().equals(flow.uidWithoutRevision()))) {
flowToFile(flow, this.buildPath(flow));
this.flows.add(FlowWithPath.of(flow, this.buildPath(flow).toString()));
@@ -137,7 +134,7 @@ public class FileChangedEventListener {
try {
String content = Files.readString(filePath, Charset.defaultCharset());
-Optional<Flow> flow = parseFlow(content, entry);
+Optional<FlowWithSource> flow = parseFlow(content, entry);
if (flow.isPresent()) {
if (kind == StandardWatchEventKinds.ENTRY_MODIFY) {
// Check if we already have a file with the given path
@@ -156,7 +153,7 @@ public class FileChangedEventListener {
flows.add(FlowWithPath.of(flow.get(), filePath.toString()));
}
-flowFilesManager.createOrUpdateFlow(flow.get(), content);
+flowFilesManager.createOrUpdateFlow(GenericFlow.fromYaml(tenantId, content));
log.info("Flow {} from file {} has been created or modified", flow.get().getId(), entry);
}
@@ -207,11 +204,11 @@ public class FileChangedEventListener {
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
if (file.toString().endsWith(".yml") || file.toString().endsWith(".yaml")) {
String content = Files.readString(file, Charset.defaultCharset());
-Optional<Flow> flow = parseFlow(content, file);
+Optional<FlowWithSource> flow = parseFlow(content, file);
if (flow.isPresent() && flows.stream().noneMatch(flowWithPath -> flowWithPath.uidWithoutRevision().equals(flow.get().uidWithoutRevision()))) {
flows.add(FlowWithPath.of(flow.get(), file.toString()));
-flowFilesManager.createOrUpdateFlow(flow.get(), content);
+flowFilesManager.createOrUpdateFlow(GenericFlow.fromYaml(tenantId, content));
}
}
return FileVisitResult.CONTINUE;
@@ -223,27 +220,25 @@ public class FileChangedEventListener {
}
}
-private void flowToFile(FlowWithSource flow, Path path) {
+private void flowToFile(FlowInterface flow, Path path) {
Path defaultPath = path != null ? path : this.buildPath(flow);
try {
-Files.writeString(defaultPath, flow.getSource());
+Files.writeString(defaultPath, flow.source());
log.info("Flow {} has been written to file {}", flow.getId(), defaultPath);
} catch (IOException e) {
log.error("Error writing file: {}", defaultPath, e);
}
}
-private Optional<Flow> parseFlow(String content, Path entry) {
+private Optional<FlowWithSource> parseFlow(String content, Path entry) {
try {
-Flow flow = yamlParser.parse(content, Flow.class);
-FlowWithSource withPluginDefault = pluginDefaultService.injectDefaults(FlowWithSource.of(flow, content));
-modelValidator.validate(withPluginDefault);
+FlowWithSource flow = pluginDefaultService.parseFlowWithAllDefaults(tenantId, content, false);
+modelValidator.validate(flow);
return Optional.of(flow);
-} catch (ConstraintViolationException e) {
+} catch (DeserializationException | ConstraintViolationException e) {
log.warn("Error while parsing flow: {}", entry, e);
}
return Optional.empty();
}
@@ -259,7 +254,7 @@ public class FileChangedEventListener {
}
}
-private Path buildPath(Flow flow) {
+private Path buildPath(FlowInterface flow) {
return fileWatchConfiguration.getPaths().getFirst().resolve(flow.uidWithoutRevision() + ".yml");
}
}

View File

@@ -1,11 +1,11 @@
package io.kestra.cli.services;
-import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowWithSource;
+import io.kestra.core.models.flows.GenericFlow;
public interface FlowFilesManager {
-FlowWithSource createOrUpdateFlow(Flow flow, String content);
+FlowWithSource createOrUpdateFlow(GenericFlow flow);
void deleteFlow(FlowWithSource toDelete);

View File

@@ -1,27 +1,23 @@
package io.kestra.cli.services;
-import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowWithSource;
+import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.repositories.FlowRepositoryInterface;
-import io.kestra.core.services.PluginDefaultService;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class LocalFlowFileWatcher implements FlowFilesManager {
private final FlowRepositoryInterface flowRepository;
-private final PluginDefaultService pluginDefaultService;
-public LocalFlowFileWatcher(FlowRepositoryInterface flowRepository, PluginDefaultService pluginDefaultService) {
+public LocalFlowFileWatcher(FlowRepositoryInterface flowRepository) {
this.flowRepository = flowRepository;
-this.pluginDefaultService = pluginDefaultService;
}
@Override
-public FlowWithSource createOrUpdateFlow(Flow flow, String content) {
+public FlowWithSource createOrUpdateFlow(final GenericFlow flow) {
-FlowWithSource withDefault = pluginDefaultService.injectDefaults(FlowWithSource.of(flow, content));
return flowRepository.findById(null, flow.getNamespace(), flow.getId())
-.map(previous -> flowRepository.update(flow, previous, content, withDefault))
+.map(previous -> flowRepository.update(flow, previous))
-.orElseGet(() -> flowRepository.create(flow, content, withDefault));
+.orElseGet(() -> flowRepository.create(flow));
}
@Override

View File

@@ -1,16 +1,15 @@
package io.kestra.cli.commands.sys.statestore;
-import com.devskiller.friendly_id.FriendlyId;
import io.kestra.core.exceptions.MigrationRequiredException;
import io.kestra.core.exceptions.ResourceExpiredException;
import io.kestra.core.models.flows.Flow;
+import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.runners.RunContext;
import io.kestra.core.runners.RunContextFactory;
import io.kestra.core.storages.StateStore;
import io.kestra.core.storages.StorageInterface;
import io.kestra.core.utils.Hashing;
-import io.kestra.core.utils.IdUtils;
import io.kestra.core.utils.Slugify;
import io.kestra.plugin.core.log.Log;
import io.micronaut.configuration.picocli.PicocliRunner;
@@ -27,7 +26,6 @@ import java.util.List;
import java.util.Map;
import static org.hamcrest.MatcherAssert.assertThat;
-import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.core.Is.is;
class StateStoreMigrateCommandTest {
@@ -45,7 +43,7 @@ class StateStoreMigrateCommandTest {
.namespace("some.valid.namespace." + ((int) (Math.random() * 1000000)))
.tasks(List.of(Log.builder().id("log").type(Log.class.getName()).message("logging").build()))
.build();
-flowRepository.create(flow, flow.generateSource(), flow);
+flowRepository.create(GenericFlow.of(flow));
StorageInterface storage = ctx.getBean(StorageInterface.class);
String tenantId = flow.getTenantId();

View File

@@ -23,4 +23,5 @@ public class KestraRuntimeException extends RuntimeException {
public KestraRuntimeException(Throwable cause) {
super(cause);
}
}

View File

@@ -1,5 +1,6 @@
package io.kestra.core.models.conditions;
+import io.kestra.core.models.flows.FlowInterface;
import lombok.*;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow;
@@ -18,7 +19,7 @@ import jakarta.validation.constraints.NotNull;
@AllArgsConstructor
public class ConditionContext {
@NotNull
-private Flow flow;
+private FlowInterface flow;
private Execution execution;

View File

@@ -14,6 +14,7 @@ import io.kestra.core.models.DeletedInterface;
import io.kestra.core.models.Label;
import io.kestra.core.models.TenantInterface;
import io.kestra.core.models.flows.Flow;
+import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.tasks.ResolvedTask;
import io.kestra.core.runners.FlowableUtils;
@@ -135,8 +136,8 @@ public class Execution implements DeletedInterface, TenantInterface {
* @param labels The Flow labels.
* @return a new {@link Execution}.
*/
-public static Execution newExecution(final Flow flow,
+public static Execution newExecution(final FlowInterface flow,
-final BiFunction<Flow, Execution, Map<String, Object>> inputs,
+final BiFunction<FlowInterface, Execution, Map<String, Object>> inputs,
final List<Label> labels,
final Optional<ZonedDateTime> scheduleDate) {
Execution execution = builder()

View File

@@ -1,8 +1,12 @@
package io.kestra.core.models.flows;
-import io.kestra.core.models.DeletedInterface;
-import io.kestra.core.models.TenantInterface;
+import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+import io.kestra.core.models.Label;
+import io.kestra.core.serializers.ListOrMapOfLabelDeserializer;
+import io.kestra.core.serializers.ListOrMapOfLabelSerializer;
import io.swagger.v3.oas.annotations.Hidden;
+import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.Valid;
import jakarta.validation.constraints.*;
import lombok.Builder;
@@ -11,11 +15,13 @@ import lombok.NoArgsConstructor;
import lombok.experimental.SuperBuilder;
import java.util.List;
+import java.util.Map;
@SuperBuilder(toBuilder = true)
@Getter
@NoArgsConstructor
-public abstract class AbstractFlow implements DeletedInterface, TenantInterface {
+@JsonDeserialize
+public abstract class AbstractFlow implements FlowInterface {
@NotNull
@NotBlank
@Pattern(regexp = "^[a-zA-Z0-9][a-zA-Z0-9._-]*")
@@ -33,6 +39,9 @@ public abstract class AbstractFlow implements DeletedInterface, TenantInterface
@Valid
List<Input<?>> inputs;
+@Valid
+List<Output> outputs;
@NotNull
@Builder.Default
boolean disabled = false;
@@ -46,4 +55,11 @@ public abstract class AbstractFlow implements DeletedInterface, TenantInterface
@Pattern(regexp = "^[a-z0-9][a-z0-9_-]*")
String tenantId;
+@JsonSerialize(using = ListOrMapOfLabelSerializer.class)
+@JsonDeserialize(using = ListOrMapOfLabelDeserializer.class)
+@Schema(implementation = Object.class, oneOf = {List.class, Map.class})
+List<Label> labels;
+Map<String, Object> variables;
}

View File

@@ -6,28 +6,20 @@ import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
-import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
-import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.introspect.AnnotatedMember;
import com.fasterxml.jackson.databind.introspect.JacksonAnnotationIntrospector;
import io.kestra.core.exceptions.InternalException;
import io.kestra.core.models.HasUID;
-import io.kestra.core.models.Label;
import io.kestra.core.models.annotations.PluginProperty;
-import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.sla.SLA;
import io.kestra.core.models.listeners.Listener;
import io.kestra.core.models.tasks.FlowableTask;
import io.kestra.core.models.tasks.Task;
import io.kestra.core.models.tasks.retrys.AbstractRetry;
import io.kestra.core.models.triggers.AbstractTrigger;
-import io.kestra.core.models.triggers.Trigger;
import io.kestra.core.models.validations.ManualConstraintViolation;
import io.kestra.core.serializers.JacksonMapper;
-import io.kestra.core.serializers.ListOrMapOfLabelDeserializer;
-import io.kestra.core.serializers.ListOrMapOfLabelSerializer;
import io.kestra.core.services.FlowService;
-import io.kestra.core.utils.IdUtils;
import io.kestra.core.utils.ListUtils;
import io.kestra.core.validations.FlowValidation;
import io.micronaut.core.annotation.Introspected;
@@ -38,11 +30,18 @@ import jakarta.validation.Valid;
import jakarta.validation.constraints.NotEmpty;
import lombok.*;
import lombok.experimental.SuperBuilder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;
+/**
+ * A serializable flow with no source.
+ * <p>
+ * This class is planned for deprecation - use the {@link FlowWithSource}.
+ */
@SuperBuilder(toBuilder = true)
@Getter
@NoArgsConstructor
@@ -67,11 +66,6 @@ public class Flow extends AbstractFlow implements HasUID {
String description;
-@JsonSerialize(using = ListOrMapOfLabelSerializer.class)
-@JsonDeserialize(using = ListOrMapOfLabelDeserializer.class)
-@Schema(implementation = Object.class, oneOf = {List.class, Map.class})
-List<Label> labels;
Map<String, Object> variables;
@Valid
@@ -135,61 +129,6 @@ public class Flow extends AbstractFlow implements HasUID {
@PluginProperty(beta = true)
List<SLA> sla;
-/** {@inheritDoc **/
-@Override
-@JsonIgnore
-public String uid() {
-return Flow.uid(this.getTenantId(), this.getNamespace(), this.getId(), Optional.ofNullable(this.revision));
-}
-@JsonIgnore
-public String uidWithoutRevision() {
-return Flow.uidWithoutRevision(this.getTenantId(), this.getNamespace(), this.getId());
-}
-public static String uid(Execution execution) {
-return IdUtils.fromParts(
-execution.getTenantId(),
-execution.getNamespace(),
-execution.getFlowId(),
-String.valueOf(execution.getFlowRevision())
-);
-}
-public static String uid(String tenantId, String namespace, String id, Optional<Integer> revision) {
-return IdUtils.fromParts(
-tenantId,
-namespace,
-id,
-String.valueOf(revision.orElse(-1))
-);
-}
-public static String uidWithoutRevision(String tenantId, String namespace, String id) {
-return IdUtils.fromParts(
-tenantId,
-namespace,
-id
-);
-}
-public static String uid(Trigger trigger) {
-return IdUtils.fromParts(
-trigger.getTenantId(),
-trigger.getNamespace(),
-trigger.getFlowId()
-);
-}
-public static String uidWithoutRevision(Execution execution) {
-return IdUtils.fromParts(
-execution.getTenantId(),
-execution.getNamespace(),
-execution.getFlowId()
-);
-}
public Stream<String> allTypes() {
return Stream.of(
Optional.ofNullable(triggers).orElse(Collections.emptyList()).stream().map(AbstractTrigger::getType),
@@ -341,7 +280,7 @@ public class Flow extends AbstractFlow implements HasUID {
);
}
-public boolean equalsWithoutRevision(Flow o) {
+public boolean equalsWithoutRevision(FlowInterface o) {
try {
return WITHOUT_REVISION_OBJECT_MAPPER.writeValueAsString(this).equals(WITHOUT_REVISION_OBJECT_MAPPER.writeValueAsString(o));
} catch (JsonProcessingException e) {
@@ -381,14 +320,6 @@ public class Flow extends AbstractFlow implements HasUID {
}
}
-/**
- * Convenience method to generate the source of a flow.
- * Equivalent to <code>FlowService.generateSource(this);</code>
- */
-public String generateSource() {
-return FlowService.generateSource(this);
-}
public Flow toDeleted() {
return this.toBuilder()
.revision(this.revision + 1)
@@ -396,7 +327,13 @@ public class Flow extends AbstractFlow implements HasUID {
.build();
}
-public FlowWithSource withSource(String source) {
-return FlowWithSource.of(this, source);
+/**
+ * {@inheritDoc}
+ * To be conservative a flow MUST not return any source.
+ */
+@Override
+@JsonIgnore
+public String getSource() {
+return null;
}
}

View File

@@ -1,7 +1,7 @@
package io.kestra.core.models.flows;
+import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
-import io.kestra.core.models.tasks.Task;
import io.kestra.core.models.tasks.TaskForExecution;
import io.kestra.core.models.triggers.AbstractTriggerForExecution;
import io.kestra.core.utils.ListUtils;
@@ -52,4 +52,10 @@ public class FlowForExecution extends AbstractFlow {
.deleted(flow.isDeleted())
.build();
}
+@JsonIgnore
+@Override
+public String getSource() {
+return null;
+}
}

View File

@@ -0,0 +1,71 @@
package io.kestra.core.models.flows;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.triggers.Trigger;
import io.kestra.core.utils.IdUtils;
import lombok.AllArgsConstructor;
import lombok.Getter;
import java.util.Optional;
/**
* Represents a unique and global identifier for a flow.
*/
public interface FlowId {
String getId();
String getNamespace();
Integer getRevision();
String getTenantId();
static String uid(FlowId flow) {
return uid(flow.getTenantId(), flow.getNamespace(), flow.getId(), Optional.ofNullable(flow.getRevision()));
}
static String uid(String tenantId, String namespace, String id, Optional<Integer> revision) {
return of(tenantId, namespace, id, revision.orElse(-1)).toString();
}
static String uidWithoutRevision(FlowId flow) {
return of(flow.getTenantId(), flow.getNamespace(), flow.getId(), null).toString();
}
static String uidWithoutRevision(String tenantId, String namespace, String id) {
return of(tenantId, namespace, id,null).toString();
}
static String uid(Trigger trigger) {
return of(trigger.getTenantId(), trigger.getNamespace(), trigger.getFlowId(), null).toString();
}
static String uidWithoutRevision(Execution execution) {
return of(execution.getTenantId(), execution.getNamespace(), execution.getFlowId(), null).toString();
}
/**
* Static helper method for constructing a new {@link FlowId}.
*
* @return a new {@link FlowId}.
*/
static FlowId of(String tenantId, String namespace, String id, Integer revision) {
return new Default(tenantId, namespace, id, revision);
}
@Getter
@AllArgsConstructor
class Default implements FlowId {
private final String tenantId;
private final String namespace;
private final String id;
private final Integer revision;
@Override
public String toString() {
return IdUtils.fromParts(tenantId, namespace, id, Optional.ofNullable(revision).map(String::valueOf).orElse(null));
}
}
}
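
A hedged usage sketch of the identifier helpers above; the tenant, namespace and flow id values are illustrative, and the exact uid string is whatever IdUtils.fromParts(...) produces.

// Sketch: calling the FlowId helpers introduced in this file.
import io.kestra.core.models.flows.FlowId;
import java.util.Optional;

public class FlowIdSketch {
    public static void main(String[] args) {
        // Build an identifier with an explicit revision.
        FlowId id = FlowId.of("my-tenant", "company.team", "hello", 3);
        System.out.println(id); // delegates to IdUtils.fromParts(...) in Default#toString

        // With a revision, and with the -1 fallback used when the revision is absent.
        System.out.println(FlowId.uid("my-tenant", "company.team", "hello", Optional.of(3)));
        System.out.println(FlowId.uid("my-tenant", "company.team", "hello", Optional.empty()));
        System.out.println(FlowId.uidWithoutRevision("my-tenant", "company.team", "hello"));
    }
}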

View File

@@ -0,0 +1,194 @@
package io.kestra.core.models.flows;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import io.kestra.core.models.DeletedInterface;
import io.kestra.core.models.HasSource;
import io.kestra.core.models.HasUID;
import io.kestra.core.models.Label;
import io.kestra.core.models.TenantInterface;
import io.kestra.core.models.flows.sla.SLA;
import io.kestra.core.serializers.JacksonMapper;
import java.util.AbstractMap;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
/**
* The base interface for FLow.
*/
@JsonDeserialize(as = GenericFlow.class)
public interface FlowInterface extends FlowId, DeletedInterface, TenantInterface, HasUID, HasSource {
Pattern YAML_REVISION_MATCHER = Pattern.compile("(?m)^revision: \\d+\n?");
boolean isDisabled();
boolean isDeleted();
List<Label> getLabels();
List<Input<?>> getInputs();
List<Output> getOutputs();
Map<String, Object> getVariables();
default Concurrency getConcurrency() {
return null;
}
default List<SLA> getSla() {
return List.of();
}
String getSource();
@Override
@JsonIgnore
default String source() {
return getSource();
}
@Override
@JsonIgnore
default String uid() {
return FlowId.uid(this);
}
@JsonIgnore
default String uidWithoutRevision() {
return FlowId.uidWithoutRevision(this);
}
/**
* Checks whether this flow is equals to the given flow.
* <p>
* This method is used to compare if two flow revisions are equal.
*
* @param flow The flow to compare.
* @return {@code true} if both flows are the same. Otherwise {@code false}
*/
@JsonIgnore
default boolean isSameWithSource(final FlowInterface flow) {
return
Objects.equals(this.uidWithoutRevision(), flow.uidWithoutRevision()) &&
Objects.equals(this.isDeleted(), flow.isDeleted()) &&
Objects.equals(this.isDisabled(), flow.isDisabled()) &&
Objects.equals(sourceWithoutRevision(this.getSource()), sourceWithoutRevision(flow.getSource()));
}
/**
* Checks whether this flow matches the given {@link FlowId}.
*
* @param that The {@link FlowId}.
* @return {@code true} if the passed id matches this flow.
*/
@JsonIgnore
default boolean isSameId(FlowId that) {
if (that == null) return false;
return
Objects.equals(this.getTenantId(), that.getTenantId()) &&
Objects.equals(this.getNamespace(), that.getNamespace()) &&
Objects.equals(this.getId(), that.getId());
}
/**
* Static method for removing the 'revision' field from a flow.
*
* @param source The source.
* @return The source without revision.
*/
static String sourceWithoutRevision(final String source) {
return YAML_REVISION_MATCHER.matcher(source).replaceFirst("");
}
/**
* Returns the source code for this flow or generate one if {@code null}.
* <p>
* This method must only be used for testing purpose or for handling backward-compatibility.
*
* @return the sourcecode.
*/
default String sourceOrGenerateIfNull() {
return getSource() != null ? getSource() : SourceGenerator.generate(this);
}
/**
* Static helper class for generating source_code from a {@link FlowInterface} object.
*
* <p>
* This class must only be used for testing purpose or for handling backward-compatibility.
*/
class SourceGenerator {
private static final ObjectMapper NON_DEFAULT_OBJECT_MAPPER = JacksonMapper.ofJson()
.copy()
.setSerializationInclusion(JsonInclude.Include.NON_DEFAULT);
static String generate(final FlowInterface flow) {
try {
String json = NON_DEFAULT_OBJECT_MAPPER.writeValueAsString(flow);
Object map = SourceGenerator.fixSnakeYaml(JacksonMapper.toMap(json));
String source = JacksonMapper.ofYaml().writeValueAsString(map);
// remove the revision from the generated source
return sourceWithoutRevision(source);
} catch (JsonProcessingException e) {
return null;
}
}
/**
* Dirty hack but only concern previous flow with no source code in org.yaml.snakeyaml.emitter.Emitter:
* <pre>
* if (previousSpace) {
* spaceBreak = true;
* }
* </pre>
* This control will detect ` \n` as a no valid entry on a string and will break the multiline to transform in single line
*
* @param object the object to fix
* @return the modified object
*/
private static Object fixSnakeYaml(Object object) {
if (object instanceof Map<?, ?> mapValue) {
return mapValue
.entrySet()
.stream()
.map(entry -> new AbstractMap.SimpleEntry<>(
fixSnakeYaml(entry.getKey()),
fixSnakeYaml(entry.getValue())
))
.filter(entry -> entry.getValue() != null)
.collect(Collectors.toMap(
Map.Entry::getKey,
Map.Entry::getValue,
(u, v) -> {
throw new IllegalStateException(String.format("Duplicate key %s", u));
},
LinkedHashMap::new
));
} else if (object instanceof Collection<?> collectionValue) {
return collectionValue
.stream()
.map(SourceGenerator::fixSnakeYaml)
.toList();
} else if (object instanceof String item) {
if (item.contains("\n")) {
return item.replaceAll("\\s+\\n", "\\\n");
}
}
return object;
}
}
}
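
To make the revision-stripping comparison above concrete, here is a sketch of sourceWithoutRevision(...) on an illustrative stored source; only the YAML content is assumed, the method and regex are from this file.

import io.kestra.core.models.flows.FlowInterface;

public class SourceWithoutRevisionSketch {
    public static void main(String[] args) {
        // Illustrative stored source; only the top-level "revision: N" line is stripped.
        String stored = """
            id: hello
            namespace: company.team
            revision: 4
            tasks:
              - id: log
                type: io.kestra.plugin.core.log.Log
            """;

        // isSameWithSource(...) compares flows on uidWithoutRevision(), the deleted/disabled
        // flags and this revision-less source, so two otherwise identical revisions match.
        System.out.println(FlowInterface.sourceWithoutRevision(stored));
    }
}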

View File

@@ -1,14 +1,16 @@
package io.kestra.core.models.flows;
import com.fasterxml.jackson.databind.JsonNode;
-import io.kestra.core.models.executions.Execution;
+import io.kestra.core.serializers.JacksonMapper;
import io.micronaut.core.annotation.Introspected;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.ToString;
import lombok.experimental.SuperBuilder;
+import org.slf4j.Logger;
+import java.io.IOException;
import java.util.List;
import java.util.Optional;
@@ -21,11 +23,48 @@ import java.util.Optional;
public class FlowWithException extends FlowWithSource {
String exception;
+public static FlowWithException from(final FlowInterface flow, final Exception exception) {
+return FlowWithException.builder()
+.id(flow.getId())
+.tenantId(flow.getTenantId())
+.namespace(flow.getNamespace())
+.revision(flow.getRevision())
+.deleted(flow.isDeleted())
+.exception(exception.getMessage())
+.tasks(List.of())
+.source(flow.getSource())
+.build();
+}
+public static Optional<FlowWithException> from(final String source, final Exception exception, final Logger log) {
+log.error("Unable to deserialize a flow: {}", exception.getMessage());
+try {
+var jsonNode = JacksonMapper.ofJson().readTree(source);
+return FlowWithException.from(jsonNode, exception);
+} catch (IOException e) {
+// if we cannot create a FlowWithException, ignore the message
+log.error("Unexpected exception when trying to handle a deserialization error", e);
+return Optional.empty();
+}
+}
public static Optional<FlowWithException> from(JsonNode jsonNode, Exception exception) {
if (jsonNode.hasNonNull("id") && jsonNode.hasNonNull("namespace")) {
+final String tenantId;
+if (jsonNode.hasNonNull("tenant_id")) {
+// JsonNode is from database
+tenantId = jsonNode.get("tenant_id").asText();
+} else if (jsonNode.hasNonNull("tenantId")) {
+// JsonNode is from queue
+tenantId = jsonNode.get("tenantId").asText();
+} else {
+tenantId = null;
+}
var flow = FlowWithException.builder()
.id(jsonNode.get("id").asText())
-.tenantId(jsonNode.hasNonNull("tenant_id") ? jsonNode.get("tenant_id").asText() : null)
+.tenantId(tenantId)
.namespace(jsonNode.get("namespace").asText())
.revision(jsonNode.hasNonNull("revision") ? jsonNode.get("revision").asInt() : 1)
.deleted(jsonNode.hasNonNull("deleted") && jsonNode.get("deleted").asBoolean())
@@ -39,4 +78,10 @@ public class FlowWithException extends FlowWithSource {
// if there is no id and namespace, we return null as we cannot create a meaningful FlowWithException
return Optional.empty();
}
+/** {@inheritDoc} **/
+@Override
+public Flow toFlow() {
+return this;
+}
}

View File

@@ -18,22 +18,14 @@ import lombok.experimental.SuperBuilder;
@EqualsAndHashCode
@FlowValidation
public class FlowWithPath {
-private FlowWithSource flow;
+private FlowInterface flow;
@Nullable
private String tenantId;
private String id;
private String namespace;
private String path;
-public static FlowWithPath of(FlowWithSource flow, String path) {
-return FlowWithPath.builder()
-.id(flow.getId())
-.namespace(flow.getNamespace())
-.path(path)
-.build();
-}
-public static FlowWithPath of(Flow flow, String path) {
+public static FlowWithPath of(FlowInterface flow, String path) {
return FlowWithPath.builder()
.id(flow.getId())
.namespace(flow.getNamespace())

View File

@@ -1,18 +1,22 @@
package io.kestra.core.models.flows;
-import io.kestra.core.models.HasSource;
+import com.fasterxml.jackson.annotation.JsonIgnore;
import io.micronaut.core.annotation.Introspected;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.ToString;
import lombok.experimental.SuperBuilder;
+import java.util.Objects;
+import java.util.regex.Pattern;
@SuperBuilder(toBuilder = true)
@Getter
@NoArgsConstructor
@Introspected
@ToString
-public class FlowWithSource extends Flow implements HasSource {
+public class FlowWithSource extends Flow {
String source;
@SuppressWarnings("deprecation")
@@ -42,15 +46,13 @@ public class FlowWithSource extends Flow implements HasSource {
.build();
}
-private static String cleanupSource(String source) {
-return source.replaceFirst("(?m)^revision: \\d+\n?","");
-}
-public boolean equals(Flow flow, String flowSource) {
-return this.equalsWithoutRevision(flow) &&
-this.source.equals(cleanupSource(flowSource));
+@Override
+@JsonIgnore(value = false)
+public String getSource() {
+return this.source;
}
+@Override
public FlowWithSource toDeleted() {
return this.toBuilder()
.revision(this.revision + 1)
@@ -85,10 +87,4 @@ public class FlowWithSource extends Flow implements HasSource {
.sla(flow.sla)
.build();
}
-/** {@inheritDoc} **/
-@Override
-public String source() {
-return getSource();
-}
}

View File

@@ -0,0 +1,124 @@
package io.kestra.core.models.flows;
import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.google.common.annotations.VisibleForTesting;
import io.kestra.core.exceptions.DeserializationException;
import io.kestra.core.models.HasUID;
import io.kestra.core.models.Label;
import io.kestra.core.models.flows.sla.SLA;
import io.kestra.core.models.tasks.GenericTask;
import io.kestra.core.models.triggers.GenericTrigger;
import io.kestra.core.serializers.ListOrMapOfLabelDeserializer;
import io.kestra.core.serializers.ListOrMapOfLabelSerializer;
import io.kestra.core.serializers.YamlParser;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Builder;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.experimental.SuperBuilder;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
/**
* Represents an un-typed {@link FlowInterface} implementation for which
* most properties are backed by a {@link Map}.
*
* <p>
* This implementation should be preferred over other implementations when
* no direct access to tasks and triggers is required.
*/
@SuperBuilder(toBuilder = true)
@Getter
@NoArgsConstructor
@JsonDeserialize
public class GenericFlow extends AbstractFlow implements HasUID {
private String id;
private String namespace;
private Integer revision;
private List<Input<?>> inputs;
private Map<String, Object> variables;
@Builder.Default
private boolean disabled = false;
@Builder.Default
private boolean deleted = false;
@JsonSerialize(using = ListOrMapOfLabelSerializer.class)
@JsonDeserialize(using = ListOrMapOfLabelDeserializer.class)
@Schema(implementation = Object.class, oneOf = {List.class, Map.class})
private List<Label> labels;
private String tenantId;
private String source;
private List<SLA> sla;
private Concurrency concurrency;
private List<GenericTask> tasks;
private List<GenericTrigger> triggers;
@JsonIgnore
@Builder.Default
private Map<String, Object> additionalProperties = new HashMap<>();
/**
* Static helper method for constructing a {@link GenericFlow} from {@link FlowInterface}.
*
* @param flow The flow.
* @return a new {@link GenericFlow}
* @throws DeserializationException if source cannot be deserialized.
*/
@VisibleForTesting
public static GenericFlow of(final FlowInterface flow) throws DeserializationException {
return fromYaml(flow.getTenantId(), flow.sourceOrGenerateIfNull());
}
/**
* Static helper method for constructing a {@link GenericFlow} from a YAML source.
*
* @param source The flow YAML source.
* @return a new {@link GenericFlow}
* @throws DeserializationException if source cannot be deserialized.
*/
public static GenericFlow fromYaml(final String tenantId, final String source) throws DeserializationException {
GenericFlow parsed = YamlParser.parse(source, GenericFlow.class);
return parsed.toBuilder()
.tenantId(tenantId)
.source(source)
.build();
}
@JsonAnyGetter
public Map<String, Object> getAdditionalProperties() {
return this.additionalProperties;
}
@JsonAnySetter
public void setAdditionalProperty(String name, Object value) {
this.additionalProperties.put(name, value);
}
public List<GenericTask> getTasks() {
return Optional.ofNullable(tasks).orElse(List.of());
}
public List<GenericTrigger> getTriggers() {
return Optional.ofNullable(triggers).orElse(List.of());
}
}
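
A small sketch of why the un-typed model matters: plugin-specific task properties are not fields of GenericTask, so @JsonAnySetter routes them into additionalProperties instead of failing deserialization, and defaults can be merged into those maps before the strongly-typed parse. The tenant id and YAML below are illustrative only.

import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.models.tasks.GenericTask;

public class UntypedTaskSketch {
    public static void main(String[] args) {
        GenericFlow flow = GenericFlow.fromYaml("my-tenant", """
            id: hello
            namespace: company.team
            tasks:
              - id: log
                type: io.kestra.plugin.core.log.Log
                message: Hello World
            """);

        GenericTask task = flow.getTasks().getFirst();
        System.out.println(task.getType());                                // io.kestra.plugin.core.log.Log
        System.out.println(task.getAdditionalProperties().get("message")); // Hello World
    }
}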

View File

@@ -5,6 +5,7 @@ import io.kestra.core.exceptions.InternalException;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.flows.Flow;
+import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.tasks.*;
import io.kestra.core.runners.FlowExecutorInterface;
import io.kestra.core.runners.RunContext;
@@ -52,7 +53,7 @@ public class SubflowGraphTask extends AbstractGraphTask {
}
@Override
-public Optional<SubflowExecutionResult> createSubflowExecutionResult(RunContext runContext, TaskRun taskRun, Flow flow, Execution execution) {
+public Optional<SubflowExecutionResult> createSubflowExecutionResult(RunContext runContext, TaskRun taskRun, FlowInterface flow, Execution execution) {
return subflowTask.createSubflowExecutionResult(runContext, taskRun, flow, execution);
}

View File

@@ -4,6 +4,8 @@ import io.kestra.core.exceptions.InternalException;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.flows.Flow;
+import io.kestra.core.models.flows.FlowId;
+import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.runners.FlowExecutorInterface;
import io.kestra.core.runners.RunContext;
import io.kestra.core.runners.SubflowExecution;
@@ -29,9 +31,9 @@ public interface ExecutableTask<T extends Output>{
* Creates a SubflowExecutionResult for a given SubflowExecution
*/
Optional<SubflowExecutionResult> createSubflowExecutionResult(RunContext runContext,
TaskRun taskRun,
-Flow flow,
+FlowInterface flow,
Execution execution);
/**
* Whether to wait for the execution(s) of the subflow before terminating this tasks
@@ -51,12 +53,12 @@ public interface ExecutableTask<T extends Output>{
record SubflowId(String namespace, String flowId, Optional<Integer> revision) {
public String flowUid() {
// as the Flow task can only be used in the same tenant we can hardcode null here
-return Flow.uid(null, this.namespace, this.flowId, this.revision);
+return FlowId.uid(null, this.namespace, this.flowId, this.revision);
}
public String flowUidWithoutRevision() {
// as the Flow task can only be used in the same tenant we can hardcode null here
-return Flow.uidWithoutRevision(null, this.namespace, this.flowId);
+return FlowId.uidWithoutRevision(null, this.namespace, this.flowId);
}
}

View File

@@ -0,0 +1,39 @@
package io.kestra.core.models.tasks;
import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import lombok.Builder;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.experimental.SuperBuilder;
import java.util.HashMap;
import java.util.Map;
@SuperBuilder(toBuilder = true)
@Getter
@NoArgsConstructor
@JsonDeserialize
public class GenericTask implements TaskInterface {
private String version;
private String id;
private String type;
private WorkerGroup workerGroup;
@JsonIgnore
@Builder.Default
private Map<String, Object> additionalProperties = new HashMap<>();
@JsonAnyGetter
public Map<String, Object> getAdditionalProperties() {
return this.additionalProperties;
}
@JsonAnySetter
public void setAdditionalProperty(String name, Object value) {
this.additionalProperties.put(name, value);
}
}
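Note: GenericTask keeps only the strongly-typed fields it needs (id, type, version, workerGroup) and routes every other YAML property into additionalProperties via @JsonAnySetter, so an un-typed task can be read before plugin defaults are injected; GenericTrigger below follows the same pattern. A minimal sketch, assuming the class can be targeted directly with JacksonMapper.ofYaml() (the YAML mapper used elsewhere in this commit) and using an illustrative task definition:

import com.fasterxml.jackson.databind.ObjectMapper;
import io.kestra.core.models.tasks.GenericTask;
import io.kestra.core.serializers.JacksonMapper;

public class GenericTaskExample {
    public static void main(String[] args) throws Exception {
        // Illustrative un-typed task definition; `message` is not a declared field of GenericTask.
        String yaml = """
            id: hello
            type: io.kestra.plugin.core.log.Log
            message: Hello World
            """;

        ObjectMapper yamlMapper = JacksonMapper.ofYaml();
        GenericTask task = yamlMapper.readValue(yaml, GenericTask.class);

        // Known fields are typed; unknown properties such as `message` are kept
        // as-is in additionalProperties for later strongly-typed deserialization.
        System.out.println(task.getId() + " / " + task.getType());
        System.out.println(task.getAdditionalProperties());
    }
}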

View File

@@ -2,6 +2,7 @@ package io.kestra.core.models.topologies;
import io.kestra.core.models.TenantInterface; import io.kestra.core.models.TenantInterface;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.swagger.v3.oas.annotations.Hidden; import io.swagger.v3.oas.annotations.Hidden;
import lombok.AllArgsConstructor; import lombok.AllArgsConstructor;
import lombok.Getter; import lombok.Getter;
@@ -25,7 +26,7 @@ public class FlowNode implements TenantInterface {
String id; String id;
public static FlowNode of(Flow flow) { public static FlowNode of(FlowInterface flow) {
return FlowNode.builder() return FlowNode.builder()
.uid(flow.uidWithoutRevision()) .uid(flow.uidWithoutRevision())
.tenantId(flow.getTenantId()) .tenantId(flow.getTenantId())

View File

@@ -0,0 +1,40 @@
package io.kestra.core.models.triggers;
import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import io.kestra.core.models.tasks.WorkerGroup;
import lombok.Builder;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.experimental.SuperBuilder;
import java.util.HashMap;
import java.util.Map;
@SuperBuilder(toBuilder = true)
@Getter
@NoArgsConstructor
@JsonDeserialize
public class GenericTrigger implements TriggerInterface {
private String version;
private String id;
private String type;
private WorkerGroup workerGroup;
@JsonIgnore
@Builder.Default
private Map<String, Object> additionalProperties = new HashMap<>();
@JsonAnyGetter
public Map<String, Object> getAdditionalProperties() {
return this.additionalProperties;
}
@JsonAnySetter
public void setAdditionalProperty(String name, Object value) {
this.additionalProperties.put(name, value);
}
}

View File

@@ -4,6 +4,8 @@ import io.kestra.core.models.HasUID;
import io.kestra.core.models.conditions.ConditionContext; import io.kestra.core.models.conditions.ConditionContext;
import io.kestra.core.models.executions.Execution; import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowId;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.State; import io.kestra.core.models.flows.State;
import io.kestra.core.utils.IdUtils; import io.kestra.core.utils.IdUtils;
import io.kestra.plugin.core.trigger.Schedule; import io.kestra.plugin.core.trigger.Schedule;
@@ -81,13 +83,13 @@ public class Trigger extends TriggerContext implements HasUID {
} }
public String flowUid() { public String flowUid() {
return Flow.uidWithoutRevision(this.getTenantId(), this.getNamespace(), this.getFlowId()); return FlowId.uidWithoutRevision(this.getTenantId(), this.getNamespace(), this.getFlowId());
} }
/** /**
* Create a new Trigger with no execution information and no evaluation lock. * Create a new Trigger with no execution information and no evaluation lock.
*/ */
public static Trigger of(Flow flow, AbstractTrigger abstractTrigger) { public static Trigger of(FlowInterface flow, AbstractTrigger abstractTrigger) {
return Trigger.builder() return Trigger.builder()
.tenantId(flow.getTenantId()) .tenantId(flow.getTenantId())
.namespace(flow.getNamespace()) .namespace(flow.getNamespace())
@@ -163,7 +165,7 @@ public class Trigger extends TriggerContext implements HasUID {
} }
// Used to update trigger in flowListeners // Used to update trigger in flowListeners
public static Trigger of(Flow flow, AbstractTrigger abstractTrigger, ConditionContext conditionContext, Optional<Trigger> lastTrigger) throws Exception { public static Trigger of(FlowInterface flow, AbstractTrigger abstractTrigger, ConditionContext conditionContext, Optional<Trigger> lastTrigger) throws Exception {
ZonedDateTime nextDate = null; ZonedDateTime nextDate = null;
if (abstractTrigger instanceof PollingTriggerInterface pollingTriggerInterface) { if (abstractTrigger instanceof PollingTriggerInterface pollingTriggerInterface) {

View File

@@ -1,6 +1,7 @@
package io.kestra.core.models.triggers.multipleflows; package io.kestra.core.models.triggers.multipleflows;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.FlowId;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.triggers.TimeWindow; import io.kestra.core.models.triggers.TimeWindow;
import org.apache.commons.lang3.tuple.Pair; import org.apache.commons.lang3.tuple.Pair;
@@ -15,11 +16,11 @@ import java.util.Optional;
import static io.kestra.core.models.triggers.TimeWindow.Type.DURATION_WINDOW; import static io.kestra.core.models.triggers.TimeWindow.Type.DURATION_WINDOW;
public interface MultipleConditionStorageInterface { public interface MultipleConditionStorageInterface {
Optional<MultipleConditionWindow> get(Flow flow, String conditionId); Optional<MultipleConditionWindow> get(FlowId flow, String conditionId);
List<MultipleConditionWindow> expired(String tenantId); List<MultipleConditionWindow> expired(String tenantId);
default MultipleConditionWindow getOrCreate(Flow flow, MultipleCondition multipleCondition, Map<String, Object> outputs) { default MultipleConditionWindow getOrCreate(FlowId flow, MultipleCondition multipleCondition, Map<String, Object> outputs) {
ZonedDateTime now = ZonedDateTime.now().withNano(0); ZonedDateTime now = ZonedDateTime.now().withNano(0);
TimeWindow timeWindow = multipleCondition.getTimeWindow() != null ? multipleCondition.getTimeWindow() : TimeWindow.builder().build(); TimeWindow timeWindow = multipleCondition.getTimeWindow() != null ? multipleCondition.getTimeWindow() : TimeWindow.builder().build();

View File

@@ -3,6 +3,7 @@ package io.kestra.core.models.triggers.multipleflows;
import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonIgnore;
import io.kestra.core.models.HasUID; import io.kestra.core.models.HasUID;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowId;
import io.kestra.core.utils.IdUtils; import io.kestra.core.utils.IdUtils;
import lombok.Builder; import lombok.Builder;
import lombok.Value; import lombok.Value;
@@ -44,7 +45,7 @@ public class MultipleConditionWindow implements HasUID {
); );
} }
public static String uid(Flow flow, String conditionId) { public static String uid(FlowId flow, String conditionId) {
return IdUtils.fromParts( return IdUtils.fromParts(
flow.getTenantId(), flow.getTenantId(),
flow.getNamespace(), flow.getNamespace(),

View File

@@ -4,6 +4,7 @@ import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.ExecutionKilled; import io.kestra.core.models.executions.ExecutionKilled;
import io.kestra.core.models.executions.LogEntry; import io.kestra.core.models.executions.LogEntry;
import io.kestra.core.models.executions.MetricEntry; import io.kestra.core.models.executions.MetricEntry;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithSource; import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.triggers.Trigger; import io.kestra.core.models.triggers.Trigger;
import io.kestra.core.runners.*; import io.kestra.core.runners.*;
@@ -42,7 +43,7 @@ public interface QueueFactoryInterface {
QueueInterface<MetricEntry> metricEntry(); QueueInterface<MetricEntry> metricEntry();
QueueInterface<FlowWithSource> flow(); QueueInterface<FlowInterface> flow();
QueueInterface<ExecutionKilled> kill(); QueueInterface<ExecutionKilled> kill();

View File

@@ -5,8 +5,10 @@ import io.kestra.core.models.SearchResult;
import io.kestra.core.models.executions.Execution; import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowForExecution; import io.kestra.core.models.flows.FlowForExecution;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowScope; import io.kestra.core.models.flows.FlowScope;
import io.kestra.core.models.flows.FlowWithSource; import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.GenericFlow;
import io.micronaut.data.model.Pageable; import io.micronaut.data.model.Pageable;
import jakarta.annotation.Nullable; import jakarta.annotation.Nullable;
@@ -176,9 +178,9 @@ public interface FlowRepositoryInterface {
.toList(); .toList();
} }
FlowWithSource create(Flow flow, String flowSource, Flow flowWithDefaults); FlowWithSource create(GenericFlow flow);
FlowWithSource update(Flow flow, Flow previous, String flowSource, Flow flowWithDefaults) throws ConstraintViolationException; FlowWithSource update(GenericFlow flow, FlowInterface previous) throws ConstraintViolationException;
FlowWithSource delete(FlowWithSource flow); FlowWithSource delete(FlowInterface flow);
} }
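Note: the repository contract now takes a GenericFlow, which carries its own source, instead of a Flow plus a separate source string and a defaults-injected copy. A minimal sketch of a caller, mirroring what LocalFlowRepositoryLoader does later in this commit; the repository instance and the YAML source are assumed to be provided by the caller:

import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.repositories.FlowRepositoryInterface;

public class FlowRepositoryUsage {
    // `previous` is null when the flow does not exist yet in the repository.
    public FlowWithSource createOrUpdate(FlowRepositoryInterface flowRepository,
                                         FlowInterface previous,
                                         String source) {
        // Parse the raw YAML into an un-typed flow; the tenant is null here, as in the loader.
        GenericFlow parsed = GenericFlow.fromYaml(null, source);

        return previous == null
            ? flowRepository.create(parsed)
            : flowRepository.update(parsed, previous);
    }
}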

View File

@@ -1,11 +1,16 @@
package io.kestra.core.repositories; package io.kestra.core.repositories;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.FlowId;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.models.validations.ModelValidator; import io.kestra.core.models.validations.ModelValidator;
import io.kestra.core.serializers.YamlParser; import io.kestra.core.serializers.YamlParser;
import io.kestra.core.services.PluginDefaultService; import io.kestra.core.services.PluginDefaultService;
import io.kestra.core.utils.Rethrow;
import jakarta.inject.Inject; import jakarta.inject.Inject;
import jakarta.inject.Singleton; import jakarta.inject.Singleton;
import jakarta.validation.ConstraintViolationException;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils; import org.apache.commons.io.FileUtils;
@@ -15,22 +20,22 @@ import java.net.URI;
import java.net.URISyntaxException; import java.net.URISyntaxException;
import java.net.URL; import java.net.URL;
import java.nio.charset.Charset; import java.nio.charset.Charset;
import java.nio.file.*; import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collections; import java.util.Collections;
import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.function.Function; import java.util.function.Function;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import java.util.stream.Stream;
import jakarta.validation.ConstraintViolationException;
import static io.kestra.core.utils.Rethrow.throwConsumer; import static io.kestra.core.utils.Rethrow.throwConsumer;
@Singleton @Singleton
@Slf4j @Slf4j
public class LocalFlowRepositoryLoader { public class LocalFlowRepositoryLoader {
@Inject
private YamlParser yamlParser;
@Inject @Inject
private FlowRepositoryInterface flowRepository; private FlowRepositoryInterface flowRepository;
@@ -68,47 +73,32 @@ public class LocalFlowRepositoryLoader {
} }
public void load(File basePath) throws IOException { public void load(File basePath) throws IOException {
Map<String, Flow> flowByUidInRepository = flowRepository.findAllForAllTenants().stream() Map<String, FlowInterface> flowByUidInRepository = flowRepository.findAllForAllTenants().stream()
.collect(Collectors.toMap(Flow::uidWithoutRevision, Function.identity())); .collect(Collectors.toMap(FlowId::uidWithoutRevision, Function.identity()));
List<Path> list = Files.walk(basePath.toPath())
.filter(YamlParser::isValidExtension)
.toList();
for (Path file : list) { try (Stream<Path> pathStream = Files.walk(basePath.toPath())) {
try { pathStream.filter(YamlParser::isValidExtension)
String flowSource = Files.readString(Path.of(file.toFile().getPath()), Charset.defaultCharset()); .forEach(Rethrow.throwConsumer(file -> {
Flow parse = yamlParser.parse(file.toFile(), Flow.class); try {
modelValidator.validate(parse); String source = Files.readString(Path.of(file.toFile().getPath()), Charset.defaultCharset());
GenericFlow parsed = GenericFlow.fromYaml(null, source);
Flow inRepository = flowByUidInRepository.get(parse.uidWithoutRevision()); FlowWithSource flowWithSource = pluginDefaultService.injectAllDefaults(parsed, false);
modelValidator.validate(flowWithSource);
if (inRepository == null) { FlowInterface existing = flowByUidInRepository.get(flowWithSource.uidWithoutRevision());
this.createFlow(flowSource, parse);
} else { if (existing == null) {
this.udpateFlow(flowSource, parse, inRepository); flowRepository.create(parsed);
} log.trace("Created flow {}.{}", parsed.getNamespace(), parsed.getId());
} catch (ConstraintViolationException e) { } else {
log.warn("Unable to create flow {}", file, e); flowRepository.update(parsed, existing);
} log.trace("Updated flow {}.{}", parsed.getNamespace(), parsed.getId());
}
} catch (ConstraintViolationException e) {
log.warn("Unable to create flow {}", file, e);
}
}));
} }
} }
private void createFlow(String flowSource, Flow parse) {
flowRepository.create(
parse,
flowSource,
parse
);
log.trace("Created flow {}.{}", parse.getNamespace(), parse.getId());
}
private void udpateFlow(String flowSource, Flow parse, Flow previous) {
flowRepository.update(
parse,
previous,
flowSource,
parse
);
log.trace("Updated flow {}.{}", parse.getNamespace(), parse.getId());
}
} }

View File

@@ -1,5 +1,6 @@
package io.kestra.core.runners; package io.kestra.core.runners;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithSource; import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.repositories.FlowRepositoryInterface; import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.services.FlowListenersInterface; import io.kestra.core.services.FlowListenersInterface;
@@ -20,8 +21,7 @@ public class DefaultFlowExecutor implements FlowExecutorInterface {
public DefaultFlowExecutor(FlowListenersInterface flowListeners, FlowRepositoryInterface flowRepository) { public DefaultFlowExecutor(FlowListenersInterface flowListeners, FlowRepositoryInterface flowRepository) {
this.flowRepository = flowRepository; this.flowRepository = flowRepository;
flowListeners.listen(flows -> allFlows = flows);
flowListeners.listen(flows -> this.allFlows = flows);
} }
@Override @Override
@@ -30,20 +30,22 @@ public class DefaultFlowExecutor implements FlowExecutorInterface {
} }
@Override @Override
public Optional<FlowWithSource> findById(String tenantId, String namespace, String id, Optional<Integer> revision) { @SuppressWarnings({"unchecked", "rawtypes"})
Optional<FlowWithSource> find = this.allFlows public Optional<FlowInterface> findById(String tenantId, String namespace, String id, Optional<Integer> revision) {
Optional<FlowInterface> find = this.allFlows
.stream() .stream()
.filter(flow -> ((flow.getTenantId() == null && tenantId == null) || Objects.equals(flow.getTenantId(), tenantId)) && .filter(flow -> ((flow.getTenantId() == null && tenantId == null) || Objects.equals(flow.getTenantId(), tenantId)) &&
flow.getNamespace().equals(namespace) && flow.getNamespace().equals(namespace) &&
flow.getId().equals(id) && flow.getId().equals(id) &&
(revision.isEmpty() || revision.get().equals(flow.getRevision())) (revision.isEmpty() || revision.get().equals(flow.getRevision()))
) )
.map(it -> (FlowInterface)it)
.findFirst(); .findFirst();
if (find.isPresent()) { if (find.isPresent()) {
return find; return find;
} else { } else {
return flowRepository.findByIdWithSource(tenantId, namespace, id, revision); return (Optional) flowRepository.findByIdWithSource(tenantId, namespace, id, revision);
} }
} }

View File

@@ -6,6 +6,7 @@ import io.kestra.core.exceptions.InternalException;
import io.kestra.core.models.Label; import io.kestra.core.models.Label;
import io.kestra.core.models.executions.*; import io.kestra.core.models.executions.*;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithException; import io.kestra.core.models.flows.FlowWithException;
import io.kestra.core.models.flows.State; import io.kestra.core.models.flows.State;
import io.kestra.core.models.property.Property; import io.kestra.core.models.property.Property;
@@ -143,7 +144,7 @@ public final class ExecutableUtils {
String subflowId = runContext.render(currentTask.subflowId().flowId()); String subflowId = runContext.render(currentTask.subflowId().flowId());
Optional<Integer> subflowRevision = currentTask.subflowId().revision(); Optional<Integer> subflowRevision = currentTask.subflowId().revision();
Flow flow = flowExecutorInterface.findByIdFromTask( FlowInterface flow = flowExecutorInterface.findByIdFromTask(
currentExecution.getTenantId(), currentExecution.getTenantId(),
subflowNamespace, subflowNamespace,
subflowId, subflowId,
@@ -212,7 +213,7 @@ public final class ExecutableUtils {
})); }));
} }
private static List<Label> filterLabels(List<Label> labels, Flow flow) { private static List<Label> filterLabels(List<Label> labels, FlowInterface flow) {
if (ListUtils.isEmpty(flow.getLabels())) { if (ListUtils.isEmpty(flow.getLabels())) {
return labels; return labels;
} }
@@ -304,7 +305,7 @@ public final class ExecutableUtils {
return State.Type.SUCCESS; return State.Type.SUCCESS;
} }
public static SubflowExecutionResult subflowExecutionResultFromChildExecution(RunContext runContext, Flow flow, Execution execution, ExecutableTask<?> executableTask, TaskRun taskRun) { public static SubflowExecutionResult subflowExecutionResultFromChildExecution(RunContext runContext, FlowInterface flow, Execution execution, ExecutableTask<?> executableTask, TaskRun taskRun) {
try { try {
return executableTask return executableTask
.createSubflowExecutionResult(runContext, taskRun, flow, execution) .createSubflowExecutionResult(runContext, taskRun, flow, execution)

View File

@@ -2,7 +2,6 @@ package io.kestra.core.runners;
import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonIgnore;
import io.kestra.core.models.executions.*; import io.kestra.core.models.executions.*;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowWithException; import io.kestra.core.models.flows.FlowWithException;
import io.kestra.core.models.flows.FlowWithSource; import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.State; import io.kestra.core.models.flows.State;

View File

@@ -6,6 +6,7 @@ import io.kestra.core.metrics.MetricRegistry;
import io.kestra.core.models.Label; import io.kestra.core.models.Label;
import io.kestra.core.models.executions.*; import io.kestra.core.models.executions.*;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithSource; import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.State; import io.kestra.core.models.flows.State;
import io.kestra.core.models.flows.sla.Violation; import io.kestra.core.models.flows.sla.Violation;
@@ -92,7 +93,7 @@ public class ExecutorService {
return this.flowExecutorInterface; return this.flowExecutorInterface;
} }
public Executor checkConcurrencyLimit(Executor executor, Flow flow, Execution execution, long count) { public Executor checkConcurrencyLimit(Executor executor, FlowInterface flow, Execution execution, long count) {
// if above the limit, handle concurrency limit based on its behavior // if above the limit, handle concurrency limit based on its behavior
if (count >= flow.getConcurrency().getLimit()) { if (count >= flow.getConcurrency().getLimit()) {
return switch (flow.getConcurrency().getBehavior()) { return switch (flow.getConcurrency().getBehavior()) {
@@ -902,7 +903,7 @@ public class ExecutorService {
); );
} else { } else {
executions.addAll(subflowExecutions); executions.addAll(subflowExecutions);
Optional<FlowWithSource> flow = flowExecutorInterface.findByExecution(subflowExecutions.getFirst().getExecution()); Optional<FlowInterface> flow = flowExecutorInterface.findByExecution(subflowExecutions.getFirst().getExecution());
if (flow.isPresent()) { if (flow.isPresent()) {
// add SubflowExecutionResults to notify parents // add SubflowExecutionResults to notify parents
for (SubflowExecution<?> subflowExecution : subflowExecutions) { for (SubflowExecution<?> subflowExecution : subflowExecutions) {

View File

@@ -1,7 +1,7 @@
package io.kestra.core.runners; package io.kestra.core.runners;
import io.kestra.core.models.executions.Execution; import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithSource; import io.kestra.core.models.flows.FlowWithSource;
import java.util.Collection; import java.util.Collection;
@@ -18,7 +18,7 @@ public interface FlowExecutorInterface {
* Find a flow. * Find a flow.
* WARNING: this method will NOT check if the namespace is allowed, so it should not be used inside a task. * WARNING: this method will NOT check if the namespace is allowed, so it should not be used inside a task.
*/ */
Optional<FlowWithSource> findById(String tenantId, String namespace, String id, Optional<Integer> revision); Optional<FlowInterface> findById(String tenantId, String namespace, String id, Optional<Integer> revision);
/** /**
* Whether the FlowExecutorInterface is ready to be used. * Whether the FlowExecutorInterface is ready to be used.
@@ -29,20 +29,15 @@ public interface FlowExecutorInterface {
* Find a flow. * Find a flow.
* This method will check if the namespace is allowed, so it can be used inside a task. * This method will check if the namespace is allowed, so it can be used inside a task.
*/ */
default Optional<FlowWithSource> findByIdFromTask(String tenantId, String namespace, String id, Optional<Integer> revision, String fromTenant, String fromNamespace, String fromId) { default Optional<FlowInterface> findByIdFromTask(String tenantId, String namespace, String id, Optional<Integer> revision, String fromTenant, String fromNamespace, String fromId) {
return this.findById( return this.findById(tenantId, namespace, id, revision);
tenantId,
namespace,
id,
revision
);
} }
/** /**
* Find a flow from an execution. * Find a flow from an execution.
* WARNING: this method will NOT check if the namespace is allowed, so it should not be used inside a task. * WARNING: this method will NOT check if the namespace is allowed, so it should not be used inside a task.
*/ */
default Optional<FlowWithSource> findByExecution(Execution execution) { default Optional<FlowInterface> findByExecution(Execution execution) {
if (execution.getFlowRevision() == null) { if (execution.getFlowRevision() == null) {
return Optional.empty(); return Optional.empty();
} }

View File

@@ -10,6 +10,7 @@ import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Data; import io.kestra.core.models.flows.Data;
import io.kestra.core.models.flows.DependsOn; import io.kestra.core.models.flows.DependsOn;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.Input; import io.kestra.core.models.flows.Input;
import io.kestra.core.models.flows.RenderableInput; import io.kestra.core.models.flows.RenderableInput;
import io.kestra.core.models.flows.Type; import io.kestra.core.models.flows.Type;
@@ -110,7 +111,7 @@ public class FlowInputOutput {
* @param data The Execution's inputs data. * @param data The Execution's inputs data.
* @return The Map of typed inputs. * @return The Map of typed inputs.
*/ */
public Mono<Map<String, Object>> readExecutionInputs(final Flow flow, public Mono<Map<String, Object>> readExecutionInputs(final FlowInterface flow,
final Execution execution, final Execution execution,
final Publisher<CompletedPart> data) { final Publisher<CompletedPart> data) {
return this.readExecutionInputs(flow.getInputs(), flow, execution, data); return this.readExecutionInputs(flow.getInputs(), flow, execution, data);
@@ -125,7 +126,7 @@ public class FlowInputOutput {
* @return The Map of typed inputs. * @return The Map of typed inputs.
*/ */
public Mono<Map<String, Object>> readExecutionInputs(final List<Input<?>> inputs, public Mono<Map<String, Object>> readExecutionInputs(final List<Input<?>> inputs,
final Flow flow, final FlowInterface flow,
final Execution execution, final Execution execution,
final Publisher<CompletedPart> data) { final Publisher<CompletedPart> data) {
return readData(inputs, execution, data, true).map(inputData -> this.readExecutionInputs(inputs, flow, execution, inputData)); return readData(inputs, execution, data, true).map(inputData -> this.readExecutionInputs(inputs, flow, execution, inputData));
@@ -189,7 +190,7 @@ public class FlowInputOutput {
* @return The Map of typed inputs. * @return The Map of typed inputs.
*/ */
public Map<String, Object> readExecutionInputs( public Map<String, Object> readExecutionInputs(
final Flow flow, final FlowInterface flow,
final Execution execution, final Execution execution,
final Map<String, ?> data final Map<String, ?> data
) { ) {
@@ -198,7 +199,7 @@ public class FlowInputOutput {
private Map<String, Object> readExecutionInputs( private Map<String, Object> readExecutionInputs(
final List<Input<?>> inputs, final List<Input<?>> inputs,
final Flow flow, final FlowInterface flow,
final Execution execution, final Execution execution,
final Map<String, ?> data final Map<String, ?> data
) { ) {
@@ -227,7 +228,7 @@ public class FlowInputOutput {
@VisibleForTesting @VisibleForTesting
public List<InputAndValue> resolveInputs( public List<InputAndValue> resolveInputs(
final List<Input<?>> inputs, final List<Input<?>> inputs,
final Flow flow, final FlowInterface flow,
final Execution execution, final Execution execution,
final Map<String, ?> data final Map<String, ?> data
) { ) {
@@ -251,7 +252,7 @@ public class FlowInputOutput {
@SuppressWarnings({"unchecked", "rawtypes"}) @SuppressWarnings({"unchecked", "rawtypes"})
private InputAndValue resolveInputValue( private InputAndValue resolveInputValue(
final @NotNull ResolvableInput resolvable, final @NotNull ResolvableInput resolvable,
final Flow flow, final FlowInterface flow,
final @NotNull Execution execution, final @NotNull Execution execution,
final @NotNull Map<String, ResolvableInput> inputs) { final @NotNull Map<String, ResolvableInput> inputs) {
@@ -329,7 +330,7 @@ public class FlowInputOutput {
return resolvable.get(); return resolvable.get();
} }
private RunContext buildRunContextForExecutionAndInputs(final Flow flow, final Execution execution, Map<String, InputAndValue> dependencies) { private RunContext buildRunContextForExecutionAndInputs(final FlowInterface flow, final Execution execution, Map<String, InputAndValue> dependencies) {
Map<String, Object> flattenInputs = MapUtils.flattenToNestedMap(dependencies.entrySet() Map<String, Object> flattenInputs = MapUtils.flattenToNestedMap(dependencies.entrySet()
.stream() .stream()
.collect(HashMap::new, (m, v) -> m.put(v.getKey(), v.getValue().value()), HashMap::putAll) .collect(HashMap::new, (m, v) -> m.put(v.getKey(), v.getValue().value()), HashMap::putAll)
@@ -337,7 +338,7 @@ public class FlowInputOutput {
return runContextFactory.of(flow, execution, vars -> vars.withInputs(flattenInputs)); return runContextFactory.of(flow, execution, vars -> vars.withInputs(flattenInputs));
} }
private Map<String, InputAndValue> resolveAllDependentInputs(final Input<?> input, final Flow flow, final Execution execution, final Map<String, ResolvableInput> inputs) { private Map<String, InputAndValue> resolveAllDependentInputs(final Input<?> input, final FlowInterface flow, final Execution execution, final Map<String, ResolvableInput> inputs) {
return Optional.ofNullable(input.getDependsOn()) return Optional.ofNullable(input.getDependsOn())
.map(DependsOn::inputs) .map(DependsOn::inputs)
.stream() .stream()
@@ -350,7 +351,7 @@ public class FlowInputOutput {
} }
public Map<String, Object> typedOutputs( public Map<String, Object> typedOutputs(
final Flow flow, final FlowInterface flow,
final Execution execution, final Execution execution,
final Map<String, Object> in final Map<String, Object> in
) { ) {
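Note: readExecutionInputs (and typedOutputs) now only require a FlowInterface, so callers no longer need a fully-typed Flow to type inputs. A minimal sketch; the FlowInputOutput bean, flow and execution are assumed to be provided, and the input name and value are illustrative:

import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.runners.FlowInputOutput;

import java.util.Map;

public class ExecutionInputsExample {
    public Map<String, Object> typeInputs(FlowInputOutput flowInputOutput,
                                          FlowInterface flow,
                                          Execution execution) {
        // Validates and converts the raw values against the flow's declared inputs.
        return flowInputOutput.readExecutionInputs(flow, execution, Map.of("myInput", "42"));
    }
}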

View File

@@ -1,9 +1,9 @@
package io.kestra.core.runners; package io.kestra.core.runners;
import com.fasterxml.jackson.databind.ObjectMapper; import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithException; import io.kestra.core.models.flows.FlowWithException;
import io.kestra.core.models.flows.FlowWithSource; import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.serializers.JacksonMapper; import io.kestra.core.services.PluginDefaultService;
import lombok.SneakyThrows; import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import io.kestra.core.queues.QueueFactoryInterface; import io.kestra.core.queues.QueueFactoryInterface;
@@ -11,12 +11,9 @@ import io.kestra.core.queues.QueueInterface;
import io.kestra.core.repositories.FlowRepositoryInterface; import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.services.FlowListenersInterface; import io.kestra.core.services.FlowListenersInterface;
import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Objects;
import java.util.Optional; import java.util.Optional;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.BiConsumer; import java.util.function.BiConsumer;
import java.util.function.Consumer; import java.util.function.Consumer;
@@ -28,22 +25,24 @@ import jakarta.inject.Singleton;
@Singleton @Singleton
@Slf4j @Slf4j
public class FlowListeners implements FlowListenersInterface { public class FlowListeners implements FlowListenersInterface {
private static final ObjectMapper MAPPER = JacksonMapper.ofJson();
private final AtomicBoolean isStarted = new AtomicBoolean(false); private final AtomicBoolean isStarted = new AtomicBoolean(false);
private final QueueInterface<FlowWithSource> flowQueue; private final QueueInterface<FlowInterface> flowQueue;
private final List<FlowWithSource> flows; private final List<FlowWithSource> flows;
private final List<Consumer<List<FlowWithSource>>> consumers = new CopyOnWriteArrayList<>(); private final List<Consumer<List<FlowWithSource>>> consumers = new ArrayList<>();
private final List<BiConsumer<FlowWithSource, FlowWithSource>> consumersEach = new ArrayList<>();
private final List<BiConsumer<FlowWithSource, FlowWithSource>> consumersEach = new CopyOnWriteArrayList<>(); private final PluginDefaultService pluginDefaultService;
@Inject @Inject
public FlowListeners( public FlowListeners(
FlowRepositoryInterface flowRepository, FlowRepositoryInterface flowRepository,
@Named(QueueFactoryInterface.FLOW_NAMED) QueueInterface<FlowWithSource> flowQueue @Named(QueueFactoryInterface.FLOW_NAMED) QueueInterface<FlowInterface> flowQueue,
PluginDefaultService pluginDefaultService
) { ) {
this.flowQueue = flowQueue; this.flowQueue = flowQueue;
this.flows = flowRepository.findAllWithSourceForAllTenants(); this.flows = new ArrayList<>(flowRepository.findAllWithSourceForAllTenants());
this.pluginDefaultService = pluginDefaultService;
} }
@Override @Override
@@ -53,19 +52,14 @@ public class FlowListeners implements FlowListenersInterface {
this.flowQueue.receive(either -> { this.flowQueue.receive(either -> {
FlowWithSource flow; FlowWithSource flow;
if (either.isRight()) { if (either.isRight()) {
log.error("Unable to deserialize a flow: {}", either.getRight().getMessage()); flow = FlowWithException.from(either.getRight().getRecord(), either.getRight(), log).orElse(null);
try { if (flow == null) {
var jsonNode = MAPPER.readTree(either.getRight().getRecord());
flow = FlowWithException.from(jsonNode, either.getRight()).orElseThrow(IOException::new);
} catch (IOException e) {
// if we cannot create a FlowWithException, ignore the message
log.error("Unexpected exception when trying to handle a deserialization error", e);
return; return;
} }
} else {
flow = pluginDefaultService.injectVersionDefaults(either.getLeft(), true);
} }
else {
flow = either.getLeft();
}
Optional<FlowWithSource> previous = this.previous(flow); Optional<FlowWithSource> previous = this.previous(flow);
if (flow.isDeleted()) { if (flow.isDeleted()) {
@@ -96,17 +90,14 @@ public class FlowListeners implements FlowListenersInterface {
} }
} }
private Optional<FlowWithSource> previous(FlowWithSource flow) { private Optional<FlowWithSource> previous(final FlowWithSource flow) {
List<FlowWithSource> copy = new ArrayList<>(flows); List<FlowWithSource> copy = new ArrayList<>(flows);
return copy return copy.stream().filter(r -> r.isSameId(flow)).findFirst();
.stream()
.filter(r -> Objects.equals(r.getTenantId(), flow.getTenantId()) && r.getNamespace().equals(flow.getNamespace()) && r.getId().equals(flow.getId()))
.findFirst();
} }
private boolean remove(FlowWithSource flow) { private boolean remove(FlowInterface flow) {
synchronized (this) { synchronized (this) {
boolean remove = flows.removeIf(r -> Objects.equals(r.getTenantId(), flow.getTenantId()) && r.getNamespace().equals(flow.getNamespace()) && r.getId().equals(flow.getId())); boolean remove = flows.removeIf(r -> r.isSameId(flow));
if (!remove && flow.isDeleted()) { if (!remove && flow.isDeleted()) {
log.warn("Can't remove flow {}.{}", flow.getNamespace(), flow.getId()); log.warn("Can't remove flow {}.{}", flow.getNamespace(), flow.getId());
} }
@@ -125,8 +116,7 @@ public class FlowListeners implements FlowListenersInterface {
private void notifyConsumers() { private void notifyConsumers() {
synchronized (this) { synchronized (this) {
this.consumers this.consumers.forEach(consumer -> consumer.accept(new ArrayList<>(this.flows)));
.forEach(consumer -> consumer.accept(new ArrayList<>(this.flows)));
} }
} }
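Note: FlowListeners keeps exposing its listen callbacks over FlowWithSource even though the underlying queue now transports FlowInterface and version defaults are re-injected on receive. A minimal sketch of registering both consumer styles, as DefaultFlowExecutor and the scheduler do in this commit; the log output is illustrative:

import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.services.FlowListenersInterface;

import java.util.List;

public class FlowListenerExample {
    private volatile List<FlowWithSource> allFlows = List.of();

    public void register(FlowListenersInterface flowListeners) {
        // Snapshot consumer: receives the full flow list on every change.
        flowListeners.listen(flows -> this.allFlows = flows);

        // Per-flow consumer: receives the updated flow and its previous revision (may be null).
        flowListeners.listen((flow, previous) -> {
            if (flow.isDeleted()) {
                System.out.println("Flow removed: " + flow.getId());
            }
        });
    }
}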

View File

@@ -5,6 +5,7 @@ import io.kestra.core.metrics.MetricRegistry;
import io.kestra.core.models.executions.Execution; import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.TaskRun; import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.Type; import io.kestra.core.models.flows.Type;
import io.kestra.core.models.tasks.Task; import io.kestra.core.models.tasks.Task;
import io.kestra.core.models.triggers.AbstractTrigger; import io.kestra.core.models.triggers.AbstractTrigger;
@@ -75,11 +76,11 @@ public class RunContextFactory {
return applicationContext.getBean(RunContextInitializer.class); return applicationContext.getBean(RunContextInitializer.class);
} }
public RunContext of(Flow flow, Execution execution) { public RunContext of(FlowInterface flow, Execution execution) {
return of(flow, execution, Function.identity()); return of(flow, execution, Function.identity());
} }
public RunContext of(Flow flow, Execution execution, Function<RunVariables.Builder, RunVariables.Builder> runVariableModifier) { public RunContext of(FlowInterface flow, Execution execution, Function<RunVariables.Builder, RunVariables.Builder> runVariableModifier) {
RunContextLogger runContextLogger = runContextLoggerFactory.create(execution); RunContextLogger runContextLogger = runContextLoggerFactory.create(execution);
return newBuilder() return newBuilder()
@@ -100,11 +101,11 @@ public class RunContextFactory {
.build(); .build();
} }
public RunContext of(Flow flow, Task task, Execution execution, TaskRun taskRun) { public RunContext of(FlowInterface flow, Task task, Execution execution, TaskRun taskRun) {
return this.of(flow, task, execution, taskRun, true); return this.of(flow, task, execution, taskRun, true);
} }
public RunContext of(Flow flow, Task task, Execution execution, TaskRun taskRun, boolean decryptVariables) { public RunContext of(FlowInterface flow, Task task, Execution execution, TaskRun taskRun, boolean decryptVariables) {
RunContextLogger runContextLogger = runContextLoggerFactory.create(taskRun, task); RunContextLogger runContextLogger = runContextLoggerFactory.create(taskRun, task);
return newBuilder() return newBuilder()
@@ -202,7 +203,7 @@ public class RunContextFactory {
return of(Map.of()); return of(Map.of());
} }
private List<String> secretInputsFromFlow(Flow flow) { private List<String> secretInputsFromFlow(FlowInterface flow) {
if (flow == null || flow.getInputs() == null) { if (flow == null || flow.getInputs() == null) {
return Collections.emptyList(); return Collections.emptyList();
} }

View File

@@ -5,6 +5,7 @@ import io.kestra.core.models.Label;
import io.kestra.core.models.executions.Execution; import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.TaskRun; import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.Input; import io.kestra.core.models.flows.Input;
import io.kestra.core.models.flows.State; import io.kestra.core.models.flows.State;
import io.kestra.core.models.flows.input.SecretInput; import io.kestra.core.models.flows.input.SecretInput;
@@ -73,7 +74,7 @@ public final class RunVariables {
* @param flow The flow from which to create variables. * @param flow The flow from which to create variables.
* @return a new immutable {@link Map}. * @return a new immutable {@link Map}.
*/ */
static Map<String, Object> of(final Flow flow) { static Map<String, Object> of(final FlowInterface flow) {
ImmutableMap.Builder<String, Object> builder = ImmutableMap.builder(); ImmutableMap.Builder<String, Object> builder = ImmutableMap.builder();
builder.put("id", flow.getId()) builder.put("id", flow.getId())
.put("namespace", flow.getNamespace()); .put("namespace", flow.getNamespace());
@@ -105,7 +106,7 @@ public final class RunVariables {
*/ */
public interface Builder { public interface Builder {
Builder withFlow(Flow flow); Builder withFlow(FlowInterface flow);
Builder withInputs(Map<String, Object> inputs); Builder withInputs(Map<String, Object> inputs);
@@ -147,7 +148,7 @@ public final class RunVariables {
@With @With
public static class DefaultBuilder implements RunVariables.Builder { public static class DefaultBuilder implements RunVariables.Builder {
protected Flow flow; protected FlowInterface flow;
protected Task task; protected Task task;
protected Execution execution; protected Execution execution;
protected TaskRun taskRun; protected TaskRun taskRun;

View File

@@ -4,6 +4,7 @@ import com.google.common.annotations.VisibleForTesting;
import io.kestra.core.models.Label; import io.kestra.core.models.Label;
import io.kestra.core.models.executions.Execution; import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.queues.QueueException; import io.kestra.core.queues.QueueException;
import io.kestra.core.queues.QueueFactoryInterface; import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface; import io.kestra.core.queues.QueueInterface;
@@ -47,7 +48,7 @@ public class RunnerUtils {
return this.runOne(tenantId, namespace, flowId, revision, null, null, null); return this.runOne(tenantId, namespace, flowId, revision, null, null, null);
} }
public Execution runOne(String tenantId, String namespace, String flowId, Integer revision, BiFunction<Flow, Execution, Map<String, Object>> inputs) throws TimeoutException, QueueException { public Execution runOne(String tenantId, String namespace, String flowId, Integer revision, BiFunction<FlowInterface, Execution, Map<String, Object>> inputs) throws TimeoutException, QueueException {
return this.runOne(tenantId, namespace, flowId, revision, inputs, null, null); return this.runOne(tenantId, namespace, flowId, revision, inputs, null, null);
} }
@@ -55,11 +56,11 @@ public class RunnerUtils {
return this.runOne(tenantId, namespace, flowId, null, null, duration, null); return this.runOne(tenantId, namespace, flowId, null, null, duration, null);
} }
public Execution runOne(String tenantId, String namespace, String flowId, Integer revision, BiFunction<Flow, Execution, Map<String, Object>> inputs, Duration duration) throws TimeoutException, QueueException { public Execution runOne(String tenantId, String namespace, String flowId, Integer revision, BiFunction<FlowInterface, Execution, Map<String, Object>> inputs, Duration duration) throws TimeoutException, QueueException {
return this.runOne(tenantId, namespace, flowId, revision, inputs, duration, null); return this.runOne(tenantId, namespace, flowId, revision, inputs, duration, null);
} }
public Execution runOne(String tenantId, String namespace, String flowId, Integer revision, BiFunction<Flow, Execution, Map<String, Object>> inputs, Duration duration, List<Label> labels) throws TimeoutException, QueueException { public Execution runOne(String tenantId, String namespace, String flowId, Integer revision, BiFunction<FlowInterface, Execution, Map<String, Object>> inputs, Duration duration, List<Label> labels) throws TimeoutException, QueueException {
return this.runOne( return this.runOne(
flowRepository flowRepository
.findById(tenantId, namespace, flowId, revision != null ? Optional.of(revision) : Optional.empty()) .findById(tenantId, namespace, flowId, revision != null ? Optional.of(revision) : Optional.empty())
@@ -69,15 +70,15 @@ public class RunnerUtils {
labels); labels);
} }
public Execution runOne(Flow flow, BiFunction<Flow, Execution, Map<String, Object>> inputs) throws TimeoutException, QueueException { public Execution runOne(Flow flow, BiFunction<FlowInterface, Execution, Map<String, Object>> inputs) throws TimeoutException, QueueException {
return this.runOne(flow, inputs, null, null); return this.runOne(flow, inputs, null, null);
} }
public Execution runOne(Flow flow, BiFunction<Flow, Execution, Map<String, Object>> inputs, Duration duration) throws TimeoutException, QueueException { public Execution runOne(Flow flow, BiFunction<FlowInterface, Execution, Map<String, Object>> inputs, Duration duration) throws TimeoutException, QueueException {
return this.runOne(flow, inputs, duration, null); return this.runOne(flow, inputs, duration, null);
} }
public Execution runOne(Flow flow, BiFunction<Flow, Execution, Map<String, Object>> inputs, Duration duration, List<Label> labels) throws TimeoutException, QueueException { public Execution runOne(Flow flow, BiFunction<FlowInterface, Execution, Map<String, Object>> inputs, Duration duration, List<Label> labels) throws TimeoutException, QueueException {
if (duration == null) { if (duration == null) {
duration = Duration.ofSeconds(15); duration = Duration.ofSeconds(15);
} }
@@ -93,7 +94,7 @@ public class RunnerUtils {
return this.runOneUntilPaused(tenantId, namespace, flowId, null, null, null); return this.runOneUntilPaused(tenantId, namespace, flowId, null, null, null);
} }
public Execution runOneUntilPaused(String tenantId, String namespace, String flowId, Integer revision, BiFunction<Flow, Execution, Map<String, Object>> inputs, Duration duration) throws TimeoutException, QueueException { public Execution runOneUntilPaused(String tenantId, String namespace, String flowId, Integer revision, BiFunction<FlowInterface, Execution, Map<String, Object>> inputs, Duration duration) throws TimeoutException, QueueException {
return this.runOneUntilPaused( return this.runOneUntilPaused(
flowRepository flowRepository
.findById(tenantId, namespace, flowId, revision != null ? Optional.of(revision) : Optional.empty()) .findById(tenantId, namespace, flowId, revision != null ? Optional.of(revision) : Optional.empty())
@@ -103,7 +104,7 @@ public class RunnerUtils {
); );
} }
public Execution runOneUntilPaused(Flow flow, BiFunction<Flow, Execution, Map<String, Object>> inputs, Duration duration) throws TimeoutException, QueueException { public Execution runOneUntilPaused(Flow flow, BiFunction<FlowInterface, Execution, Map<String, Object>> inputs, Duration duration) throws TimeoutException, QueueException {
if (duration == null) { if (duration == null) {
duration = DEFAULT_MAX_WAIT_DURATION; duration = DEFAULT_MAX_WAIT_DURATION;
} }
@@ -119,7 +120,7 @@ public class RunnerUtils {
return this.runOneUntilRunning(tenantId, namespace, flowId, null, null, null); return this.runOneUntilRunning(tenantId, namespace, flowId, null, null, null);
} }
public Execution runOneUntilRunning(String tenantId, String namespace, String flowId, Integer revision, BiFunction<Flow, Execution, Map<String, Object>> inputs, Duration duration) throws TimeoutException, QueueException { public Execution runOneUntilRunning(String tenantId, String namespace, String flowId, Integer revision, BiFunction<FlowInterface, Execution, Map<String, Object>> inputs, Duration duration) throws TimeoutException, QueueException {
return this.runOneUntilRunning( return this.runOneUntilRunning(
flowRepository flowRepository
.findById(tenantId, namespace, flowId, revision != null ? Optional.of(revision) : Optional.empty()) .findById(tenantId, namespace, flowId, revision != null ? Optional.of(revision) : Optional.empty())
@@ -129,7 +130,7 @@ public class RunnerUtils {
); );
} }
public Execution runOneUntilRunning(Flow flow, BiFunction<Flow, Execution, Map<String, Object>> inputs, Duration duration) throws TimeoutException, QueueException { public Execution runOneUntilRunning(Flow flow, BiFunction<FlowInterface, Execution, Map<String, Object>> inputs, Duration duration) throws TimeoutException, QueueException {
if (duration == null) { if (duration == null) {
duration = DEFAULT_MAX_WAIT_DURATION; duration = DEFAULT_MAX_WAIT_DURATION;
} }
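Note: the inputs callback passed to runOne and its variants is now typed against FlowInterface. A minimal sketch of a test-style invocation; the RunnerUtils bean is assumed to be injected, and the namespace, flow id and input are illustrative:

import io.kestra.core.models.executions.Execution;
import io.kestra.core.runners.RunnerUtils;

import java.util.Map;

public class RunOneExample {
    public Execution run(RunnerUtils runnerUtils) throws Exception {
        return runnerUtils.runOne(
            null,              // tenant id
            "company.team",    // namespace
            "hello-world",     // flow id
            null,              // revision: latest
            (flow, execution) -> Map.of("greeting", "hello " + flow.getId())
        );
    }
}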

View File

@@ -13,6 +13,8 @@ import io.kestra.core.models.conditions.ConditionContext;
import io.kestra.core.models.executions.Execution; import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.ExecutionKilled; import io.kestra.core.models.executions.ExecutionKilled;
import io.kestra.core.models.executions.ExecutionKilledTrigger; import io.kestra.core.models.executions.ExecutionKilledTrigger;
import io.kestra.core.models.flows.FlowId;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithException; import io.kestra.core.models.flows.FlowWithException;
import io.kestra.core.models.flows.FlowWithSource; import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.State; import io.kestra.core.models.flows.State;
@@ -32,7 +34,6 @@ import io.kestra.core.utils.Await;
import io.kestra.core.utils.Either; import io.kestra.core.utils.Either;
import io.kestra.core.utils.IdUtils; import io.kestra.core.utils.IdUtils;
import io.kestra.core.utils.ListUtils; import io.kestra.core.utils.ListUtils;
import io.kestra.core.models.flows.Flow;
import io.micronaut.context.ApplicationContext; import io.micronaut.context.ApplicationContext;
import io.micronaut.context.event.ApplicationEventPublisher; import io.micronaut.context.event.ApplicationEventPublisher;
import io.micronaut.inject.qualifiers.Qualifiers; import io.micronaut.inject.qualifiers.Qualifiers;
@@ -172,6 +173,7 @@ public abstract class AbstractScheduler implements Scheduler, Service {
// remove trigger on flow update, update local triggers store, and stop the trigger on the worker // remove trigger on flow update, update local triggers store, and stop the trigger on the worker
this.flowListeners.listen((flow, previous) -> { this.flowListeners.listen((flow, previous) -> {
if (flow.isDeleted() || previous != null) { if (flow.isDeleted() || previous != null) {
List<AbstractTrigger> triggersDeleted = flow.isDeleted() ? List<AbstractTrigger> triggersDeleted = flow.isDeleted() ?
ListUtils.emptyOnNull(flow.getTriggers()) : ListUtils.emptyOnNull(flow.getTriggers()) :
@@ -287,7 +289,7 @@ public abstract class AbstractScheduler implements Scheduler, Service {
flows flows
.stream() .stream()
.map(flow -> pluginDefaultService.injectDefaults(flow, log)) .map(flow -> pluginDefaultService.injectAllDefaults(flow, log))
.filter(Objects::nonNull) .filter(Objects::nonNull)
.filter(flow -> flow.getTriggers() != null && !flow.getTriggers().isEmpty()) .filter(flow -> flow.getTriggers() != null && !flow.getTriggers().isEmpty())
.flatMap(flow -> flow.getTriggers().stream().filter(trigger -> trigger instanceof WorkerTriggerInterface).map(trigger -> new FlowAndTrigger(flow, trigger))) .flatMap(flow -> flow.getTriggers().stream().filter(trigger -> trigger instanceof WorkerTriggerInterface).map(trigger -> new FlowAndTrigger(flow, trigger)))
@@ -430,7 +432,7 @@ public abstract class AbstractScheduler implements Scheduler, Service {
List<String> flowToKeep = triggerContextsToEvaluate.stream().map(Trigger::getFlowId).toList(); List<String> flowToKeep = triggerContextsToEvaluate.stream().map(Trigger::getFlowId).toList();
triggerContextsToEvaluate.stream() triggerContextsToEvaluate.stream()
.filter(trigger -> !flows.stream().map(FlowWithSource::uidWithoutRevision).toList().contains(Flow.uid(trigger))) .filter(trigger -> !flows.stream().map(FlowId::uidWithoutRevision).toList().contains(FlowId.uid(trigger)))
.forEach(trigger -> { .forEach(trigger -> {
try { try {
this.triggerState.delete(trigger); this.triggerState.delete(trigger);
@@ -441,8 +443,6 @@ public abstract class AbstractScheduler implements Scheduler, Service {
return flows return flows
.stream() .stream()
.map(flow -> pluginDefaultService.injectDefaults(flow, log))
.filter(Objects::nonNull)
.filter(flow -> flowToKeep.contains(flow.getId())) .filter(flow -> flowToKeep.contains(flow.getId()))
.filter(flow -> flow.getTriggers() != null && !flow.getTriggers().isEmpty()) .filter(flow -> flow.getTriggers() != null && !flow.getTriggers().isEmpty())
.filter(flow -> !flow.isDisabled() && !(flow instanceof FlowWithException)) .filter(flow -> !flow.isDisabled() && !(flow instanceof FlowWithException))
@@ -493,9 +493,8 @@ public abstract class AbstractScheduler implements Scheduler, Service {
abstract public void handleNext(List<FlowWithSource> flows, ZonedDateTime now, BiConsumer<List<Trigger>, ScheduleContextInterface> consumer); abstract public void handleNext(List<FlowWithSource> flows, ZonedDateTime now, BiConsumer<List<Trigger>, ScheduleContextInterface> consumer);
public List<FlowWithTriggers> schedulerTriggers() { public List<FlowWithTriggers> schedulerTriggers() {
Map<String, FlowWithSource> flows = this.flowListeners.flows() Map<String, FlowWithSource> flows = getFlowsWithDefaults().stream()
.stream() .collect(Collectors.toMap(FlowInterface::uidWithoutRevision, Function.identity()));
.collect(Collectors.toMap(FlowWithSource::uidWithoutRevision, Function.identity()));
return this.triggerState.findAllForAllTenants().stream() return this.triggerState.findAllForAllTenants().stream()
.filter(trigger -> flows.containsKey(trigger.flowUid())) .filter(trigger -> flows.containsKey(trigger.flowUid()))
@@ -521,7 +520,9 @@ public abstract class AbstractScheduler implements Scheduler, Service {
ZonedDateTime now = now(); ZonedDateTime now = now();
this.handleNext(this.flowListeners.flows(), now, (triggers, scheduleContext) -> { final List<FlowWithSource> flowWithDefaults = getFlowsWithDefaults();
this.handleNext(flowWithDefaults, now, (triggers, scheduleContext) -> {
if (triggers.isEmpty()) { if (triggers.isEmpty()) {
return; return;
} }
@@ -530,7 +531,7 @@ public abstract class AbstractScheduler implements Scheduler, Service {
.filter(trigger -> Boolean.FALSE.equals(trigger.getDisabled())) .filter(trigger -> Boolean.FALSE.equals(trigger.getDisabled()))
.toList(); .toList();
List<FlowWithTriggers> schedulable = this.computeSchedulable(flowListeners.flows(), triggerContextsToEvaluate, scheduleContext); List<FlowWithTriggers> schedulable = this.computeSchedulable(flowWithDefaults, triggerContextsToEvaluate, scheduleContext);
metricRegistry metricRegistry
.counter(MetricRegistry.SCHEDULER_LOOP_COUNT) .counter(MetricRegistry.SCHEDULER_LOOP_COUNT)
@@ -661,6 +662,13 @@ public abstract class AbstractScheduler implements Scheduler, Service {
}); });
} }
private List<FlowWithSource> getFlowsWithDefaults() {
return this.flowListeners.flows().stream()
.map(flow -> pluginDefaultService.injectAllDefaults(flow, log))
.filter(Objects::nonNull)
.toList();
}
private void handleEvaluateWorkerTriggerResult(SchedulerExecutionWithTrigger result, ZonedDateTime private void handleEvaluateWorkerTriggerResult(SchedulerExecutionWithTrigger result, ZonedDateTime
nextExecutionDate) { nextExecutionDate) {
Optional.ofNullable(result) Optional.ofNullable(result)
@@ -815,35 +823,31 @@ public abstract class AbstractScheduler implements Scheduler, Service {
private Optional<SchedulerExecutionWithTrigger> evaluateScheduleTrigger(FlowWithWorkerTrigger flowWithTrigger) { private Optional<SchedulerExecutionWithTrigger> evaluateScheduleTrigger(FlowWithWorkerTrigger flowWithTrigger) {
try { try {
FlowWithWorkerTrigger flowWithWorkerTrigger = flowWithTrigger.from(pluginDefaultService.injectDefaults(
flowWithTrigger.getFlow(),
flowWithTrigger.getConditionContext().getRunContext().logger()
));
// mutability dirty hack that forces the creation of a new triggerExecutionId // mutability dirty hack that forces the creation of a new triggerExecutionId
DefaultRunContext runContext = (DefaultRunContext) flowWithWorkerTrigger.getConditionContext().getRunContext(); DefaultRunContext runContext = (DefaultRunContext) flowWithTrigger.getConditionContext().getRunContext();
runContextInitializer.forScheduler( runContextInitializer.forScheduler(
runContext, runContext,
flowWithWorkerTrigger.getTriggerContext(), flowWithTrigger.getTriggerContext(),
flowWithWorkerTrigger.getAbstractTrigger() flowWithTrigger.getAbstractTrigger()
); );
Optional<Execution> evaluate = ((Schedulable) flowWithWorkerTrigger.getAbstractTrigger()).evaluate( Optional<Execution> evaluate = ((Schedulable) flowWithTrigger.getAbstractTrigger()).evaluate(
flowWithWorkerTrigger.getConditionContext(), flowWithTrigger.getConditionContext(),
flowWithWorkerTrigger.getTriggerContext() flowWithTrigger.getTriggerContext()
); );
if (log.isDebugEnabled()) { if (log.isDebugEnabled()) {
logService.logTrigger( logService.logTrigger(
flowWithWorkerTrigger.getTriggerContext(), flowWithTrigger.getTriggerContext(),
Level.DEBUG, Level.DEBUG,
"[type: {}] {}", "[type: {}] {}",
flowWithWorkerTrigger.getAbstractTrigger().getType(), flowWithTrigger.getAbstractTrigger().getType(),
evaluate.map(execution -> "New execution '" + execution.getId() + "'").orElse("Empty evaluation") evaluate.map(execution -> "New execution '" + execution.getId() + "'").orElse("Empty evaluation")
); );
} }
flowWithWorkerTrigger.getConditionContext().getRunContext().cleanup(); flowWithTrigger.getConditionContext().getRunContext().cleanup();
return evaluate.map(execution -> new SchedulerExecutionWithTrigger( return evaluate.map(execution -> new SchedulerExecutionWithTrigger(
execution, execution,
@@ -890,11 +894,6 @@ public abstract class AbstractScheduler implements Scheduler, Service {
} }
private void sendWorkerTriggerToWorker(FlowWithWorkerTrigger flowWithTrigger) throws InternalException { private void sendWorkerTriggerToWorker(FlowWithWorkerTrigger flowWithTrigger) throws InternalException {
FlowWithWorkerTrigger flowWithTriggerWithDefault = flowWithTrigger.from(
pluginDefaultService.injectDefaults(flowWithTrigger.getFlow(),
flowWithTrigger.getConditionContext().getRunContext().logger())
);
if (log.isDebugEnabled()) { if (log.isDebugEnabled()) {
logService.logTrigger( logService.logTrigger(
flowWithTrigger.getTriggerContext(), flowWithTrigger.getTriggerContext(),
@@ -906,23 +905,23 @@ public abstract class AbstractScheduler implements Scheduler, Service {
var workerTrigger = WorkerTrigger var workerTrigger = WorkerTrigger
.builder() .builder()
.trigger(flowWithTriggerWithDefault.abstractTrigger) .trigger(flowWithTrigger.abstractTrigger)
.triggerContext(flowWithTriggerWithDefault.triggerContext) .triggerContext(flowWithTrigger.triggerContext)
.conditionContext(flowWithTriggerWithDefault.conditionContext) .conditionContext(flowWithTrigger.conditionContext)
.build(); .build();
try { try {
Optional<WorkerGroup> workerGroup = workerGroupService.resolveGroupFromJob(workerTrigger); Optional<WorkerGroup> workerGroup = workerGroupService.resolveGroupFromJob(workerTrigger);
if (workerGroup.isPresent()) { if (workerGroup.isPresent()) {
// Check if the worker group exist // Check if the worker group exist
String tenantId = flowWithTrigger.getFlow().getTenantId(); String tenantId = flowWithTrigger.getFlow().getTenantId();
RunContext runContext = flowWithTriggerWithDefault.conditionContext.getRunContext(); RunContext runContext = flowWithTrigger.conditionContext.getRunContext();
String workerGroupKey = runContext.render(workerGroup.get().getKey()); String workerGroupKey = runContext.render(workerGroup.get().getKey());
if (workerGroupExecutorInterface.isWorkerGroupExistForKey(workerGroupKey, tenantId)) { if (workerGroupExecutorInterface.isWorkerGroupExistForKey(workerGroupKey, tenantId)) {
// Check whether at-least one worker is available // Check whether at-least one worker is available
if (workerGroupExecutorInterface.isWorkerGroupAvailableForKey(workerGroupKey)) { if (workerGroupExecutorInterface.isWorkerGroupAvailableForKey(workerGroupKey)) {
this.workerJobQueue.emit(workerGroupKey, workerTrigger); this.workerJobQueue.emit(workerGroupKey, workerTrigger);
} else { } else {
WorkerGroup.Fallback fallback = workerGroup.map(wg -> wg.getFallback()).orElse(WorkerGroup.Fallback.WAIT); WorkerGroup.Fallback fallback = workerGroup.map(WorkerGroup::getFallback).orElse(WorkerGroup.Fallback.WAIT);
switch(fallback) { switch(fallback) {
case FAIL -> runContext.logger() case FAIL -> runContext.logger()
.error("No workers are available for worker group '{}', ignoring the trigger.", workerGroupKey); .error("No workers are available for worker group '{}', ignoring the trigger.", workerGroupKey);

View File

@@ -7,7 +7,6 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.exc.InvalidTypeIdException; import com.fasterxml.jackson.databind.exc.InvalidTypeIdException;
import com.fasterxml.jackson.databind.exc.UnrecognizedPropertyException; import com.fasterxml.jackson.databind.exc.UnrecognizedPropertyException;
import io.kestra.core.models.validations.ManualConstraintViolation; import io.kestra.core.models.validations.ManualConstraintViolation;
import jakarta.inject.Singleton;
import org.apache.commons.io.FilenameUtils; import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOUtils; import org.apache.commons.io.IOUtils;
@@ -20,8 +19,7 @@ import java.util.Collections;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
@Singleton public final class YamlParser {
public class YamlParser {
private static final ObjectMapper STRICT_MAPPER = JacksonMapper.ofYaml() private static final ObjectMapper STRICT_MAPPER = JacksonMapper.ofYaml()
.enable(JsonParser.Feature.STRICT_DUPLICATE_DETECTION) .enable(JsonParser.Feature.STRICT_DUPLICATE_DETECTION)
.disable(DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE); .disable(DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE);
@@ -33,12 +31,11 @@ public class YamlParser {
return FilenameUtils.getExtension(path.toFile().getAbsolutePath()).equals("yaml") || FilenameUtils.getExtension(path.toFile().getAbsolutePath()).equals("yml"); return FilenameUtils.getExtension(path.toFile().getAbsolutePath()).equals("yaml") || FilenameUtils.getExtension(path.toFile().getAbsolutePath()).equals("yml");
} }
public <T> T parse(String input, Class<T> cls) { public static <T> T parse(String input, Class<T> cls) {
return read(input, cls, type(cls)); return read(input, cls, type(cls));
} }
public static <T> T parse(Map<String, Object> input, Class<T> cls, Boolean strict) {
public <T> T parse(Map<String, Object> input, Class<T> cls, Boolean strict) {
ObjectMapper currentMapper = strict ? STRICT_MAPPER : NON_STRICT_MAPPER; ObjectMapper currentMapper = strict ? STRICT_MAPPER : NON_STRICT_MAPPER;
try { try {
@@ -56,7 +53,7 @@ public class YamlParser {
return cls.getSimpleName().toLowerCase(); return cls.getSimpleName().toLowerCase();
} }
public <T> T parse(File file, Class<T> cls) throws ConstraintViolationException { public static <T> T parse(File file, Class<T> cls) throws ConstraintViolationException {
try { try {
String input = IOUtils.toString(file.toURI(), StandardCharsets.UTF_8); String input = IOUtils.toString(file.toURI(), StandardCharsets.UTF_8);
return read(input, cls, type(cls)); return read(input, cls, type(cls));
@@ -77,13 +74,12 @@ public class YamlParser {
} }
} }
private <T> T read(String input, Class<T> objectClass, String resource) { private static <T> T read(String input, Class<T> objectClass, String resource) {
try { try {
return STRICT_MAPPER.readValue(input, objectClass); return STRICT_MAPPER.readValue(input, objectClass);
} catch (JsonProcessingException e) { } catch (JsonProcessingException e) {
jsonProcessingExceptionHandler(input, resource, e); jsonProcessingExceptionHandler(input, resource, e);
} }
return null; return null;
} }
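With YamlParser now a final class exposing only static methods, call sites no longer inject a bean. A minimal usage sketch (the yamlSource and flowAsMap variables are illustrative):

    // Parse a flow from a YAML string; the strict mapper rejects duplicate properties.
    Flow flow = YamlParser.parse(yamlSource, Flow.class);

    // Parse from an already-deserialized map, opting in or out of strict mapping.
    FlowWithSource withSource = YamlParser.parse(flowAsMap, FlowWithSource.class, true);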

View File

@@ -7,6 +7,7 @@ import io.kestra.core.models.conditions.ConditionContext;
import io.kestra.core.models.conditions.ScheduleCondition; import io.kestra.core.models.conditions.ScheduleCondition;
import io.kestra.core.models.executions.Execution; import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.tasks.ResolvedTask; import io.kestra.core.models.tasks.ResolvedTask;
import io.kestra.core.models.triggers.AbstractTrigger; import io.kestra.core.models.triggers.AbstractTrigger;
import io.kestra.core.models.triggers.multipleflows.MultipleCondition; import io.kestra.core.models.triggers.multipleflows.MultipleCondition;
@@ -32,7 +33,7 @@ public class ConditionService {
private RunContextFactory runContextFactory; private RunContextFactory runContextFactory;
@VisibleForTesting @VisibleForTesting
public boolean isValid(Condition condition, Flow flow, @Nullable Execution execution, MultipleConditionStorageInterface multipleConditionStorage) { public boolean isValid(Condition condition, FlowInterface flow, @Nullable Execution execution, MultipleConditionStorageInterface multipleConditionStorage) {
ConditionContext conditionContext = this.conditionContext( ConditionContext conditionContext = this.conditionContext(
runContextFactory.of(flow, execution), runContextFactory.of(flow, execution),
flow, flow,
@@ -43,11 +44,11 @@ public class ConditionService {
return this.valid(flow, Collections.singletonList(condition), conditionContext); return this.valid(flow, Collections.singletonList(condition), conditionContext);
} }
public boolean isValid(Condition condition, Flow flow, @Nullable Execution execution) { public boolean isValid(Condition condition, FlowInterface flow, @Nullable Execution execution) {
return this.isValid(condition, flow, execution, null); return this.isValid(condition, flow, execution, null);
} }
private void logException(Flow flow, Object condition, ConditionContext conditionContext, Exception e) { private void logException(FlowInterface flow, Object condition, ConditionContext conditionContext, Exception e) {
conditionContext.getRunContext().logger().warn( conditionContext.getRunContext().logger().warn(
"[namespace: {}] [flow: {}] [condition: {}] Evaluate Condition Failed with error '{}'", "[namespace: {}] [flow: {}] [condition: {}] Evaluate Condition Failed with error '{}'",
flow.getNamespace(), flow.getNamespace(),
@@ -116,7 +117,7 @@ public class ConditionService {
} }
} }
public ConditionContext conditionContext(RunContext runContext, Flow flow, @Nullable Execution execution, MultipleConditionStorageInterface multipleConditionStorage) { public ConditionContext conditionContext(RunContext runContext, FlowInterface flow, @Nullable Execution execution, MultipleConditionStorageInterface multipleConditionStorage) {
return ConditionContext.builder() return ConditionContext.builder()
.flow(flow) .flow(flow)
.execution(execution) .execution(execution)
@@ -129,7 +130,7 @@ public class ConditionService {
return this.conditionContext(runContext, flow, execution, null); return this.conditionContext(runContext, flow, execution, null);
} }
boolean valid(Flow flow, List<Condition> list, ConditionContext conditionContext) { boolean valid(FlowInterface flow, List<Condition> list, ConditionContext conditionContext) {
return list return list
.stream() .stream()
.allMatch(condition -> { .allMatch(condition -> {

View File

@@ -11,6 +11,7 @@ import io.kestra.core.models.executions.ExecutionKilledExecution;
import io.kestra.core.models.executions.TaskRun; import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.executions.TaskRunAttempt; import io.kestra.core.models.executions.TaskRunAttempt;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithSource; import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.State; import io.kestra.core.models.flows.State;
import io.kestra.core.models.flows.input.InputAndValue; import io.kestra.core.models.flows.input.InputAndValue;
@@ -319,7 +320,7 @@ public class ExecutionService {
} }
@SuppressWarnings("deprecation") @SuppressWarnings("deprecation")
private Execution markAs(final Execution execution, Flow flow, String taskRunId, State.Type newState, @Nullable Map<String, Object> onResumeInputs) throws Exception { private Execution markAs(final Execution execution, FlowInterface flow, String taskRunId, State.Type newState, @Nullable Map<String, Object> onResumeInputs) throws Exception {
Set<String> taskRunToRestart = this.taskRunToRestart( Set<String> taskRunToRestart = this.taskRunToRestart(
execution, execution,
taskRun -> taskRun.getId().equals(taskRunId) taskRun -> taskRun.getId().equals(taskRunId)
@@ -327,9 +328,11 @@ public class ExecutionService {
Execution newExecution = execution.withMetadata(execution.getMetadata().nextAttempt()); Execution newExecution = execution.withMetadata(execution.getMetadata().nextAttempt());
final FlowWithSource flowWithSource = pluginDefaultService.injectVersionDefaults(flow, false);
for (String s : taskRunToRestart) { for (String s : taskRunToRestart) {
TaskRun originalTaskRun = newExecution.findTaskRunByTaskRunId(s); TaskRun originalTaskRun = newExecution.findTaskRunByTaskRunId(s);
Task task = flow.findTaskByTaskId(originalTaskRun.getTaskId()); Task task = flowWithSource.findTaskByTaskId(originalTaskRun.getTaskId());
boolean isFlowable = task.isFlowable(); boolean isFlowable = task.isFlowable();
if (!isFlowable || s.equals(taskRunId)) { if (!isFlowable || s.equals(taskRunId)) {
@@ -477,7 +480,7 @@ public class ExecutionService {
* @return the execution in the new state. * @return the execution in the new state.
* @throws Exception if the state of the execution cannot be updated * @throws Exception if the state of the execution cannot be updated
*/ */
public Execution resume(Execution execution, Flow flow, State.Type newState) throws Exception { public Execution resume(Execution execution, FlowInterface flow, State.Type newState) throws Exception {
return this.resume(execution, flow, newState, (Map<String, Object>) null); return this.resume(execution, flow, newState, (Map<String, Object>) null);
} }
@@ -490,7 +493,7 @@ public class ExecutionService {
* @param flow the flow of the execution * @param flow the flow of the execution
* @return the execution in the new state. * @return the execution in the new state.
*/ */
public Mono<List<InputAndValue>> validateForResume(final Execution execution, Flow flow) { public Mono<List<InputAndValue>> validateForResume(final Execution execution, FlowInterface flow) {
return getFirstPausedTaskOr(execution, flow) return getFirstPausedTaskOr(execution, flow)
.flatMap(task -> { .flatMap(task -> {
if (task.isPresent() && task.get() instanceof Pause pauseTask) { if (task.isPresent() && task.get() instanceof Pause pauseTask) {
@@ -532,7 +535,7 @@ public class ExecutionService {
* @param inputs the onResume inputs * @param inputs the onResume inputs
* @return the execution in the new state. * @return the execution in the new state.
*/ */
public Mono<Execution> resume(final Execution execution, Flow flow, State.Type newState, @Nullable Publisher<CompletedPart> inputs) { public Mono<Execution> resume(final Execution execution, FlowInterface flow, State.Type newState, @Nullable Publisher<CompletedPart> inputs) {
return getFirstPausedTaskOr(execution, flow) return getFirstPausedTaskOr(execution, flow)
.flatMap(task -> { .flatMap(task -> {
if (task.isPresent() && task.get() instanceof Pause pauseTask) { if (task.isPresent() && task.get() instanceof Pause pauseTask) {
@@ -550,12 +553,14 @@ public class ExecutionService {
}); });
} }
private static Mono<Optional<Task>> getFirstPausedTaskOr(Execution execution, Flow flow){ private Mono<Optional<Task>> getFirstPausedTaskOr(Execution execution, FlowInterface flow){
final FlowWithSource flowWithSource = pluginDefaultService.injectVersionDefaults(flow, false);
return Mono.create(sink -> { return Mono.create(sink -> {
try { try {
var runningTaskRun = execution var runningTaskRun = execution
.findFirstByState(State.Type.PAUSED) .findFirstByState(State.Type.PAUSED)
.map(throwFunction(task -> flow.findTaskByTaskId(task.getTaskId()))); .map(throwFunction(task -> flowWithSource.findTaskByTaskId(task.getTaskId())));
sink.success(runningTaskRun); sink.success(runningTaskRun);
} catch (InternalException e) { } catch (InternalException e) {
sink.error(e); sink.error(e);
@@ -574,7 +579,7 @@ public class ExecutionService {
* @return the execution in the new state. * @return the execution in the new state.
* @throws Exception if the state of the execution cannot be updated * @throws Exception if the state of the execution cannot be updated
*/ */
public Execution resume(final Execution execution, Flow flow, State.Type newState, @Nullable Map<String, Object> inputs) throws Exception { public Execution resume(final Execution execution, FlowInterface flow, State.Type newState, @Nullable Map<String, Object> inputs) throws Exception {
var pausedTaskRun = execution var pausedTaskRun = execution
.findFirstByState(State.Type.PAUSED); .findFirstByState(State.Type.PAUSED);

View File

@@ -1,20 +1,23 @@
package io.kestra.core.services; package io.kestra.core.services;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.kestra.core.models.executions.Execution; import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowId;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithException; import io.kestra.core.models.flows.FlowWithException;
import io.kestra.core.models.flows.FlowWithSource; import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.models.triggers.AbstractTrigger; import io.kestra.core.models.triggers.AbstractTrigger;
import io.kestra.core.models.validations.ModelValidator;
import io.kestra.core.models.validations.ValidateConstraintViolation;
import io.kestra.core.plugins.PluginRegistry; import io.kestra.core.plugins.PluginRegistry;
import io.kestra.core.repositories.FlowRepositoryInterface; import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.serializers.JacksonMapper; import io.kestra.core.serializers.JacksonMapper;
import io.kestra.core.serializers.YamlParser;
import io.kestra.core.utils.ListUtils; import io.kestra.core.utils.ListUtils;
import jakarta.inject.Inject; import jakarta.inject.Inject;
import jakarta.inject.Singleton; import jakarta.inject.Singleton;
import jakarta.validation.ConstraintViolationException;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.ClassUtils; import org.apache.commons.lang3.ClassUtils;
import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.EqualsBuilder;
@@ -22,7 +25,17 @@ import org.apache.commons.lang3.builder.EqualsBuilder;
import java.lang.reflect.InvocationTargetException; import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method; import java.lang.reflect.Method;
import java.lang.reflect.Modifier; import java.lang.reflect.Modifier;
import java.util.*; import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import java.util.stream.IntStream; import java.util.stream.IntStream;
@@ -30,27 +43,99 @@ import java.util.stream.Stream;
import java.util.stream.StreamSupport; import java.util.stream.StreamSupport;
/** /**
* Provides business logic to manipulate {@link Flow} * Provides business logic for manipulating flow objects.
*/ */
@Singleton @Singleton
@Slf4j @Slf4j
public class FlowService { public class FlowService {
private static final ObjectMapper NON_DEFAULT_OBJECT_MAPPER = JacksonMapper.ofJson()
.copy()
.setSerializationInclusion(JsonInclude.Include.NON_DEFAULT);
@Inject @Inject
Optional<FlowRepositoryInterface> flowRepository; Optional<FlowRepositoryInterface> flowRepository;
@Inject
YamlParser yamlParser;
@Inject @Inject
PluginDefaultService pluginDefaultService; PluginDefaultService pluginDefaultService;
@Inject @Inject
PluginRegistry pluginRegistry; PluginRegistry pluginRegistry;
@Inject
ModelValidator modelValidator;
/**
* Validates and creates the given flow.
* <p>
* Validation is performed on the flow source after all plugin default values have been injected.
*
* @param flow The flow.
* @param strictValidation Specifies whether to perform a strict validation of the flow.
* @return The created {@link FlowWithSource}.
*/
public FlowWithSource create(final GenericFlow flow, final boolean strictValidation) {
Objects.requireNonNull(flow, "Cannot create null flow");
if (flow.getSource() == null || flow.getSource().isBlank()) {
throw new IllegalArgumentException("Cannot create flow with null or blank source");
}
// Check Flow with defaults
FlowWithSource flowWithDefault = pluginDefaultService.injectAllDefaults(flow, strictValidation);
modelValidator.validate(flowWithDefault);
return repository().create(flow);
}
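A hedged usage sketch of the new creation path, assuming a raw YAML source string and an injected FlowService (identifiers are illustrative):

    // GenericFlow keeps tasks and triggers un-typed so defaults can be injected before strongly-typed deserialization.
    GenericFlow flow = GenericFlow.fromYaml(tenantId, source);
    // Validates against the defaulted model, then persists the original source.
    FlowWithSource created = flowService.create(flow, true);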
private FlowRepositoryInterface repository() {
return flowRepository
.orElseThrow(() -> new IllegalStateException("Cannot perform operation on flow. Cause: No FlowRepository"));
}
/**
* Validates the given flow source.
* <p>
* The YAML source can contain one or more objects.
*
* @param tenantId The tenant identifier.
* @param flows The YAML source.
* @return The list of validation constraint violations.
*/
public List<ValidateConstraintViolation> validate(final String tenantId, final String flows) {
AtomicInteger index = new AtomicInteger(0);
return Stream
.of(flows.split("\\n+---\\n*?"))
.map(source -> {
ValidateConstraintViolation.ValidateConstraintViolationBuilder<?, ?> validateConstraintViolationBuilder = ValidateConstraintViolation.builder();
validateConstraintViolationBuilder.index(index.getAndIncrement());
try {
FlowWithSource flow = pluginDefaultService.parseFlowWithAllDefaults(tenantId, source, true);
Integer sentRevision = flow.getRevision();
if (sentRevision != null) {
Integer lastRevision = Optional.ofNullable(repository().lastRevision(tenantId, flow.getNamespace(), flow.getId()))
.orElse(0);
validateConstraintViolationBuilder.outdated(!sentRevision.equals(lastRevision + 1));
}
validateConstraintViolationBuilder.deprecationPaths(deprecationPaths(flow));
validateConstraintViolationBuilder.warnings(warnings(flow, tenantId));
validateConstraintViolationBuilder.infos(relocations(source).stream().map(relocation -> relocation.from() + " is replaced by " + relocation.to()).toList());
validateConstraintViolationBuilder.flow(flow.getId());
validateConstraintViolationBuilder.namespace(flow.getNamespace());
modelValidator.validate(flow);
} catch (ConstraintViolationException e) {
validateConstraintViolationBuilder.constraints(e.getMessage());
} catch (RuntimeException re) {
// In case of any error, we add a validation violation so the error is displayed in the UI.
// We may change that by throwing an internal error and handling it in the UI, but this should not occur except for rare cases
// in development, such as incompatible plugin versions.
log.error("Unable to validate the flow", re);
validateConstraintViolationBuilder.constraints("Unable to validate the flow: " + re.getMessage());
}
return validateConstraintViolationBuilder.build();
})
.collect(Collectors.toList());
}
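A short sketch of how a caller might consume the result for a multi-document source (assuming the usual Lombok-style getters on ValidateConstraintViolation; identifiers are illustrative):

    List<ValidateConstraintViolation> violations = flowService.validate(tenantId, multiDocumentYaml);
    violations.stream()
        .filter(v -> v.getConstraints() != null)
        .forEach(v -> log.warn("Flow {} at index {} is invalid: {}", v.getFlow(), v.getIndex(), v.getConstraints()));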
public FlowWithSource importFlow(String tenantId, String source) { public FlowWithSource importFlow(String tenantId, String source) {
return this.importFlow(tenantId, source, false); return this.importFlow(tenantId, source, false);
} }
@@ -60,29 +145,33 @@ public class FlowService {
throw noRepositoryException(); throw noRepositoryException();
} }
FlowWithSource withTenant = yamlParser.parse(source, Flow.class).toBuilder() final GenericFlow flow = GenericFlow.fromYaml(tenantId, source);
.tenantId(tenantId)
.build()
.withSource(source);
FlowRepositoryInterface flowRepository = this.flowRepository.get(); FlowRepositoryInterface flowRepository = this.flowRepository.get();
Optional<FlowWithSource> flowWithSource = flowRepository Optional<FlowWithSource> maybeExisting = flowRepository.findByIdWithSource(
.findByIdWithSource(withTenant.getTenantId(), withTenant.getNamespace(), withTenant.getId(), Optional.empty(), true); flow.getTenantId(),
if (dryRun) { flow.getNamespace(),
return flowWithSource flow.getId(),
.map(previous -> { Optional.empty(),
if (previous.equals(withTenant, source) && !previous.isDeleted()) { true
return previous; );
} else {
return FlowWithSource.of(withTenant.toBuilder().revision(previous.getRevision() + 1).build(), source);
}
})
.orElseGet(() -> FlowWithSource.of(withTenant, source).toBuilder().revision(1).build());
}
return flowWithSource // Inject default plugin 'version' props before converting
.map(previous -> flowRepository.update(withTenant, previous, source, pluginDefaultService.injectDefaults(withTenant))) // to flow to correctly resolve all plugin type.
.orElseGet(() -> flowRepository.create(withTenant, source, pluginDefaultService.injectDefaults(withTenant))); FlowWithSource flowToImport = pluginDefaultService.injectVersionDefaults(flow, false);
if (dryRun) {
return maybeExisting
.map(previous -> previous.isSameWithSource(flowToImport) && !previous.isDeleted() ?
previous :
FlowWithSource.of(flowToImport.toBuilder().revision(previous.getRevision() + 1).build(), source)
)
.orElseGet(() -> FlowWithSource.of(flowToImport, source).toBuilder().revision(1).build());
} else {
return maybeExisting
.map(previous -> flowRepository.update(flow, previous))
.orElseGet(() -> flowRepository.create(flow));
}
} }
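A brief sketch of the two import modes (identifiers are illustrative):

    // Dry run: returns the flow with the next revision computed, nothing is persisted.
    FlowWithSource preview = flowService.importFlow(tenantId, source, true);
    // Real import: creates the flow or updates the existing one through the repository.
    FlowWithSource imported = flowService.importFlow(tenantId, source, false);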
public List<FlowWithSource> findByNamespaceWithSource(String tenantId, String namespace) { public List<FlowWithSource> findByNamespaceWithSource(String tenantId, String namespace) {
@@ -117,7 +206,7 @@ public class FlowService {
return flowRepository.get().findById(tenantId, namespace, flowId); return flowRepository.get().findById(tenantId, namespace, flowId);
} }
public Stream<FlowWithSource> keepLastVersion(Stream<FlowWithSource> stream) { public Stream<FlowInterface> keepLastVersion(Stream<FlowInterface> stream) {
return keepLastVersionCollector(stream); return keepLastVersionCollector(stream);
} }
@@ -262,17 +351,17 @@ public class FlowService {
.filter(method -> !Modifier.isStatic(method.getModifiers())); .filter(method -> !Modifier.isStatic(method.getModifiers()));
} }
public Collection<FlowWithSource> keepLastVersion(List<FlowWithSource> flows) { public Collection<FlowInterface> keepLastVersion(List<FlowInterface> flows) {
return keepLastVersionCollector(flows.stream()).toList(); return keepLastVersionCollector(flows.stream()).toList();
} }
public Stream<FlowWithSource> keepLastVersionCollector(Stream<FlowWithSource> stream) { public Stream<FlowInterface> keepLastVersionCollector(Stream<FlowInterface> stream) {
// Use a Map to track the latest version of each flow // Use a Map to track the latest version of each flow
Map<String, FlowWithSource> latestFlows = new HashMap<>(); Map<String, FlowInterface> latestFlows = new HashMap<>();
stream.forEach(flow -> { stream.forEach(flow -> {
String uid = flow.uidWithoutRevision(); String uid = flow.uidWithoutRevision();
FlowWithSource existing = latestFlows.get(uid); FlowInterface existing = latestFlows.get(uid);
// Update only if the current flow has a higher revision // Update only if the current flow has a higher revision
if (existing == null || flow.getRevision() > existing.getRevision()) { if (existing == null || flow.getRevision() > existing.getRevision()) {
@@ -289,7 +378,7 @@ public class FlowService {
protected boolean removeUnwanted(Flow f, Execution execution) { protected boolean removeUnwanted(Flow f, Execution execution) {
// we don't allow recursive // we don't allow recursive
return !f.uidWithoutRevision().equals(Flow.uidWithoutRevision(execution)); return !f.uidWithoutRevision().equals(FlowId.uidWithoutRevision(execution));
} }
public static List<AbstractTrigger> findRemovedTrigger(Flow flow, Flow previous) { public static List<AbstractTrigger> findRemovedTrigger(Flow flow, Flow previous) {
@@ -327,22 +416,6 @@ public class FlowService {
return source + String.format("\ndisabled: %s", disabled); return source + String.format("\ndisabled: %s", disabled);
} }
public static String generateSource(Flow flow) {
try {
String json = NON_DEFAULT_OBJECT_MAPPER.writeValueAsString(flow);
Object map = fixSnakeYaml(JacksonMapper.toMap(json));
String source = JacksonMapper.ofYaml().writeValueAsString(map);
// remove the revision from the generated source
return source.replaceFirst("(?m)^revision: \\d+\n?","");
} catch (JsonProcessingException e) {
log.warn("Unable to convert flow json '{}' '{}'({})", flow.getNamespace(), flow.getId(), flow.getRevision(), e);
return null;
}
}
// Used in Git plugin // Used in Git plugin
public List<Flow> findByNamespacePrefix(String tenantId, String namespacePrefix) { public List<Flow> findByNamespacePrefix(String tenantId, String namespacePrefix) {
if (flowRepository.isEmpty()) { if (flowRepository.isEmpty()) {
@@ -361,50 +434,6 @@ public class FlowService {
return flowRepository.get().delete(flow); return flowRepository.get().delete(flow);
} }
/**
* Dirty hack but only concern previous flow with no source code in org.yaml.snakeyaml.emitter.Emitter:
* <pre>
* if (previousSpace) {
* spaceBreak = true;
* }
* </pre>
* This control will detect ` \n` as a no valid entry on a string and will break the multiline to transform in single line
*
* @param object the object to fix
* @return the modified object
*/
private static Object fixSnakeYaml(Object object) {
if (object instanceof Map<?, ?> mapValue) {
return mapValue
.entrySet()
.stream()
.map(entry -> new AbstractMap.SimpleEntry<>(
fixSnakeYaml(entry.getKey()),
fixSnakeYaml(entry.getValue())
))
.filter(entry -> entry.getValue() != null)
.collect(Collectors.toMap(
Map.Entry::getKey,
Map.Entry::getValue,
(u, v) -> {
throw new IllegalStateException(String.format("Duplicate key %s", u));
},
LinkedHashMap::new
));
} else if (object instanceof Collection<?> collectionValue) {
return collectionValue
.stream()
.map(FlowService::fixSnakeYaml)
.toList();
} else if (object instanceof String item) {
if (item.contains("\n")) {
return item.replaceAll("\\s+\\n", "\\\n");
}
}
return object;
}
/** /**
* Return true if the namespace is allowed from the namespace denoted by 'fromTenant' and 'fromNamespace'. * Return true if the namespace is allowed from the namespace denoted by 'fromTenant' and 'fromNamespace'.
* As namespace restriction is an EE feature, this will always return true in OSS. * As namespace restriction is an EE feature, this will always return true in OSS.

View File

@@ -49,7 +49,7 @@ public class FlowTriggerService {
.map(io.kestra.plugin.core.trigger.Flow.class::cast); .map(io.kestra.plugin.core.trigger.Flow.class::cast);
} }
public List<Execution> computeExecutionsFromFlowTriggers(Execution execution, List<Flow> allFlows, Optional<MultipleConditionStorageInterface> multipleConditionStorage) { public List<Execution> computeExecutionsFromFlowTriggers(Execution execution, List<? extends Flow> allFlows, Optional<MultipleConditionStorageInterface> multipleConditionStorage) {
List<FlowWithFlowTrigger> validTriggersBeforeMultipleConditionEval = allFlows.stream() List<FlowWithFlowTrigger> validTriggersBeforeMultipleConditionEval = allFlows.stream()
// prevent recursive flow triggers // prevent recursive flow triggers
.filter(flow -> flowService.removeUnwanted(flow, execution)) .filter(flow -> flowService.removeUnwanted(flow, execution))

View File

@@ -56,7 +56,7 @@ public class GraphService {
public GraphCluster of(GraphCluster baseGraph, FlowWithSource flow, List<String> expandedSubflows, Map<String, FlowWithSource> flowByUid, Execution execution) throws IllegalVariableEvaluationException { public GraphCluster of(GraphCluster baseGraph, FlowWithSource flow, List<String> expandedSubflows, Map<String, FlowWithSource> flowByUid, Execution execution) throws IllegalVariableEvaluationException {
String tenantId = flow.getTenantId(); String tenantId = flow.getTenantId();
flow = pluginDefaultService.injectDefaults(flow); flow = pluginDefaultService.injectAllDefaults(flow, false);
List<Trigger> triggers = null; List<Trigger> triggers = null;
if (flow.getTriggers() != null) { if (flow.getTriggers() != null) {
triggers = triggerRepository.find(Pageable.UNPAGED, null, tenantId, flow.getNamespace(), flow.getId(), null); triggers = triggerRepository.find(Pageable.UNPAGED, null, tenantId, flow.getNamespace(), flow.getId(), null);
@@ -120,7 +120,7 @@ public class GraphService {
)); ));
} }
); );
subflow = pluginDefaultService.injectDefaults(subflow); subflow = pluginDefaultService.injectAllDefaults(subflow, false);
SubflowGraphTask finalSubflowGraphTask = subflowGraphTask; SubflowGraphTask finalSubflowGraphTask = subflowGraphTask;
return new TaskToClusterReplacer( return new TaskToClusterReplacer(

View File

@@ -2,7 +2,7 @@ package io.kestra.core.services;
import io.kestra.core.exceptions.IllegalVariableEvaluationException; import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.models.Label; import io.kestra.core.models.Label;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.triggers.AbstractTrigger; import io.kestra.core.models.triggers.AbstractTrigger;
import io.kestra.core.runners.RunContext; import io.kestra.core.runners.RunContext;
import io.kestra.core.utils.ListUtils; import io.kestra.core.utils.ListUtils;
@@ -17,7 +17,7 @@ public final class LabelService {
/** /**
* Return flow labels excluding system labels. * Return flow labels excluding system labels.
*/ */
public static List<Label> labelsExcludingSystem(Flow flow) { public static List<Label> labelsExcludingSystem(FlowInterface flow) {
return ListUtils.emptyOnNull(flow.getLabels()).stream().filter(label -> !label.key().startsWith(Label.SYSTEM_PREFIX)).toList(); return ListUtils.emptyOnNull(flow.getLabels()).stream().filter(label -> !label.key().startsWith(Label.SYSTEM_PREFIX)).toList();
} }
@@ -27,7 +27,7 @@ public final class LabelService {
* Trigger labels will be rendered via the run context but not flow labels. * Trigger labels will be rendered via the run context but not flow labels.
* In case rendering is not possible, the label will be omitted. * In case rendering is not possible, the label will be omitted.
*/ */
public static List<Label> fromTrigger(RunContext runContext, Flow flow, AbstractTrigger trigger) { public static List<Label> fromTrigger(RunContext runContext, FlowInterface flow, AbstractTrigger trigger) {
final List<Label> labels = new ArrayList<>(); final List<Label> labels = new ArrayList<>();
if (flow.getLabels() != null) { if (flow.getLabels() != null) {

View File

@@ -3,7 +3,8 @@ package io.kestra.core.services;
import io.kestra.core.models.executions.Execution; import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.LogEntry; import io.kestra.core.models.executions.LogEntry;
import io.kestra.core.models.executions.TaskRun; import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.FlowId;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.triggers.TriggerContext; import io.kestra.core.models.triggers.TriggerContext;
import io.kestra.core.repositories.LogRepositoryInterface; import io.kestra.core.repositories.LogRepositoryInterface;
import io.micronaut.context.annotation.Value; import io.micronaut.context.annotation.Value;
@@ -39,7 +40,7 @@ public class LogService {
@Inject @Inject
private LogRepositoryInterface logRepository; private LogRepositoryInterface logRepository;
public void logExecution(Flow flow, Logger logger, Level level, String message, Object... args) { public void logExecution(FlowId flow, Logger logger, Level level, String message, Object... args) {
String finalMsg = tenantEnabled ? FLOW_PREFIX_WITH_TENANT + message : FLOW_PREFIX_NO_TENANT + message; String finalMsg = tenantEnabled ? FLOW_PREFIX_WITH_TENANT + message : FLOW_PREFIX_NO_TENANT + message;
Object[] executionArgs = tenantEnabled ? Object[] executionArgs = tenantEnabled ?
new Object[] { flow.getTenantId(), flow.getNamespace(), flow.getId() } : new Object[] { flow.getTenantId(), flow.getNamespace(), flow.getId() } :

View File

@@ -2,13 +2,17 @@ package io.kestra.core.services;
import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.annotations.VisibleForTesting; import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import io.kestra.core.exceptions.KestraRuntimeException;
import io.kestra.core.models.Plugin; import io.kestra.core.models.Plugin;
import io.kestra.core.models.executions.Execution; import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.LogEntry; import io.kestra.core.models.executions.LogEntry;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithException;
import io.kestra.core.models.flows.FlowWithSource; import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.PluginDefault; import io.kestra.core.models.flows.PluginDefault;
import io.kestra.core.plugins.PluginRegistry; import io.kestra.core.plugins.PluginRegistry;
@@ -19,22 +23,34 @@ import io.kestra.core.runners.RunContextLogger;
import io.kestra.core.serializers.JacksonMapper; import io.kestra.core.serializers.JacksonMapper;
import io.kestra.core.serializers.YamlParser; import io.kestra.core.serializers.YamlParser;
import io.kestra.core.utils.MapUtils; import io.kestra.core.utils.MapUtils;
import io.kestra.plugin.core.flow.Template;
import io.micronaut.core.annotation.Nullable; import io.micronaut.core.annotation.Nullable;
import jakarta.annotation.PostConstruct; import jakarta.annotation.PostConstruct;
import jakarta.inject.Inject;
import jakarta.inject.Named;
import jakarta.inject.Provider;
import jakarta.inject.Singleton;
import jakarta.validation.ConstraintViolationException;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.event.Level;
import java.util.*; import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import java.util.stream.Stream; import java.util.stream.Stream;
import jakarta.inject.Inject; /**
import jakarta.inject.Named; * Services for parsing flows and injecting plugin default values.
import jakarta.inject.Singleton; */
import jakarta.validation.ConstraintViolationException;
@Singleton @Singleton
@Slf4j @Slf4j
public class PluginDefaultService { public class PluginDefaultService {
@@ -44,6 +60,10 @@ public class PluginDefaultService {
private static final ObjectMapper OBJECT_MAPPER = JacksonMapper.ofYaml().copy() private static final ObjectMapper OBJECT_MAPPER = JacksonMapper.ofYaml().copy()
.setSerializationInclusion(JsonInclude.Include.NON_NULL); .setSerializationInclusion(JsonInclude.Include.NON_NULL);
private static final String PLUGIN_DEFAULTS_FIELD = "pluginDefaults";
private static final TypeReference<List<PluginDefault>> PLUGIN_DEFAULTS_TYPE_REF = new TypeReference<>() {
};
@Nullable @Nullable
@Inject @Inject
@@ -53,16 +73,16 @@ public class PluginDefaultService {
@Inject @Inject
protected PluginGlobalDefaultConfiguration pluginGlobalDefault; protected PluginGlobalDefaultConfiguration pluginGlobalDefault;
@Inject
protected YamlParser yamlParser;
@Inject @Inject
@Named(QueueFactoryInterface.WORKERTASKLOG_NAMED) @Named(QueueFactoryInterface.WORKERTASKLOG_NAMED)
@Nullable @Nullable
protected QueueInterface<LogEntry> logQueue; protected QueueInterface<LogEntry> logQueue;
@Inject @Inject
private PluginRegistry pluginRegistry; protected PluginRegistry pluginRegistry;
@Inject
protected Provider<LogService> logService; // lazy-init
private final AtomicBoolean warnOnce = new AtomicBoolean(false); private final AtomicBoolean warnOnce = new AtomicBoolean(false);
@@ -83,38 +103,69 @@ public class PluginDefaultService {
} }
/** /**
* Gets all the default values for the given flow.
*
* @param flow the flow to extract default * @param flow the flow to extract defaults from
* @return list of {@code PluginDefault} ordered by most important first * @return list of {@code PluginDefault} ordered by most important first
*/ */
protected List<PluginDefault> mergeAllDefaults(Flow flow) { protected List<PluginDefault> getAllDefaults(final String tenantId,
List<PluginDefault> list = new ArrayList<>(); final String namespace,
final Map<String, Object> flow) {
List<PluginDefault> defaults = new ArrayList<>();
defaults.addAll(getFlowDefaults(flow));
defaults.addAll(getGlobalDefaults());
return defaults;
}
if (flow.getPluginDefaults() != null) { /**
list.addAll(flow.getPluginDefaults()); * Gets the flow-level default values.
*
* @param flow the flow to extract defaults from
* @return list of {@code PluginDefault} ordered by most important first
*/
protected List<PluginDefault> getFlowDefaults(final Map<String, Object> flow) {
Object defaults = flow.get(PLUGIN_DEFAULTS_FIELD);
if (defaults != null) {
return OBJECT_MAPPER.convertValue(defaults, PLUGIN_DEFAULTS_TYPE_REF);
} else {
return List.of();
} }
}
/**
* Gets the global default values.
*
* @return list of {@code PluginDefault} ordered by most important first
*/
protected List<PluginDefault> getGlobalDefaults() {
List<PluginDefault> defaults = new ArrayList<>();
if (taskGlobalDefault != null && taskGlobalDefault.getDefaults() != null) { if (taskGlobalDefault != null && taskGlobalDefault.getDefaults() != null) {
if (warnOnce.compareAndSet(false, true)) { if (warnOnce.compareAndSet(false, true)) {
log.warn("Global Task Defaults are deprecated, please use Global Plugin Defaults instead via the 'kestra.plugins.defaults' configuration property."); log.warn("Global Task Defaults are deprecated, please use Global Plugin Defaults instead via the 'kestra.plugins.defaults' configuration property.");
} }
list.addAll(taskGlobalDefault.getDefaults()); defaults.addAll(taskGlobalDefault.getDefaults());
} }
if (pluginGlobalDefault != null && pluginGlobalDefault.getDefaults() != null) { if (pluginGlobalDefault != null && pluginGlobalDefault.getDefaults() != null) {
list.addAll(pluginGlobalDefault.getDefaults()); defaults.addAll(pluginGlobalDefault.getDefaults());
} }
return defaults;
return list;
} }
/** /**
* Inject plugin defaults into a Flow. * Parses the given abstract flow and injects all default values, returning a parsed {@link FlowWithSource}.
* In case of exception, the flow is returned as is, *
* then a logger is created based on the execution to be able to log an exception in the execution logs. * <p>
* If an exception occurs during parsing, the original flow is returned unchanged, and the exception is logged
* for the passed {@code execution}.
* </p>
*
* @return a parsed {@link FlowWithSource}, or a {@link FlowWithException} if parsing fails
*/ */
public FlowWithSource injectDefaults(FlowWithSource flow, Execution execution) { public FlowWithSource injectDefaults(FlowInterface flow, Execution execution) {
try { try {
return this.injectDefaults(flow); return this.injectAllDefaults(flow, false);
} catch (Exception e) { } catch (Exception e) {
RunContextLogger RunContextLogger
.logEntries( .logEntries(
@@ -128,17 +179,22 @@ public class PluginDefaultService {
// silently do nothing // silently do nothing
} }
}); });
return flow; return readWithoutDefaultsOrThrow(flow);
} }
} }
/** /**
* @deprecated use {@link #injectDefaults(FlowWithSource, Logger)} instead * Parses the given abstract flow and injects all default values, returning a parsed {@link FlowWithSource}.
*
* <p>
* If an exception occurs during parsing, the original flow is returned unchanged, and the exception is logged.
* </p>
*
* @return a parsed {@link FlowWithSource}, or a {@link FlowWithException} if parsing fails
*/ */
@Deprecated(forRemoval = true, since = "0.20") public FlowWithSource injectAllDefaults(FlowInterface flow, Logger logger) {
public Flow injectDefaults(Flow flow, Logger logger) {
try { try {
return this.injectDefaults(flow); return this.injectAllDefaults(flow, false);
} catch (Exception e) { } catch (Exception e) {
logger.warn( logger.warn(
"Can't inject plugin defaults on tenant {}, namespace '{}', flow '{}' with errors '{}'", "Can't inject plugin defaults on tenant {}, namespace '{}', flow '{}' with errors '{}'",
@@ -148,80 +204,207 @@ public class PluginDefaultService {
e.getMessage(), e.getMessage(),
e e
); );
return flow; return readWithoutDefaultsOrThrow(flow);
} }
} }
/** private static FlowWithSource readWithoutDefaultsOrThrow(final FlowInterface flow) {
* Inject plugin defaults into a Flow. if (flow instanceof FlowWithSource item) {
* In case of exception, the flow is returned as is, then the logger is used to log the exception. return item;
*/ }
public FlowWithSource injectDefaults(FlowWithSource flow, Logger logger) {
if (flow instanceof Flow item) {
return FlowWithSource.of(item, item.sourceOrGenerateIfNull());
}
// The block below should only be reached during testing for failure scenarios
try { try {
return this.injectDefaults(flow); Flow parsed = NON_DEFAULT_OBJECT_MAPPER.readValue(flow.getSource(), Flow.class);
} catch (Exception e) { return FlowWithSource.of(parsed, flow.getSource());
logger.warn(
"Can't inject plugin defaults on tenant {}, namespace '{}', flow '{}' with errors '{}'",
flow.getTenantId(),
flow.getNamespace(),
flow.getId(),
e.getMessage(),
e
);
return flow;
}
}
/**
* @deprecated use {@link #injectDefaults(FlowWithSource)} instead
*/
@Deprecated(forRemoval = true, since = "0.20")
public Flow injectDefaults(Flow flow) throws ConstraintViolationException {
if (flow instanceof FlowWithSource flowWithSource) {
return this.injectDefaults(flowWithSource);
}
Map<String, Object> flowAsMap = NON_DEFAULT_OBJECT_MAPPER.convertValue(flow, JacksonMapper.MAP_TYPE_REFERENCE);
return innerInjectDefault(flow, flowAsMap);
}
/**
* Inject plugin defaults into a Flow.
*/
public FlowWithSource injectDefaults(FlowWithSource flow) throws ConstraintViolationException {
try {
String source = flow.getSource();
if (source == null) {
// Flow revisions created from older Kestra versions may not be linked to their original source.
// In such cases, fall back to the generated source approach to enable plugin default injection.
source = flow.generateSource();
}
if (source == null) {
// return immediately if source is still null (should never happen)
return flow;
}
Map<String, Object> flowAsMap = OBJECT_MAPPER.readValue(source, JacksonMapper.MAP_TYPE_REFERENCE);
Flow withDefault = innerInjectDefault(flow, flowAsMap);
// revision and tenants are not in the source, so we copy them manually
return withDefault.toBuilder()
.tenantId(flow.getTenantId())
.revision(flow.getRevision())
.build()
.withSource(source);
} catch (JsonProcessingException e) { } catch (JsonProcessingException e) {
throw new RuntimeException(e); throw new KestraRuntimeException("Failed to read flow from source", e);
} }
} }
/**
* Parses the given abstract flow and injects all default values, returning a parsed {@link FlowWithSource}.
*
* <p>
* If {@code strictParsing} is {@code true}, the parsing will fail in the following cases:
* </p>
* <ul>
* <li>The source contains duplicate properties.</li>
* <li>The source contains unknown properties.</li>
* </ul>
*
* @param flow the flow to be parsed
* @param strictParsing whether to fail on duplicate or unknown properties
* @return a parsed {@link FlowWithSource}
*
* @throws ConstraintViolationException if {@code strictParsing} is {@code true} and the source does not meet strict validation requirements
* @throws KestraRuntimeException if an error occurs while parsing the flow and it cannot be processed
*/
public FlowWithSource injectAllDefaults(final FlowInterface flow, final boolean strictParsing) {
// Flow revisions created from older Kestra versions may not be linked to their original source.
// In such cases, fall back to the generated source approach to enable plugin default injection.
String source = flow.sourceOrGenerateIfNull();
if (source == null) {
// This should never happen
String error = "Cannot apply plugin defaults. Cause: flow has no defined source.";
logService.get().logExecution(flow, log, Level.ERROR, error);
throw new IllegalArgumentException(error);
}
return parseFlowWithAllDefaults(
flow.getTenantId(),
flow.getNamespace(),
flow.getRevision(),
flow.isDeleted(),
source,
false,
strictParsing
);
}
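A minimal sketch contrasting lenient and strict parsing (assuming an injected PluginDefaultService and a FlowInterface instance named flow):

    // Lenient: unknown or duplicate properties in the source are tolerated.
    FlowWithSource withDefaults = pluginDefaultService.injectAllDefaults(flow, false);
    // Strict: throws ConstraintViolationException on duplicate or unknown properties.
    FlowWithSource validated = pluginDefaultService.injectAllDefaults(flow, true);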
/**
* Parses the given abstract flow and injects default plugin versions, returning a parsed {@link FlowWithSource}.
*
* <p>
* If the provided flow already represents a concrete {@link FlowWithSource}, it is returned as is.
* </p>
*
* <p>
* If {@code safe} is set to {@code true} and the given flow cannot be parsed,
* this method returns a {@link FlowWithException} instead of throwing an error.
* </p>
*
* @param flow the flow to be parsed
* @param safe whether parsing errors should be handled gracefully
* @return a parsed {@link FlowWithSource}, or a {@link FlowWithException} if parsing fails and {@code safe} is {@code true}
*/
public FlowWithSource injectVersionDefaults(final FlowInterface flow, final boolean safe) {
if (flow instanceof FlowWithSource flowWithSource) {
// shortcut - if the flow is already fully parsed return it immediately.
return flowWithSource;
}
FlowWithSource result;
String source = flow.getSource();
try {
if (source == null) {
source = OBJECT_MAPPER.writeValueAsString(flow);
}
result = parseFlowWithAllDefaults(flow.getTenantId(), flow.getNamespace(), flow.getRevision(), flow.isDeleted(), source, true, false);
} catch (Exception e) {
if (safe) {
logService.get().logExecution(flow, log, Level.ERROR, "Failed to read flow.", e);
result = FlowWithException.from(flow, e);
// deleted is not part of the original 'source'
result = result.toBuilder().deleted(flow.isDeleted()).build();
} else {
throw new KestraRuntimeException(e);
}
}
return result;
}
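A short sketch of the safe mode, assuming FlowWithException exposes the captured error message (identifiers are illustrative):

    // safe = true: a parsing failure yields a FlowWithException instead of throwing.
    FlowWithSource resolved = pluginDefaultService.injectVersionDefaults(flow, true);
    if (resolved instanceof FlowWithException failed) {
        log.warn("Flow {} could not be parsed: {}", failed.getId(), failed.getException());
    }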
public Map<String, Object> injectVersionDefaults(@Nullable final String tenantId,
final String namespace,
final Map<String, Object> mapFlow) {
return innerInjectDefault(tenantId, namespace, mapFlow, true);
}
/**
* Parses and injects defaults into the given flow.
*
* @param tenantId the Tenant ID.
* @param source the flow source.
* @param strict whether to fail on duplicate or unknown properties.
* @return a new {@link FlowWithSource}.
*
* @throws ConstraintViolationException when parsing flow.
*/
public FlowWithSource parseFlowWithAllDefaults(@Nullable final String tenantId, final String source, final boolean strict) throws ConstraintViolationException {
return parseFlowWithAllDefaults(tenantId, null, null, false, source, false, strict);
}
/**
* Parses and injects defaults into the given flow.
*
* @param tenant the tenant identifier.
* @param namespace the namespace.
* @param revision the flow revision.
* @param isDeleted whether the flow is marked as deleted.
* @param source the flow source.
* @param onlyVersions whether to inject only the default 'version' property.
* @param strictParsing whether to fail on duplicate or unknown properties.
* @return a new {@link FlowWithSource}.
*
* @throws ConstraintViolationException when the flow cannot be parsed.
*/
private FlowWithSource parseFlowWithAllDefaults(@Nullable final String tenant,
@Nullable String namespace,
@Nullable Integer revision,
final boolean isDeleted,
final String source,
final boolean onlyVersions,
final boolean strictParsing) throws ConstraintViolationException {
try {
Map<String, Object> mapFlow = OBJECT_MAPPER.readValue(source, JacksonMapper.MAP_TYPE_REFERENCE);
namespace = namespace == null ? (String) mapFlow.get("namespace") : namespace;
revision = revision == null ? (Integer) mapFlow.get("revision") : revision;
mapFlow = innerInjectDefault(tenant, namespace, mapFlow, onlyVersions);
FlowWithSource withDefault = YamlParser.parse(mapFlow, FlowWithSource.class, strictParsing);
// revision, tenant, and deleted are not in the 'source', so we copy them manually
FlowWithSource full = withDefault.toBuilder()
.tenantId(tenant)
.revision(revision)
.deleted(isDeleted)
.source(source)
.build();
if (tenant != null) {
// This is a hack to set the tenant in template tasks.
// When using the Template task, we need the tenant to fetch the Template from the database.
// However, as the task is executed on the Executor we cannot retrieve it from the tenant service and have no other options.
// So we save it at flow creation/updating time.
full.allTasksWithChilds().stream().filter(task -> task instanceof Template).forEach(task -> ((Template) task).setTenantId(tenant));
}
return full;
} catch (JsonProcessingException e) {
throw new KestraRuntimeException(e);
}
}
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
private Flow innerInjectDefault(Flow flow, Map<String, Object> flowAsMap) { private Map<String, Object> innerInjectDefault(final String tenantId, final String namespace, Map<String, Object> flowAsMap, final boolean onlyVersions) {
List<PluginDefault> allDefaults = mergeAllDefaults(flow); List<PluginDefault> allDefaults = getAllDefaults(tenantId, namespace, flowAsMap);
if (onlyVersions) {
// filter only default 'version' property
allDefaults = allDefaults.stream()
.map(defaults -> {
Map<String, Object> filtered = defaults.getValues().entrySet()
.stream().filter(entry -> entry.getKey().equals("version"))
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
return filtered.isEmpty() ? null : defaults.toBuilder().values(filtered).build();
})
.filter(Objects::nonNull)
.collect(Collectors.toCollection(ArrayList::new));
}
if (allDefaults.isEmpty()) {
// no defaults to inject - return immediately.
return flowAsMap;
}
addAliases(allDefaults); addAliases(allDefaults);
Map<Boolean, List<PluginDefault>> allDefaultsGroup = allDefaults Map<Boolean, List<PluginDefault>> allDefaultsGroup = allDefaults
.stream() .stream()
.collect(Collectors.groupingBy(PluginDefault::isForced, Collectors.toList())); .collect(Collectors.groupingBy(PluginDefault::isForced, Collectors.toList()));
@@ -232,9 +415,9 @@ public class PluginDefaultService {
// forced plugin default need to be reverse, lower win // forced plugin default need to be reverse, lower win
Map<String, List<PluginDefault>> forced = pluginDefaultsToMap(Lists.reverse(allDefaultsGroup.getOrDefault(true, Collections.emptyList()))); Map<String, List<PluginDefault>> forced = pluginDefaultsToMap(Lists.reverse(allDefaultsGroup.getOrDefault(true, Collections.emptyList())));
Object pluginDefaults = flowAsMap.get("pluginDefaults"); Object pluginDefaults = flowAsMap.get(PLUGIN_DEFAULTS_FIELD);
if (pluginDefaults != null) { if (pluginDefaults != null) {
flowAsMap.remove("pluginDefaults"); flowAsMap.remove(PLUGIN_DEFAULTS_FIELD);
} }
// we apply default and overwrite with forced // we apply default and overwrite with forced
@@ -247,10 +430,11 @@ public class PluginDefaultService {
} }
if (pluginDefaults != null) { if (pluginDefaults != null) {
flowAsMap.put("pluginDefaults", pluginDefaults); flowAsMap.put(PLUGIN_DEFAULTS_FIELD, pluginDefaults);
} }
return yamlParser.parse(flowAsMap, Flow.class, false); return flowAsMap;
} }
/** /**
@@ -260,7 +444,7 @@ public class PluginDefaultService {
* validation will be disabled as we cannot differentiate between a prefix or an unknown type. * validation will be disabled as we cannot differentiate between a prefix or an unknown type.
*/ */
public List<String> validateDefault(PluginDefault pluginDefault) { public List<String> validateDefault(PluginDefault pluginDefault) {
Class<? extends Plugin> classByIdentifier = pluginRegistry.findClassByIdentifier(pluginDefault.getType()); Class<? extends Plugin> classByIdentifier = getClassByIdentifier(pluginDefault);
if (classByIdentifier == null) { if (classByIdentifier == null) {
// this can either be a prefix or a non-existing plugin, in both cases we cannot validate in detail // this can either be a prefix or a non-existing plugin, in both cases we cannot validate in detail
return Collections.emptyList(); return Collections.emptyList();
@@ -283,6 +467,10 @@ public class PluginDefaultService {
.toList(); .toList();
} }
protected Class<? extends Plugin> getClassByIdentifier(PluginDefault pluginDefault) {
return pluginRegistry.findClassByIdentifier(pluginDefault.getType());
}
private Map<String, List<PluginDefault>> pluginDefaultsToMap(List<PluginDefault> pluginDefaults) { private Map<String, List<PluginDefault>> pluginDefaultsToMap(List<PluginDefault> pluginDefaults) {
return pluginDefaults return pluginDefaults
.stream() .stream()
@@ -292,7 +480,7 @@ public class PluginDefaultService {
private void addAliases(List<PluginDefault> allDefaults) { private void addAliases(List<PluginDefault> allDefaults) {
List<PluginDefault> aliasedPluginDefault = allDefaults.stream() List<PluginDefault> aliasedPluginDefault = allDefaults.stream()
.map(pluginDefault -> { .map(pluginDefault -> {
Class<? extends Plugin> classByIdentifier = pluginRegistry.findClassByIdentifier(pluginDefault.getType()); Class<? extends Plugin> classByIdentifier = getClassByIdentifier(pluginDefault);
return classByIdentifier != null && !pluginDefault.getType().equals(classByIdentifier.getTypeName()) ? pluginDefault.toBuilder().type(classByIdentifier.getTypeName()).build() : null; return classByIdentifier != null && !pluginDefault.getType().equals(classByIdentifier.getTypeName()) ? pluginDefault.toBuilder().type(classByIdentifier.getTypeName()).build() : null;
}) })
.filter(Objects::nonNull) .filter(Objects::nonNull)
@@ -357,4 +545,42 @@ public class PluginDefaultService {
return result; return result;
} }
// -----------------------------------------------------------------------------------------------------------------
// DEPRECATED
// -----------------------------------------------------------------------------------------------------------------
/**
* @deprecated use {@link #injectAllDefaults(FlowInterface, Logger)} instead
*/
@Deprecated(forRemoval = true, since = "0.20")
public Flow injectDefaults(Flow flow, Logger logger) {
try {
return this.injectDefaults(flow);
} catch (Exception e) {
logger.warn(
"Can't inject plugin defaults on tenant {}, namespace '{}', flow '{}' with errors '{}'",
flow.getTenantId(),
flow.getNamespace(),
flow.getId(),
e.getMessage(),
e
);
return flow;
}
}
/**
* @deprecated use {@link #injectAllDefaults(FlowInterface, boolean)} instead
*/
@Deprecated(forRemoval = true, since = "0.20")
public Flow injectDefaults(Flow flow) throws ConstraintViolationException {
if (flow instanceof FlowWithSource flowWithSource) {
return this.injectAllDefaults(flowWithSource, false);
}
Map<String, Object> mapFlow = NON_DEFAULT_OBJECT_MAPPER.convertValue(flow, JacksonMapper.MAP_TYPE_REFERENCE);
mapFlow = innerInjectDefault(flow.getTenantId(), flow.getNamespace(), mapFlow, false);
return YamlParser.parse(mapFlow, Flow.class, false);
}
} }
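A minimal migration sketch for the deprecated injectDefaults methods above, using only the two injectAllDefaults signatures referenced in their javadoc; it assumes GenericFlow implements FlowInterface, leaves the return type to var, and the behavior notes are assumptions mirroring the deprecated variants:

    // assumed context: an injected PluginDefaultService bean, a tenantId/source pair and an SLF4J logger
    var flow    = GenericFlow.fromYaml(tenantId, source);
    var lenient = pluginDefaultService.injectAllDefaults(flow, logger); // assumed to log and keep the flow on failure, like injectDefaults(Flow, Logger)
    var strict  = pluginDefaultService.injectAllDefaults(flow, true);   // assumed to throw ConstraintViolationException, like injectDefaults(Flow)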

View File

@@ -1,8 +1,11 @@
package io.kestra.core.topologies; package io.kestra.core.topologies;
import com.google.common.annotations.VisibleForTesting;
import io.kestra.core.models.Label; import io.kestra.core.models.Label;
import io.kestra.core.models.conditions.Condition; import io.kestra.core.models.conditions.Condition;
import io.kestra.core.models.executions.Execution; import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithSource; import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.hierarchies.Graph; import io.kestra.core.models.hierarchies.Graph;
import io.kestra.core.models.tasks.ExecutableTask; import io.kestra.core.models.tasks.ExecutableTask;
@@ -140,7 +143,8 @@ public class FlowTopologyService {
} }
@Nullable @Nullable
public FlowRelation isChild(FlowWithSource parent, FlowWithSource child) { @VisibleForTesting
public FlowRelation isChild(Flow parent, Flow child) {
if (this.isFlowTaskChild(parent, child)) { if (this.isFlowTaskChild(parent, child)) {
return FlowRelation.FLOW_TASK; return FlowRelation.FLOW_TASK;
} }
@@ -152,7 +156,7 @@ public class FlowTopologyService {
return null; return null;
} }
protected boolean isFlowTaskChild(FlowWithSource parent, FlowWithSource child) { protected boolean isFlowTaskChild(Flow parent, Flow child) {
try { try {
return parent return parent
.allTasksWithChilds() .allTasksWithChilds()
@@ -168,7 +172,7 @@ public class FlowTopologyService {
} }
} }
protected boolean isTriggerChild(FlowWithSource parent, FlowWithSource child) { protected boolean isTriggerChild(Flow parent, Flow child) {
List<AbstractTrigger> triggers = ListUtils.emptyOnNull(child.getTriggers()); List<AbstractTrigger> triggers = ListUtils.emptyOnNull(child.getTriggers());
// simulated execution: we add a "simulated" label so conditions can know that the evaluation is for a simulated execution // simulated execution: we add a "simulated" label so conditions can know that the evaluation is for a simulated execution
@@ -196,7 +200,7 @@ public class FlowTopologyService {
return conditionMatch && preconditionMatch; return conditionMatch && preconditionMatch;
} }
private boolean validateCondition(Condition condition, FlowWithSource child, Execution execution) { private boolean validateCondition(Condition condition, FlowInterface child, Execution execution) {
if (isFilterCondition(condition)) { if (isFilterCondition(condition)) {
return true; return true;
} }
@@ -208,7 +212,7 @@ public class FlowTopologyService {
return this.conditionService.isValid(condition, child, execution); return this.conditionService.isValid(condition, child, execution);
} }
private boolean validateMultipleConditions(Map<String, Condition> multipleConditions, FlowWithSource child, Execution execution) { private boolean validateMultipleConditions(Map<String, Condition> multipleConditions, FlowInterface child, Execution execution) {
List<Condition> conditions = multipleConditions List<Condition> conditions = multipleConditions
.values() .values()
.stream() .stream()

View File

@@ -6,6 +6,7 @@ import io.kestra.core.models.annotations.Plugin;
import io.kestra.core.models.annotations.PluginProperty; import io.kestra.core.models.annotations.PluginProperty;
import io.kestra.core.models.executions.Execution; import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.State; import io.kestra.core.models.flows.State;
import io.kestra.core.models.property.Property; import io.kestra.core.models.property.Property;
import io.kestra.core.models.tasks.RunnableTask; import io.kestra.core.models.tasks.RunnableTask;
@@ -92,7 +93,7 @@ public class Resume extends Task implements RunnableTask<VoidOutput> {
Execution execution = executionRepository.findById(executionInfo.tenantId(), executionInfo.id()) Execution execution = executionRepository.findById(executionInfo.tenantId(), executionInfo.id())
.orElseThrow(() -> new IllegalArgumentException("No execution found for execution id " + executionInfo.id())); .orElseThrow(() -> new IllegalArgumentException("No execution found for execution id " + executionInfo.id()));
Flow flow = flowExecutor.findByExecution(execution).orElseThrow(() -> new IllegalArgumentException("Flow not found for execution id " + executionInfo.id())); FlowInterface flow = flowExecutor.findByExecution(execution).orElseThrow(() -> new IllegalArgumentException("Flow not found for execution id " + executionInfo.id()));
Map<String, Object> renderedInputs = runContext.render(this.inputs).asMap(String.class, Object.class); Map<String, Object> renderedInputs = runContext.render(this.inputs).asMap(String.class, Object.class);
renderedInputs = !renderedInputs.isEmpty() ? renderedInputs : null; renderedInputs = !renderedInputs.isEmpty() ? renderedInputs : null;

View File

@@ -14,6 +14,7 @@ import io.kestra.core.models.executions.NextTaskRun;
import io.kestra.core.models.executions.TaskRun; import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.executions.TaskRunAttempt; import io.kestra.core.models.executions.TaskRunAttempt;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.State; import io.kestra.core.models.flows.State;
import io.kestra.core.models.hierarchies.GraphCluster; import io.kestra.core.models.hierarchies.GraphCluster;
import io.kestra.core.models.hierarchies.RelationType; import io.kestra.core.models.hierarchies.RelationType;
@@ -531,7 +532,7 @@ public class ForEachItem extends Task implements FlowableTask<VoidOutput>, Child
public Optional<SubflowExecutionResult> createSubflowExecutionResult( public Optional<SubflowExecutionResult> createSubflowExecutionResult(
RunContext runContext, RunContext runContext,
TaskRun taskRun, TaskRun taskRun,
Flow flow, FlowInterface flow,
Execution execution Execution execution
) { ) {

View File

@@ -8,6 +8,7 @@ import io.kestra.core.models.annotations.Example;
import io.kestra.core.models.annotations.Plugin; import io.kestra.core.models.annotations.Plugin;
import io.kestra.core.models.annotations.PluginProperty; import io.kestra.core.models.annotations.PluginProperty;
import io.kestra.core.models.executions.Execution; import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.property.Property; import io.kestra.core.models.property.Property;
import io.kestra.core.models.executions.TaskRun; import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.executions.TaskRunAttempt; import io.kestra.core.models.executions.TaskRunAttempt;
@@ -197,7 +198,7 @@ public class Subflow extends Task implements ExecutableTask<Subflow.Output>, Chi
public Optional<SubflowExecutionResult> createSubflowExecutionResult( public Optional<SubflowExecutionResult> createSubflowExecutionResult(
RunContext runContext, RunContext runContext,
TaskRun taskRun, TaskRun taskRun,
io.kestra.core.models.flows.Flow flow, FlowInterface flow,
Execution execution Execution execution
) { ) {
// we only create a worker task result when the execution is terminated // we only create a worker task result when the execution is terminated

View File

@@ -11,6 +11,7 @@ import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.NextTaskRun; import io.kestra.core.models.executions.NextTaskRun;
import io.kestra.core.models.executions.TaskRun; import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.hierarchies.GraphCluster; import io.kestra.core.models.hierarchies.GraphCluster;
import io.kestra.core.models.hierarchies.RelationType; import io.kestra.core.models.hierarchies.RelationType;
import io.kestra.core.models.tasks.FlowableTask; import io.kestra.core.models.tasks.FlowableTask;
@@ -248,7 +249,7 @@ public class Template extends Task implements FlowableTask<Template.Output> {
} }
@SuppressWarnings("deprecated") @SuppressWarnings("deprecated")
public static Flow injectTemplate(Flow flow, Execution execution, TriFunction<String, String, String, io.kestra.core.models.templates.Template> provider) throws InternalException { public static FlowWithSource injectTemplate(Flow flow, Execution execution, TriFunction<String, String, String, io.kestra.core.models.templates.Template> provider) throws InternalException {
AtomicReference<Flow> flowReference = new AtomicReference<>(flow); AtomicReference<Flow> flowReference = new AtomicReference<>(flow);
boolean haveTemplate = true; boolean haveTemplate = true;
@@ -282,7 +283,8 @@ public class Template extends Task implements FlowableTask<Template.Output> {
haveTemplate = !templates.isEmpty(); haveTemplate = !templates.isEmpty();
} }
return flowReference.get(); Flow f = flowReference.get();
return FlowWithSource.of(f, f.sourceOrGenerateIfNull());
} }
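Since injectTemplate now returns a FlowWithSource, callers get a serialized source alongside the expanded flow; a short usage sketch where templateProvider stands in for the TriFunction parameter shown in the signature above:

    FlowWithSource expanded = Template.injectTemplate(flow, execution, templateProvider);
    String yaml = expanded.getSource(); // generated via sourceOrGenerateIfNull() when no explicit source was set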
/** /**

View File

@@ -0,0 +1,29 @@
package io.kestra.core.models.flows;
import org.junit.jupiter.api.Test;
import java.util.Optional;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
class FlowIdTest {
@Test
void shouldGetUidWithoutRevision() {
String id = FlowId.uidWithoutRevision("tenant", "io.kestra.unittest", "flow-id");
assertThat(id, is("tenant_io.kestra.unittest_flow-id"));
}
@Test
void shouldGetUidGivenEmptyRevision() {
String id = FlowId.uid("tenant", "io.kestra.unittest", "flow-id", Optional.empty());
assertThat(id, is("tenant_io.kestra.unittest_flow-id_-1"));
}
@Test
void shouldGetUidGivenRevision() {
String id = FlowId.uid("tenant", "io.kestra.unittest", "flow-id", Optional.of(42));
assertThat(id, is("tenant_io.kestra.unittest_flow-id_42"));
}
}
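The expected strings above pin down the uid layout; a hypothetical helper consistent with these tests (not the actual FlowId implementation):

    // tenant, namespace and id joined by '_'; uid() appends the revision, defaulting to -1 when absent
    static String uidWithoutRevision(String tenantId, String namespace, String id) {
        return String.join("_", tenantId, namespace, id);
    }

    static String uid(String tenantId, String namespace, String id, java.util.Optional<Integer> revision) {
        return uidWithoutRevision(tenantId, namespace, id) + "_" + revision.orElse(-1);
    }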

View File

@@ -25,8 +25,6 @@ import static org.hamcrest.Matchers.*;
@KestraTest @KestraTest
class FlowTest { class FlowTest {
@Inject
YamlParser yamlParser = new YamlParser();
@Inject @Inject
ModelValidator modelValidator; ModelValidator modelValidator;
@@ -197,6 +195,6 @@ class FlowTest {
File file = new File(resource.getFile()); File file = new File(resource.getFile());
return yamlParser.parse(file, Flow.class); return YamlParser.parse(file, Flow.class);
} }
} }

View File

@@ -22,7 +22,7 @@ import static org.hamcrest.Matchers.*;
class FlowWithSourceTest { class FlowWithSourceTest {
@Test @Test
void source() throws JsonProcessingException { void source() throws JsonProcessingException {
var flow = Flow.builder() FlowWithSource flow = FlowWithSource.builder()
.id(IdUtils.create()) .id(IdUtils.create())
.namespace("io.kestra.unittest") .namespace("io.kestra.unittest")
.tasks(List.of( .tasks(List.of(
@@ -37,9 +37,9 @@ class FlowWithSourceTest {
)) ))
.build(); .build();
FlowWithSource flowWithSource = FlowWithSource.of(flow, flow.generateSource()); flow = flow.toBuilder().source(flow.sourceOrGenerateIfNull()).build();
String source = flowWithSource.getSource(); String source = flow.getSource();
assertThat(source, not(containsString("deleted: false"))); assertThat(source, not(containsString("deleted: false")));
assertThat(source, containsString("format: |\n")); assertThat(source, containsString("format: |\n"));
@@ -60,7 +60,7 @@ class FlowWithSourceTest {
.triggers(List.of(Schedule.builder().id("schedule").cron("0 1 9 * * *").build())); .triggers(List.of(Schedule.builder().id("schedule").cron("0 1 9 * * *").build()));
FlowWithSource flow = builder FlowWithSource flow = builder
.source(JacksonMapper.ofYaml().writeValueAsString(builder.build().toFlow())) .source(JacksonMapper.ofYaml().writeValueAsString(builder.build()))
.build(); .build();
String source = flow.getSource(); String source = flow.getSource();
@@ -73,7 +73,7 @@ class FlowWithSourceTest {
@Test @Test
void of() { void of() {
// test that all fields are transmitted to FlowWithSource // test that all fields are transmitted to FlowWithSource
Flow flow = Flow.builder() FlowWithSource flow = FlowWithSource.builder()
.tenantId("tenantId") .tenantId("tenantId")
.id(IdUtils.create()) .id(IdUtils.create())
.namespace("io.kestra.unittest") .namespace("io.kestra.unittest")
@@ -132,7 +132,7 @@ class FlowWithSourceTest {
.build() .build()
) )
.build(); .build();
String expectedSource = flow.generateSource() + " # additional comment"; String expectedSource = flow.sourceOrGenerateIfNull() + " # additional comment";
FlowWithSource of = FlowWithSource.of(flow, expectedSource); FlowWithSource of = FlowWithSource.of(flow, expectedSource);
assertThat(of.equalsWithoutRevision(flow), is(true)); assertThat(of.equalsWithoutRevision(flow), is(true));
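The builder-based pattern used above (and throughout this commit) replaces FlowWithSource.of(flow, flow.generateSource()); sourceOrGenerateIfNull() is assumed to return the stored source when one exists and to generate it otherwise:

    FlowWithSource withSource = flow.toBuilder().source(flow.sourceOrGenerateIfNull()).build();
    FlowWithSource annotated  = FlowWithSource.of(flow, flow.sourceOrGenerateIfNull() + " # additional comment");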

View File

@@ -36,8 +36,6 @@ import static org.hamcrest.Matchers.*;
@KestraTest(startRunner = true) @KestraTest(startRunner = true)
class FlowGraphTest { class FlowGraphTest {
@Inject
private YamlParser yamlParser = new YamlParser();
@Inject @Inject
private GraphService graphService; private GraphService graphService;
@@ -379,7 +377,7 @@ class FlowGraphTest {
File file = new File(resource.getFile()); File file = new File(resource.getFile());
return yamlParser.parse(file, Flow.class).withSource(Files.readString(file.toPath())); return YamlParser.parse(file, FlowWithSource.class).toBuilder().source(Files.readString(file.toPath())).build();
} }
private static AbstractGraph node(FlowGraph flowGraph, String taskId) { private static AbstractGraph node(FlowGraph flowGraph, String taskId) {

View File

@@ -1,10 +1,9 @@
package io.kestra.core.repositories; package io.kestra.core.repositories;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.google.common.collect.ImmutableList;
import io.kestra.core.Helpers; import io.kestra.core.Helpers;
import io.kestra.core.events.CrudEvent; import io.kestra.core.events.CrudEvent;
import io.kestra.core.events.CrudEventType; import io.kestra.core.events.CrudEventType;
import io.kestra.core.models.QueryFilter;
import io.kestra.core.models.SearchResult; import io.kestra.core.models.SearchResult;
import io.kestra.core.models.executions.Execution; import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.*; import io.kestra.core.models.flows.*;
@@ -12,12 +11,9 @@ import io.kestra.core.models.flows.input.StringInput;
import io.kestra.core.models.property.Property; import io.kestra.core.models.property.Property;
import io.kestra.core.queues.QueueException; import io.kestra.core.queues.QueueException;
import io.kestra.core.schedulers.AbstractSchedulerTest; import io.kestra.core.schedulers.AbstractSchedulerTest;
import io.kestra.core.serializers.JacksonMapper;
import io.kestra.core.services.FlowService; import io.kestra.core.services.FlowService;
import io.kestra.core.services.PluginDefaultService;
import io.kestra.plugin.core.debug.Return; import io.kestra.plugin.core.debug.Return;
import io.kestra.plugin.core.flow.Template; import io.kestra.plugin.core.flow.Template;
import io.kestra.plugin.core.log.Log;
import io.kestra.core.utils.Await; import io.kestra.core.utils.Await;
import io.kestra.core.utils.IdUtils; import io.kestra.core.utils.IdUtils;
import io.kestra.core.utils.TestsUtils; import io.kestra.core.utils.TestsUtils;
@@ -50,6 +46,9 @@ import static org.mockito.Mockito.spy;
@KestraTest @KestraTest
@TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestInstance(TestInstance.Lifecycle.PER_CLASS)
public abstract class AbstractFlowRepositoryTest { public abstract class AbstractFlowRepositoryTest {
public static final String TEST_TENANT_ID = "tenant";
public static final String TEST_NAMESPACE = "io.kestra.unittest";
public static final String TEST_FLOW_ID = "test";
@Inject @Inject
protected FlowRepositoryInterface flowRepository; protected FlowRepositoryInterface flowRepository;
@@ -59,32 +58,29 @@ public abstract class AbstractFlowRepositoryTest {
@Inject @Inject
private LocalFlowRepositoryLoader repositoryLoader; private LocalFlowRepositoryLoader repositoryLoader;
@Inject
protected PluginDefaultService pluginDefaultService;
@BeforeEach @BeforeEach
protected void init() throws IOException, URISyntaxException { protected void init() throws IOException, URISyntaxException {
TestsUtils.loads(repositoryLoader); TestsUtils.loads(repositoryLoader);
FlowListener.reset(); FlowListener.reset();
} }
private static Flow.FlowBuilder<?, ?> builder() { private static FlowWithSource.FlowWithSourceBuilder<?, ?> builder() {
return builder(IdUtils.create(), "test"); return builder(IdUtils.create(), TEST_FLOW_ID);
} }
private static Flow.FlowBuilder<?, ?> builder(String flowId, String taskId) { private static FlowWithSource.FlowWithSourceBuilder<?, ?> builder(String flowId, String taskId) {
return Flow.builder() return FlowWithSource.builder()
.id(flowId) .id(flowId)
.namespace("io.kestra.unittest") .namespace(TEST_NAMESPACE)
.tasks(Collections.singletonList(Return.builder().id(taskId).type(Return.class.getName()).format(Property.of("test")).build())); .tasks(Collections.singletonList(Return.builder().id(taskId).type(Return.class.getName()).format(Property.of(TEST_FLOW_ID)).build()));
} }
@Test @Test
void findById() { void findById() {
Flow flow = builder() FlowWithSource flow = builder()
.revision(3) .revision(3)
.build(); .build();
flow = flowRepository.create(flow, flow.generateSource(), pluginDefaultService.injectDefaults(flow.withSource(flow.generateSource()))); flow = flowRepository.create(GenericFlow.of(flow));
try { try {
Optional<Flow> full = flowRepository.findById(null, flow.getNamespace(), flow.getId()); Optional<Flow> full = flowRepository.findById(null, flow.getNamespace(), flow.getId());
assertThat(full.isPresent(), is(true)); assertThat(full.isPresent(), is(true));
@@ -99,10 +95,10 @@ public abstract class AbstractFlowRepositoryTest {
@Test @Test
void findByIdWithoutAcl() { void findByIdWithoutAcl() {
Flow flow = builder() FlowWithSource flow = builder()
.revision(3) .revision(3)
.build(); .build();
flow = flowRepository.create(flow, flow.generateSource(), pluginDefaultService.injectDefaults(flow.withSource(flow.generateSource()))); flow = flowRepository.create(GenericFlow.of(flow));
try { try {
Optional<Flow> full = flowRepository.findByIdWithoutAcl(null, flow.getNamespace(), flow.getId(), Optional.empty()); Optional<Flow> full = flowRepository.findByIdWithoutAcl(null, flow.getNamespace(), flow.getId(), Optional.empty());
assertThat(full.isPresent(), is(true)); assertThat(full.isPresent(), is(true));
@@ -117,10 +113,11 @@ public abstract class AbstractFlowRepositoryTest {
@Test @Test
void findByIdWithSource() { void findByIdWithSource() {
Flow flow = builder() FlowWithSource flow = builder()
.revision(3) .revision(3)
.build(); .build();
flow = flowRepository.create(flow, "# comment\n" + flow.generateSource(), pluginDefaultService.injectDefaults(flow.withSource(flow.generateSource()))); String source = "# comment\n" + flow.sourceOrGenerateIfNull();
flow = flowRepository.create(GenericFlow.fromYaml(null, source));
try { try {
Optional<FlowWithSource> full = flowRepository.findByIdWithSource(null, flow.getNamespace(), flow.getId()); Optional<FlowWithSource> full = flowRepository.findByIdWithSource(null, flow.getNamespace(), flow.getId());
@@ -136,96 +133,10 @@ public abstract class AbstractFlowRepositoryTest {
} }
} }
@Test
protected void revision() throws JsonProcessingException {
String flowId = IdUtils.create();
// create with builder
Flow first = Flow.builder()
.id(flowId)
.namespace("io.kestra.unittest")
.tasks(Collections.singletonList(Return.builder().id("test").type(Return.class.getName()).format(Property.of("test")).build()))
.inputs(ImmutableList.of(StringInput.builder().type(Type.STRING).id("a").build()))
.build();
// create with repository
FlowWithSource flow = flowRepository.create(first, first.generateSource(), pluginDefaultService.injectDefaults(first.withSource(first.generateSource())));
List<FlowWithSource> revisions;
try {
// submit new one, no change
Flow notSaved = flowRepository.update(flow, flow, first.generateSource(), pluginDefaultService.injectDefaults(flow));
assertThat(notSaved.getRevision(), is(flow.getRevision()));
// submit new one with change
Flow flowRev2 = Flow.builder()
.id(flowId)
.namespace("io.kestra.unittest")
.tasks(Collections.singletonList(
Log.builder()
.id(IdUtils.create())
.type(Log.class.getName())
.message("Hello World")
.build()
))
.inputs(ImmutableList.of(StringInput.builder().type(Type.STRING).id("b").build()))
.build();
// revision is incremented
FlowWithSource incremented = flowRepository.update(flowRev2, flow, flowRev2.generateSource(), pluginDefaultService.injectDefaults(flowRev2.withSource(flowRev2.generateSource())));
assertThat(incremented.getRevision(), is(2));
// revision is well saved
revisions = flowRepository.findRevisions(null, flow.getNamespace(), flow.getId());
assertThat(revisions.size(), is(2));
// submit the same one serialized, no changed
FlowWithSource incremented2 = flowRepository.update(
JacksonMapper.ofJson().readValue(JacksonMapper.ofJson().writeValueAsString(flowRev2), Flow.class),
flowRev2,
JacksonMapper.ofJson().readValue(JacksonMapper.ofJson().writeValueAsString(flowRev2), Flow.class).generateSource(),
pluginDefaultService.injectDefaults(flowRev2.withSource(flowRev2.generateSource()))
);
assertThat(incremented2.getRevision(), is(2));
// resubmit first one, revision is incremented
FlowWithSource incremented3 = flowRepository.update(
JacksonMapper.ofJson().readValue(JacksonMapper.ofJson().writeValueAsString(flow.toFlow()), Flow.class),
flowRev2,
JacksonMapper.ofJson().readValue(JacksonMapper.ofJson().writeValueAsString(flow.toFlow()), Flow.class).generateSource(),
pluginDefaultService.injectDefaults(JacksonMapper.ofJson().readValue(JacksonMapper.ofJson().writeValueAsString(flow.toFlow()), Flow.class).withSource(flow.getSource()))
);
assertThat(incremented3.getRevision(), is(3));
} finally {
deleteFlow(flow);
}
// revisions is still findable after delete
revisions = flowRepository.findRevisions(null, flow.getNamespace(), flow.getId());
assertThat(revisions.size(), is(4));
Optional<Flow> findDeleted = flowRepository.findById(
null,
flow.getNamespace(),
flow.getId(),
Optional.of(flow.getRevision())
);
assertThat(findDeleted.isPresent(), is(true));
assertThat(findDeleted.get().getRevision(), is(flow.getRevision()));
// recreate the first one, we have a new revision
Flow incremented4 = flowRepository.create(flow, flow.generateSource(), pluginDefaultService.injectDefaults(flow));
try {
assertThat(incremented4.getRevision(), is(5));
} finally {
deleteFlow(incremented4);
}
}
@Test @Test
void save() { void save() {
Flow flow = builder().revision(12).build(); FlowWithSource flow = builder().revision(12).build();
Flow save = flowRepository.create(flow, flow.generateSource(), pluginDefaultService.injectDefaults(flow.withSource(flow.generateSource()))); FlowWithSource save = flowRepository.create(GenericFlow.of(flow));
try { try {
assertThat(save.getRevision(), is(1)); assertThat(save.getRevision(), is(1));
@@ -236,8 +147,8 @@ public abstract class AbstractFlowRepositoryTest {
@Test @Test
void saveNoRevision() { void saveNoRevision() {
Flow flow = builder().build(); FlowWithSource flow = builder().build();
Flow save = flowRepository.create(flow, flow.generateSource(), pluginDefaultService.injectDefaults(flow.withSource(flow.generateSource()))); FlowWithSource save = flowRepository.create(GenericFlow.of(flow));
try { try {
assertThat(save.getRevision(), is(1)); assertThat(save.getRevision(), is(1));
@@ -304,8 +215,8 @@ public abstract class AbstractFlowRepositoryTest {
Flow flow = builder() Flow flow = builder()
.revision(3) .revision(3)
.build(); .build();
String flowSource = "# comment\n" + flow.generateSource(); String flowSource = "# comment\n" + flow.sourceOrGenerateIfNull();
flow = flowRepository.create(flow, flowSource, pluginDefaultService.injectDefaults(flow.withSource(flowSource))); flow = flowRepository.create(GenericFlow.fromYaml(null, flowSource));
try { try {
List<FlowWithSource> save = flowRepository.findByNamespaceWithSource(null, flow.getNamespace()); List<FlowWithSource> save = flowRepository.findByNamespaceWithSource(null, flow.getNamespace());
@@ -360,7 +271,7 @@ public abstract class AbstractFlowRepositoryTest {
void delete() { void delete() {
Flow flow = builder().build(); Flow flow = builder().build();
FlowWithSource save = flowRepository.create(flow, flow.generateSource(), pluginDefaultService.injectDefaults(flow.withSource(flow.generateSource()))); FlowWithSource save = flowRepository.create(GenericFlow.of(flow));
try { try {
assertThat(flowRepository.findById(null, save.getNamespace(), save.getId()).isPresent(), is(true)); assertThat(flowRepository.findById(null, save.getNamespace(), save.getId()).isPresent(), is(true));
@@ -384,12 +295,12 @@ public abstract class AbstractFlowRepositoryTest {
Flow flow = Flow.builder() Flow flow = Flow.builder()
.id(flowId) .id(flowId)
.namespace("io.kestra.unittest") .namespace(TEST_NAMESPACE)
.inputs(ImmutableList.of(StringInput.builder().type(Type.STRING).id("a").build())) .inputs(List.of(StringInput.builder().type(Type.STRING).id("a").build()))
.tasks(Collections.singletonList(Return.builder().id("test").type(Return.class.getName()).format(Property.of("test")).build())) .tasks(Collections.singletonList(Return.builder().id(TEST_FLOW_ID).type(Return.class.getName()).format(Property.of(TEST_FLOW_ID)).build()))
.build(); .build();
Flow save = flowRepository.create(flow, flow.generateSource(), pluginDefaultService.injectDefaults(flow.withSource(flow.generateSource()))); Flow save = flowRepository.create(GenericFlow.of(flow));
try { try {
assertThat(flowRepository.findById(null, flow.getNamespace(), flow.getId()).isPresent(), is(true)); assertThat(flowRepository.findById(null, flow.getNamespace(), flow.getId()).isPresent(), is(true));
@@ -397,14 +308,14 @@ public abstract class AbstractFlowRepositoryTest {
Flow update = Flow.builder() Flow update = Flow.builder()
.id(IdUtils.create()) .id(IdUtils.create())
.namespace("io.kestra.unittest2") .namespace("io.kestra.unittest2")
.inputs(ImmutableList.of(StringInput.builder().type(Type.STRING).id("b").build())) .inputs(List.of(StringInput.builder().type(Type.STRING).id("b").build()))
.tasks(Collections.singletonList(Return.builder().id("test").type(Return.class.getName()).format(Property.of("test")).build())) .tasks(Collections.singletonList(Return.builder().id(TEST_FLOW_ID).type(Return.class.getName()).format(Property.of(TEST_FLOW_ID)).build()))
.build(); .build();
; ;
ConstraintViolationException e = assertThrows( ConstraintViolationException e = assertThrows(
ConstraintViolationException.class, ConstraintViolationException.class,
() -> flowRepository.update(update, flow, update.generateSource(), pluginDefaultService.injectDefaults(update.withSource(update.generateSource()))) () -> flowRepository.update(GenericFlow.of(update), flow)
); );
assertThat(e.getConstraintViolations().size(), is(2)); assertThat(e.getConstraintViolations().size(), is(2));
@@ -419,26 +330,26 @@ public abstract class AbstractFlowRepositoryTest {
Flow flow = Flow.builder() Flow flow = Flow.builder()
.id(flowId) .id(flowId)
.namespace("io.kestra.unittest") .namespace(TEST_NAMESPACE)
.triggers(Collections.singletonList(AbstractSchedulerTest.UnitTest.builder() .triggers(Collections.singletonList(AbstractSchedulerTest.UnitTest.builder()
.id("sleep") .id("sleep")
.type(AbstractSchedulerTest.UnitTest.class.getName()) .type(AbstractSchedulerTest.UnitTest.class.getName())
.build())) .build()))
.tasks(Collections.singletonList(Return.builder().id("test").type(Return.class.getName()).format(Property.of("test")).build())) .tasks(Collections.singletonList(Return.builder().id(TEST_FLOW_ID).type(Return.class.getName()).format(Property.of(TEST_FLOW_ID)).build()))
.build(); .build();
flow = flowRepository.create(flow, flow.generateSource(), pluginDefaultService.injectDefaults(flow.withSource(flow.generateSource()))); flow = flowRepository.create(GenericFlow.of(flow));
try { try {
assertThat(flowRepository.findById(null, flow.getNamespace(), flow.getId()).isPresent(), is(true)); assertThat(flowRepository.findById(null, flow.getNamespace(), flow.getId()).isPresent(), is(true));
Flow update = Flow.builder() Flow update = Flow.builder()
.id(flowId) .id(flowId)
.namespace("io.kestra.unittest") .namespace(TEST_NAMESPACE)
.tasks(Collections.singletonList(Return.builder().id("test").type(Return.class.getName()).format(Property.of("test")).build())) .tasks(Collections.singletonList(Return.builder().id(TEST_FLOW_ID).type(Return.class.getName()).format(Property.of(TEST_FLOW_ID)).build()))
.build(); .build();
; ;
Flow updated = flowRepository.update(update, flow, update.generateSource(), pluginDefaultService.injectDefaults(update.withSource(update.generateSource()))); Flow updated = flowRepository.update(GenericFlow.of(update), flow);
assertThat(updated.getTriggers(), is(nullValue())); assertThat(updated.getTriggers(), is(nullValue()));
} finally { } finally {
deleteFlow(flow); deleteFlow(flow);
@@ -457,15 +368,15 @@ public abstract class AbstractFlowRepositoryTest {
Flow flow = Flow.builder() Flow flow = Flow.builder()
.id(flowId) .id(flowId)
.namespace("io.kestra.unittest") .namespace(TEST_NAMESPACE)
.triggers(Collections.singletonList(AbstractSchedulerTest.UnitTest.builder() .triggers(Collections.singletonList(AbstractSchedulerTest.UnitTest.builder()
.id("sleep") .id("sleep")
.type(AbstractSchedulerTest.UnitTest.class.getName()) .type(AbstractSchedulerTest.UnitTest.class.getName())
.build())) .build()))
.tasks(Collections.singletonList(Return.builder().id("test").type(Return.class.getName()).format(Property.of("test")).build())) .tasks(Collections.singletonList(Return.builder().id(TEST_FLOW_ID).type(Return.class.getName()).format(Property.of(TEST_FLOW_ID)).build()))
.build(); .build();
Flow save = flowRepository.create(flow, flow.generateSource(), pluginDefaultService.injectDefaults(flow.withSource(flow.generateSource()))); Flow save = flowRepository.create(GenericFlow.of(flow));
try { try {
assertThat(flowRepository.findById(null, flow.getNamespace(), flow.getId()).isPresent(), is(true)); assertThat(flowRepository.findById(null, flow.getNamespace(), flow.getId()).isPresent(), is(true));
} finally { } finally {
@@ -489,7 +400,7 @@ public abstract class AbstractFlowRepositoryTest {
Template template = Template.builder() Template template = Template.builder()
.id(IdUtils.create()) .id(IdUtils.create())
.type(Template.class.getName()) .type(Template.class.getName())
.namespace("test") .namespace(TEST_FLOW_ID)
.templateId("testTemplate") .templateId("testTemplate")
.build(); .build();
@@ -501,15 +412,11 @@ public abstract class AbstractFlowRepositoryTest {
Flow flow = Flow.builder() Flow flow = Flow.builder()
.id(IdUtils.create()) .id(IdUtils.create())
.namespace("io.kestra.unittest") .namespace(TEST_NAMESPACE)
.tasks(Collections.singletonList(templateSpy)) .tasks(Collections.singletonList(templateSpy))
.build(); .build();
flow = flowRepository.create( flow = flowRepository.create(GenericFlow.of(flow));
flow,
flow.generateSource(),
flow
);
try { try {
Optional<Flow> found = flowRepository.findById(null, flow.getNamespace(), flow.getId()); Optional<Flow> found = flowRepository.findById(null, flow.getNamespace(), flow.getId());
@@ -523,52 +430,181 @@ public abstract class AbstractFlowRepositoryTest {
} }
@Test @Test
protected void lastRevision() { protected void shouldReturnNullRevisionForNonExistingFlow() {
String namespace = "io.kestra.unittest"; assertThat(flowRepository.lastRevision(TEST_TENANT_ID, TEST_NAMESPACE, IdUtils.create()), nullValue());
String flowId = IdUtils.create(); }
String tenantId = "tenant";
assertThat(flowRepository.lastRevision(tenantId, namespace, flowId), nullValue()); @Test
protected void shouldReturnLastRevisionOnCreate() {
// create with builder // Given
Flow first = Flow.builder() final List<Flow> toDelete = new ArrayList<>();
.tenantId(tenantId) final String flowId = IdUtils.create();
.id(flowId)
.namespace(namespace)
.tasks(Collections.singletonList(Return.builder().id("test").type(Return.class.getName()).format(Property.of("test")).build()))
.inputs(ImmutableList.of(StringInput.builder().type(Type.STRING).id("a").build()))
.build();
// create with repository
first = flowRepository.create(first, first.generateSource(), pluginDefaultService.injectDefaults(first.withSource(first.generateSource())));
try { try {
assertThat(flowRepository.lastRevision(tenantId, namespace, flowId), is(1)); // When
toDelete.add(flowRepository.create(createTestingLogFlow(flowId, "???")));
Integer result = flowRepository.lastRevision(TEST_TENANT_ID, TEST_NAMESPACE, flowId);
// submit new one with change // Then
assertThat(result, is(1));
Flow flowRev2 = first.toBuilder() assertThat(flowRepository.lastRevision(TEST_TENANT_ID, TEST_NAMESPACE, flowId), is(1));
.tasks(Collections.singletonList(
Log.builder()
.id(IdUtils.create())
.type(Log.class.getName())
.message("Hello World")
.build()
))
.inputs(ImmutableList.of(StringInput.builder().type(Type.STRING).id("b").build()))
.build();
first = flowRepository.update(flowRev2, first, flowRev2.generateSource(), pluginDefaultService.injectDefaults(flowRev2.withSource(flowRev2.generateSource())));
assertThat(flowRepository.lastRevision(tenantId, namespace, flowId), is(2));
} finally { } finally {
deleteFlow(first); toDelete.forEach(this::deleteFlow);
} }
} }
@Test
protected void shouldIncrementRevisionOnDelete() {
// Given
final String flowId = IdUtils.create();
FlowWithSource created = flowRepository.create(createTestingLogFlow(flowId, "first"));
assertThat(flowRepository.findRevisions(TEST_TENANT_ID, TEST_NAMESPACE, flowId).size(), is(1));
// When
flowRepository.delete(created);
// Then
assertThat(flowRepository.findRevisions(TEST_TENANT_ID, TEST_NAMESPACE, flowId).size(), is(2));
}
@Test
protected void shouldIncrementRevisionOnCreateAfterDelete() {
// Given
final List<Flow> toDelete = new ArrayList<>();
final String flowId = IdUtils.create();
try {
// Given
flowRepository.delete(
flowRepository.create(createTestingLogFlow(flowId, "first"))
);
// When
toDelete.add(flowRepository.create(createTestingLogFlow(flowId, "second")));
// Then
assertThat(flowRepository.findRevisions(TEST_TENANT_ID, TEST_NAMESPACE, flowId).size(), is(3));
assertThat(flowRepository.lastRevision(TEST_TENANT_ID, TEST_NAMESPACE, flowId), is(3));
} finally {
toDelete.forEach(this::deleteFlow);
}
}
@Test
protected void shouldReturnNullForLastRevisionAfterDelete() {
// Given
final List<Flow> toDelete = new ArrayList<>();
final String flowId = IdUtils.create();
try {
// Given
FlowWithSource created = flowRepository.create(createTestingLogFlow(flowId, "first"));
toDelete.add(created);
FlowWithSource updated = flowRepository.update(createTestingLogFlow(flowId, "second"), created);
toDelete.add(updated);
// When
flowRepository.delete(updated);
// Then
assertThat(flowRepository.findById(TEST_TENANT_ID, TEST_NAMESPACE, flowId, Optional.empty()), is(Optional.empty()));
assertThat(flowRepository.lastRevision(TEST_TENANT_ID, TEST_NAMESPACE, flowId), is(nullValue()));
} finally {
toDelete.forEach(this::deleteFlow);
}
}
@Test
protected void shouldFindAllRevisionsAfterDelete() {
// Given
final List<Flow> toDelete = new ArrayList<>();
final String flowId = IdUtils.create();
try {
// Given
FlowWithSource created = flowRepository.create(createTestingLogFlow(flowId, "first"));
toDelete.add(created);
FlowWithSource updated = flowRepository.update(createTestingLogFlow(flowId, "second"), created);
toDelete.add(updated);
// When
flowRepository.delete(updated);
// Then
assertThat(flowRepository.findById(TEST_TENANT_ID, TEST_NAMESPACE, flowId, Optional.empty()), is(Optional.empty()));
assertThat(flowRepository.findRevisions(TEST_TENANT_ID, TEST_NAMESPACE, flowId).size(), is(3));
} finally {
toDelete.forEach(this::deleteFlow);
}
}
@Test
protected void shouldIncrementRevisionOnUpdateGivenNotEqualSource() {
final List<Flow> toDelete = new ArrayList<>();
final String flowId = IdUtils.create();
try {
// Given
FlowWithSource created = flowRepository.create(createTestingLogFlow(flowId, "first"));
toDelete.add(created);
// When
FlowWithSource updated = flowRepository.update(createTestingLogFlow(flowId, "second"), created);
toDelete.add(updated);
// Then
assertThat(updated.getRevision(), is(2));
assertThat(flowRepository.lastRevision(TEST_TENANT_ID, TEST_NAMESPACE, flowId), is(2));
} finally {
toDelete.forEach(this::deleteFlow);
}
}
@Test
protected void shouldNotIncrementRevisionOnUpdateGivenEqualSource() {
final List<Flow> toDelete = new ArrayList<>();
final String flowId = IdUtils.create();
try {
// Given
FlowWithSource created = flowRepository.create(createTestingLogFlow(flowId, "first"));
toDelete.add(created);
// When
FlowWithSource updated = flowRepository.update(createTestingLogFlow(flowId, "first"), created);
toDelete.add(updated);
// Then
assertThat(updated.getRevision(), is(1));
assertThat(flowRepository.lastRevision(TEST_TENANT_ID, TEST_NAMESPACE, flowId), is(1));
} finally {
toDelete.forEach(this::deleteFlow);
}
}
@Test
void shouldReturnForFindGivenQueryWildcard() {
ArrayListTotal<Flow> flows = flowRepository.find(Pageable.from(1, 10), "*", null, null, null, Map.of());
assertThat(flows.size(), is(10));
assertThat(flows.getTotal(), is(Helpers.FLOWS_COUNT));
}
@Test
void shouldReturnForGivenQueryWildCardFilters() {
List<QueryFilter> filters = List.of(
QueryFilter.builder().field(QueryFilter.Field.QUERY).operation(QueryFilter.Op.EQUALS).value("*").build()
);
ArrayListTotal<Flow> flows = flowRepository.find(Pageable.from(1, 10), null, filters);
assertThat(flows.size(), is(10));
assertThat(flows.getTotal(), is(Helpers.FLOWS_COUNT));
}
@Test @Test
void findByExecution() { void findByExecution() {
Flow flow = builder() Flow flow = builder()
.revision(1) .revision(1)
.build(); .build();
flowRepository.create(flow, flow.generateSource(), pluginDefaultService.injectDefaults(flow)); flowRepository.create(GenericFlow.of(flow));
Execution execution = Execution.builder() Execution execution = Execution.builder()
.id(IdUtils.create()) .id(IdUtils.create())
.namespace(flow.getNamespace()) .namespace(flow.getNamespace())
@@ -599,7 +635,7 @@ public abstract class AbstractFlowRepositoryTest {
Flow flow = builder() Flow flow = builder()
.revision(3) .revision(3)
.build(); .build();
flowRepository.create(flow, flow.generateSource(), pluginDefaultService.injectDefaults(flow)); flowRepository.create(GenericFlow.of(flow));
Execution execution = Execution.builder() Execution execution = Execution.builder()
.id(IdUtils.create()) .id(IdUtils.create())
.namespace(flow.getNamespace()) .namespace(flow.getNamespace())
@@ -629,8 +665,8 @@ public abstract class AbstractFlowRepositoryTest {
FlowWithSource toDelete = null; FlowWithSource toDelete = null;
try { try {
// Given // Given
Flow flow = createTestFlowForNamespace("io.kestra.unittest"); Flow flow = createTestFlowForNamespace(TEST_NAMESPACE);
toDelete = flowRepository.create(flow, "", flow); toDelete = flowRepository.create(GenericFlow.of(flow));
// When // When
int count = flowRepository.count(null); int count = flowRepository.count(null);
@@ -647,14 +683,14 @@ public abstract class AbstractFlowRepositoryTest {
void shouldCountForNullTenantGivenNamespace() { void shouldCountForNullTenantGivenNamespace() {
List<FlowWithSource> toDelete = new ArrayList<>(); List<FlowWithSource> toDelete = new ArrayList<>();
try { try {
toDelete.add(flowRepository.create(createTestFlowForNamespace("io.kestra.unittest.sub"), "", createTestFlowForNamespace("io.kestra.unittest.sub"))); toDelete.add(flowRepository.create(GenericFlow.of(createTestFlowForNamespace("io.kestra.unittest.sub"))));
toDelete.add(flowRepository.create(createTestFlowForNamespace("io.kestra.unittest.shouldcountbynamespacefornulltenant"), "", createTestFlowForNamespace("io.kestra.unittest.shouldcountbynamespacefornulltenant"))); toDelete.add(flowRepository.create(GenericFlow.of(createTestFlowForNamespace("io.kestra.unittest.shouldcountbynamespacefornulltenant"))));
toDelete.add(flowRepository.create(createTestFlowForNamespace("com.kestra.unittest"), "", createTestFlowForNamespace("com.kestra.unittest"))); toDelete.add(flowRepository.create(GenericFlow.of(createTestFlowForNamespace("com.kestra.unittest"))));
int count = flowRepository.countForNamespace(null, "io.kestra.unittest.shouldcountbynamespacefornulltenant"); int count = flowRepository.countForNamespace(null, "io.kestra.unittest.shouldcountbynamespacefornulltenant");
assertThat(count, is(1)); assertThat(count, is(1));
count = flowRepository.countForNamespace(null, "io.kestra.unittest"); count = flowRepository.countForNamespace(null, TEST_NAMESPACE);
assertThat(count, is(2)); assertThat(count, is(2));
} finally { } finally {
for (FlowWithSource flow : toDelete) { for (FlowWithSource flow : toDelete) {
@@ -676,8 +712,12 @@ public abstract class AbstractFlowRepositoryTest {
} }
private void deleteFlow(Flow flow) { private void deleteFlow(Flow flow) {
Integer revision = flowRepository.lastRevision(flow.getTenantId(), flow.getNamespace(), flow.getId()); if (flow == null) {
flowRepository.delete(flow.toBuilder().revision(revision).build().withSource(flow.generateSource())); return;
}
flowRepository
.findByIdWithSource(flow.getTenantId(), flow.getNamespace(), flow.getId())
.ifPresent(delete -> flowRepository.delete(flow.toBuilder().revision(null).build()));
} }
@Singleton @Singleton
@@ -694,4 +734,17 @@ public abstract class AbstractFlowRepositoryTest {
emits = new ArrayList<>(); emits = new ArrayList<>();
} }
} }
private static GenericFlow createTestingLogFlow(String id, String logMessage) {
String source = """
id: %s
namespace: %s
tasks:
- id: log
type: io.kestra.plugin.core.log.Log
message: %s
""".formatted(id, TEST_NAMESPACE, logMessage);
return GenericFlow.fromYaml(TEST_TENANT_ID, source);
}
} }
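The repository calls in these tests converge on two GenericFlow entry points; the calls and return types below are taken from this diff, while the note on defaults is an assumption (the pluginDefaultService.injectDefaults argument simply disappears from the call sites):

    // from an already-typed flow
    FlowWithSource created  = flowRepository.create(GenericFlow.of(flow));
    // from raw YAML source, as in createTestingLogFlow above
    FlowWithSource fromYaml = flowRepository.create(GenericFlow.fromYaml(TEST_TENANT_ID, source));
    // updates take the new GenericFlow first and the previous revision second
    FlowWithSource updated  = flowRepository.update(GenericFlow.of(update), created);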

View File

@@ -1,10 +1,13 @@
package io.kestra.core.runners; package io.kestra.core.runners;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithSource; import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.models.flows.State; import io.kestra.core.models.flows.State;
import io.kestra.core.queues.QueueException; import io.kestra.core.queues.QueueException;
import io.kestra.core.queues.QueueFactoryInterface; import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface; import io.kestra.core.queues.QueueInterface;
import io.kestra.core.serializers.JacksonMapper;
import io.kestra.core.services.FlowListenersInterface; import io.kestra.core.services.FlowListenersInterface;
import io.kestra.core.utils.Await; import io.kestra.core.utils.Await;
import io.kestra.core.utils.TestsUtils; import io.kestra.core.utils.TestsUtils;
@@ -266,14 +269,17 @@ public class DeserializationIssuesCaseTest {
assertThat(workerTriggerResult.get().getSuccess(), is(Boolean.FALSE)); assertThat(workerTriggerResult.get().getSuccess(), is(Boolean.FALSE));
} }
public void flowDeserializationIssue(Consumer<QueueMessage> sendToQueue) throws TimeoutException, QueueException{ public void flowDeserializationIssue(Consumer<QueueMessage> sendToQueue) throws Exception {
AtomicReference<List<FlowWithSource>> flows = new AtomicReference<>(); AtomicReference<List<FlowWithSource>> flows = new AtomicReference<>();
flowListeners.listen(newFlows -> flows.set(newFlows)); flowListeners.listen(flows::set);
sendToQueue.accept(new QueueMessage(FlowWithSource.class, INVALID_FLOW_KEY, INVALID_FLOW_VALUE)); sendToQueue.accept(new QueueMessage(FlowInterface.class, INVALID_FLOW_KEY, INVALID_FLOW_VALUE));
Await.until( Await.until(
() -> flows.get() != null && flows.get().stream().anyMatch(newFlow -> newFlow.uid().equals("company.team_hello-world_2") && (newFlow.getTasks() == null || newFlow.getTasks().isEmpty())), () -> flows.get() != null && flows.get()
.stream()
.anyMatch(newFlow -> newFlow.uid().equals("company.team_hello-world_2"))
,
Duration.ofMillis(100), Duration.ofMillis(100),
Duration.ofMinutes(1) Duration.ofMinutes(1)
); );

View File

@@ -9,13 +9,12 @@ import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowWithSource; import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.State; import io.kestra.core.models.flows.State;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.models.property.Property; import io.kestra.core.models.property.Property;
import io.kestra.core.repositories.ExecutionRepositoryInterface; import io.kestra.core.repositories.ExecutionRepositoryInterface;
import io.kestra.core.repositories.FlowRepositoryInterface; import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.repositories.LogRepositoryInterface; import io.kestra.core.repositories.LogRepositoryInterface;
import io.kestra.core.serializers.JacksonMapper;
import io.kestra.core.services.ExecutionService; import io.kestra.core.services.ExecutionService;
import io.kestra.core.services.PluginDefaultService;
import io.kestra.core.utils.Await; import io.kestra.core.utils.Await;
import io.kestra.plugin.core.debug.Return; import io.kestra.plugin.core.debug.Return;
import jakarta.inject.Inject; import jakarta.inject.Inject;
@@ -43,9 +42,6 @@ class ExecutionServiceTest {
@Inject @Inject
FlowRepositoryInterface flowRepository; FlowRepositoryInterface flowRepository;
@Inject
PluginDefaultService pluginDefaultService;
@Inject @Inject
ExecutionRepositoryInterface executionRepository; ExecutionRepositoryInterface executionRepository;
@@ -83,7 +79,7 @@ class ExecutionServiceTest {
FlowWithSource flow = flowRepository.findByIdWithSource(null, "io.kestra.tests", "restart_last_failed").orElseThrow(); FlowWithSource flow = flowRepository.findByIdWithSource(null, "io.kestra.tests", "restart_last_failed").orElseThrow();
flowRepository.update( flowRepository.update(
flow, GenericFlow.of(flow),
flow.updateTask( flow.updateTask(
"a", "a",
Return.builder() Return.builder()
@@ -91,9 +87,7 @@ class ExecutionServiceTest {
.type(Return.class.getName()) .type(Return.class.getName())
.format(Property.of("replace")) .format(Property.of("replace"))
.build() .build()
), )
JacksonMapper.ofYaml().writeValueAsString(flow),
pluginDefaultService.injectDefaults(flow)
); );

View File

@@ -1,11 +1,11 @@
package io.kestra.core.runners; package io.kestra.core.runners;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithSource; import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.models.property.Property; import io.kestra.core.models.property.Property;
import io.kestra.core.services.PluginDefaultService;
import io.kestra.core.junit.annotations.KestraTest; import io.kestra.core.junit.annotations.KestraTest;
import lombok.SneakyThrows; import lombok.SneakyThrows;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.repositories.FlowRepositoryInterface; import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.services.FlowListenersInterface; import io.kestra.core.services.FlowListenersInterface;
import io.kestra.plugin.core.debug.Return; import io.kestra.plugin.core.debug.Return;
@@ -25,11 +25,8 @@ abstract public class FlowListenersTest {
@Inject @Inject
protected FlowRepositoryInterface flowRepository; protected FlowRepositoryInterface flowRepository;
@Inject
protected PluginDefaultService pluginDefaultService;
protected static FlowWithSource create(String flowId, String taskId) { protected static FlowWithSource create(String flowId, String taskId) {
Flow flow = Flow.builder() FlowWithSource flow = FlowWithSource.builder()
.id(flowId) .id(flowId)
.namespace("io.kestra.unittest") .namespace("io.kestra.unittest")
.revision(1) .revision(1)
@@ -39,7 +36,7 @@ abstract public class FlowListenersTest {
.format(Property.of("test")) .format(Property.of("test"))
.build())) .build()))
.build(); .build();
return flow.withSource(flow.generateSource()); return flow.toBuilder().source(flow.sourceOrGenerateIfNull()).build();
} }
public void suite(FlowListenersInterface flowListenersService) { public void suite(FlowListenersInterface flowListenersService) {
@@ -72,44 +69,44 @@ abstract public class FlowListenersTest {
FlowWithSource firstUpdated = create(first.getId(), "test2"); FlowWithSource firstUpdated = create(first.getId(), "test2");
flowRepository.create(first, first.generateSource(), pluginDefaultService.injectDefaults(first.withSource(first.generateSource()))); flowRepository.create(GenericFlow.of(first));
wait(ref, () -> { wait(ref, () -> {
assertThat(count.get(), is(1)); assertThat(count.get(), is(1));
assertThat(flowListenersService.flows().size(), is(1)); assertThat(flowListenersService.flows().size(), is(1));
}); });
// create with the same id as the first, no additional flows // create with the same id as the first, no additional flows
first = flowRepository.update(firstUpdated, first, firstUpdated.generateSource(), pluginDefaultService.injectDefaults(firstUpdated.withSource(firstUpdated.generateSource()))); first = flowRepository.update(GenericFlow.of(firstUpdated), first);
wait(ref, () -> { wait(ref, () -> {
assertThat(count.get(), is(1)); assertThat(count.get(), is(1));
assertThat(flowListenersService.flows().size(), is(1)); assertThat(flowListenersService.flows().size(), is(1));
assertThat(flowListenersService.flows().getFirst().getTasks().getFirst().getId(), is("test2")); //assertThat(flowListenersService.flows().getFirst().getFirst().getId(), is("test2"));
}); });
Flow second = create("second_" + IdUtils.create(), "test"); FlowWithSource second = create("second_" + IdUtils.create(), "test");
// create a new one // create a new one
flowRepository.create(second, second.generateSource(), pluginDefaultService.injectDefaults(second.withSource(second.generateSource()))); flowRepository.create(GenericFlow.of(second));
wait(ref, () -> { wait(ref, () -> {
assertThat(count.get(), is(2)); assertThat(count.get(), is(2));
assertThat(flowListenersService.flows().size(), is(2)); assertThat(flowListenersService.flows().size(), is(2));
}); });
// delete first // delete first
Flow deleted = flowRepository.delete(first); FlowWithSource deleted = flowRepository.delete(first);
wait(ref, () -> { wait(ref, () -> {
assertThat(count.get(), is(1)); assertThat(count.get(), is(1));
assertThat(flowListenersService.flows().size(), is(1)); assertThat(flowListenersService.flows().size(), is(1));
}); });
// restore must work // restore must work
flowRepository.create(first, first.generateSource(), pluginDefaultService.injectDefaults(first.withSource(first.generateSource()))); flowRepository.create(GenericFlow.of(first));
wait(ref, () -> { wait(ref, () -> {
assertThat(count.get(), is(2)); assertThat(count.get(), is(2));
assertThat(flowListenersService.flows().size(), is(2)); assertThat(flowListenersService.flows().size(), is(2));
}); });
Flow withTenant = first.toBuilder().tenantId("some-tenant").build(); FlowWithSource withTenant = first.toBuilder().tenantId("some-tenant").build();
flowRepository.create(withTenant, withTenant.generateSource(), pluginDefaultService.injectDefaults(withTenant.withSource(withTenant.generateSource()))); flowRepository.create(GenericFlow.of(withTenant));
wait(ref, () -> { wait(ref, () -> {
assertThat(count.get(), is(3)); assertThat(count.get(), is(3));
assertThat(flowListenersService.flows().size(), is(3)); assertThat(flowListenersService.flows().size(), is(3));

View File

@@ -48,11 +48,11 @@ abstract public class AbstractSchedulerTest {
@Inject @Inject
protected ExecutionService executionService; protected ExecutionService executionService;
public static Flow createThreadFlow() { public static FlowWithSource createThreadFlow() {
return createThreadFlow(null); return createThreadFlow(null);
} }
public static Flow createThreadFlow(String workerGroup) { public static FlowWithSource createThreadFlow(String workerGroup) {
UnitTest schedule = UnitTest.builder() UnitTest schedule = UnitTest.builder()
.id("sleep") .id("sleep")
.type(UnitTest.class.getName()) .type(UnitTest.class.getName())
@@ -72,7 +72,7 @@ abstract public class AbstractSchedulerTest {
} }
protected static FlowWithSource createFlow(List<AbstractTrigger> triggers, List<PluginDefault> list) { protected static FlowWithSource createFlow(List<AbstractTrigger> triggers, List<PluginDefault> list) {
Flow.FlowBuilder<?, ?> builder = Flow.builder() FlowWithSource.FlowWithSourceBuilder<?, ?> builder = FlowWithSource.builder()
.id(IdUtils.create()) .id(IdUtils.create())
.namespace("io.kestra.unittest") .namespace("io.kestra.unittest")
.inputs(List.of( .inputs(List.of(
@@ -107,8 +107,8 @@ abstract public class AbstractSchedulerTest {
builder.pluginDefaults(list); builder.pluginDefaults(list);
} }
Flow flow = builder.build(); FlowWithSource flow = builder.build();
return FlowWithSource.of(flow, flow.generateSource()); return flow.toBuilder().source(flow.sourceOrGenerateIfNull()).build();
} }
protected static FlowWithSource createLongRunningFlow(List<AbstractTrigger> triggers, List<PluginDefault> list) { protected static FlowWithSource createLongRunningFlow(List<AbstractTrigger> triggers, List<PluginDefault> list) {

View File

@@ -2,8 +2,10 @@ package io.kestra.core.schedulers;
import io.kestra.core.models.executions.Execution; import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.State; import io.kestra.core.models.flows.State;
import io.kestra.core.models.property.Property; import io.kestra.core.models.property.Property;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.models.triggers.Trigger; import io.kestra.core.models.triggers.Trigger;
import io.kestra.core.repositories.FlowRepositoryInterface; import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.runners.FlowListeners; import io.kestra.core.runners.FlowListeners;
@@ -44,7 +46,7 @@ class SchedulerConditionTest extends AbstractSchedulerTest {
protected FlowRepositoryInterface flowRepository; protected FlowRepositoryInterface flowRepository;
private static Flow createScheduleFlow() { private static FlowWithSource createScheduleFlow() {
Schedule schedule = Schedule.builder() Schedule schedule = Schedule.builder()
.id("hourly") .id("hourly")
.type(Schedule.class.getName()) .type(Schedule.class.getName())
@@ -72,8 +74,8 @@ class SchedulerConditionTest extends AbstractSchedulerTest {
SchedulerExecutionStateInterface executionRepositorySpy = spy(this.executionState); SchedulerExecutionStateInterface executionRepositorySpy = spy(this.executionState);
CountDownLatch queueCount = new CountDownLatch(4); CountDownLatch queueCount = new CountDownLatch(4);
Flow flow = createScheduleFlow(); FlowWithSource flow = createScheduleFlow();
flowRepository.create(flow, flow.generateSource(), flow); flowRepository.create(GenericFlow.of(flow));
Trigger trigger = Trigger.builder() Trigger trigger = Trigger.builder()
.namespace(flow.getNamespace()) .namespace(flow.getNamespace())
@@ -101,7 +103,7 @@ class SchedulerConditionTest extends AbstractSchedulerTest {
Flux<Execution> receive = TestsUtils.receive(executionQueue, throwConsumer(either -> { Flux<Execution> receive = TestsUtils.receive(executionQueue, throwConsumer(either -> {
Execution execution = either.getLeft(); Execution execution = either.getLeft();
if (execution.getState().getCurrent() == State.Type.CREATED) { if (execution.getState().getCurrent() == State.Type.CREATED) {
terminateExecution(execution, trigger, flow.withSource(flow.generateSource())); terminateExecution(execution, trigger, flow);
queueCount.countDown(); queueCount.countDown();
if (queueCount.getCount() == 0) { if (queueCount.getCount() == 0) {
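Persisting the fixture changes in the same way throughout this commit: the three-argument flowRepository.create(flow, flow.generateSource(), flow) becomes a single call that wraps the typed flow in the new un-typed GenericFlow. A minimal sketch, assuming an injected FlowRepositoryInterface and the createScheduleFlow() fixture above:

import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.GenericFlow;

FlowWithSource flow = createScheduleFlow();

// GenericFlow.of(flow) carries the flow together with its source in un-typed form,
// which, per the commit message, lets defaults be injected prior to strongly-typed deserialization
FlowWithSource saved = flowRepository.create(GenericFlow.of(flow));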

View File

@@ -1,7 +1,9 @@
package io.kestra.core.schedulers; package io.kestra.core.schedulers;
import io.kestra.core.models.Label; import io.kestra.core.models.Label;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.property.Property; import io.kestra.core.models.property.Property;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.repositories.FlowRepositoryInterface; import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.utils.TestsUtils; import io.kestra.core.utils.TestsUtils;
import io.kestra.jdbc.runner.JdbcScheduler; import io.kestra.jdbc.runner.JdbcScheduler;
@@ -92,11 +94,11 @@ public class SchedulerPollingTriggerTest extends AbstractSchedulerTest {
// mock flow listener // mock flow listener
FlowListeners flowListenersServiceSpy = spy(this.flowListenersService); FlowListeners flowListenersServiceSpy = spy(this.flowListenersService);
PollingTrigger pollingTrigger = createPollingTrigger(List.of(State.Type.FAILED)).build(); PollingTrigger pollingTrigger = createPollingTrigger(List.of(State.Type.FAILED)).build();
Flow flow = createPollingTriggerFlow(pollingTrigger) FlowWithSource flow = createPollingTriggerFlow(pollingTrigger)
.toBuilder() .toBuilder()
.tasks(List.of(Fail.builder().id("fail").type(Fail.class.getName()).build())) .tasks(List.of(Fail.builder().id("fail").type(Fail.class.getName()).build()))
.build(); .build();
flowRepository.create(flow, flow.generateSource(), flow); flowRepository.create(GenericFlow.of(flow));
doReturn(List.of(flow)) doReturn(List.of(flow))
.when(flowListenersServiceSpy) .when(flowListenersServiceSpy)
.flows(); .flows();
@@ -115,7 +117,7 @@ public class SchedulerPollingTriggerTest extends AbstractSchedulerTest {
queueCount.countDown(); queueCount.countDown();
if (execution.getLeft().getState().getCurrent() == State.Type.CREATED) { if (execution.getLeft().getState().getCurrent() == State.Type.CREATED) {
terminateExecution(execution.getLeft(), State.Type.FAILED, Trigger.of(flow, pollingTrigger), flow.withSource(flow.generateSource())); terminateExecution(execution.getLeft(), State.Type.FAILED, Trigger.of(flow, pollingTrigger), flow);
} }
} }
})); }));
@@ -184,7 +186,7 @@ public class SchedulerPollingTriggerTest extends AbstractSchedulerTest {
} }
} }
private Flow createPollingTriggerFlow(PollingTrigger pollingTrigger) { private FlowWithSource createPollingTriggerFlow(PollingTrigger pollingTrigger) {
return createFlow(Collections.singletonList(pollingTrigger)); return createFlow(Collections.singletonList(pollingTrigger));
} }

View File

@@ -2,7 +2,9 @@ package io.kestra.core.schedulers;
import io.kestra.core.models.executions.Execution; import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.State; import io.kestra.core.models.flows.State;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.models.property.Property; import io.kestra.core.models.property.Property;
import io.kestra.core.models.triggers.RecoverMissedSchedules; import io.kestra.core.models.triggers.RecoverMissedSchedules;
import io.kestra.core.models.triggers.Trigger; import io.kestra.core.models.triggers.Trigger;
@@ -51,7 +53,7 @@ public class SchedulerScheduleOnDatesTest extends AbstractSchedulerTest {
)); ));
} }
private Flow createScheduleFlow(String zone, String triggerId) { private FlowWithSource createScheduleFlow(String zone, String triggerId) {
var now = ZonedDateTime.now(); var now = ZonedDateTime.now();
var before = now.minusSeconds(3).truncatedTo(ChronoUnit.SECONDS); var before = now.minusSeconds(3).truncatedTo(ChronoUnit.SECONDS);
var after = now.plusSeconds(3).truncatedTo(ChronoUnit.SECONDS); var after = now.plusSeconds(3).truncatedTo(ChronoUnit.SECONDS);
@@ -78,8 +80,8 @@ public class SchedulerScheduleOnDatesTest extends AbstractSchedulerTest {
Set<String> executionId = new HashSet<>(); Set<String> executionId = new HashSet<>();
// then flow should be executed 4 times // then flow should be executed 4 times
Flow flow = createScheduleFlow("Europe/Paris", "schedule"); FlowWithSource flow = createScheduleFlow("Europe/Paris", "schedule");
flowRepository.create(flow, flow.generateSource(), flow); flowRepository.create(GenericFlow.of(flow));
doReturn(List.of(flow)) doReturn(List.of(flow))
.when(flowListenersServiceSpy) .when(flowListenersServiceSpy)
@@ -107,7 +109,7 @@ public class SchedulerScheduleOnDatesTest extends AbstractSchedulerTest {
executionId.add(execution.getId()); executionId.add(execution.getId());
if (execution.getState().getCurrent() == State.Type.CREATED) { if (execution.getState().getCurrent() == State.Type.CREATED) {
terminateExecution(execution, trigger, flow.withSource(flow.generateSource())); terminateExecution(execution, trigger, flow);
} }
assertThat(execution.getFlowId(), is(flow.getId())); assertThat(execution.getFlowId(), is(flow.getId()));
queueCount.countDown(); queueCount.countDown();

View File

@@ -5,6 +5,7 @@ import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.flows.FlowWithSource; import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.PluginDefault; import io.kestra.core.models.flows.PluginDefault;
import io.kestra.core.models.property.Property; import io.kestra.core.models.property.Property;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.repositories.FlowRepositoryInterface; import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.utils.TestsUtils; import io.kestra.core.utils.TestsUtils;
import io.kestra.jdbc.runner.JdbcScheduler; import io.kestra.jdbc.runner.JdbcScheduler;
@@ -101,7 +102,7 @@ public class SchedulerScheduleTest extends AbstractSchedulerTest {
FlowWithSource invalid = createScheduleFlow("Asia/Delhi", "schedule", true); FlowWithSource invalid = createScheduleFlow("Asia/Delhi", "schedule", true);
FlowWithSource flow = createScheduleFlow("Europe/Paris", "schedule", false); FlowWithSource flow = createScheduleFlow("Europe/Paris", "schedule", false);
flowRepository.create(flow, flow.generateSource(), flow); flowRepository.create(GenericFlow.of(flow));
doReturn(List.of(invalid, flow)) doReturn(List.of(invalid, flow))
.when(flowListenersServiceSpy) .when(flowListenersServiceSpy)
.flows(); .flows();
@@ -137,7 +138,7 @@ public class SchedulerScheduleTest extends AbstractSchedulerTest {
executionId.add(execution.getId()); executionId.add(execution.getId());
if (execution.getState().getCurrent() == State.Type.CREATED) { if (execution.getState().getCurrent() == State.Type.CREATED) {
terminateExecution(execution, trigger, flow.withSource(flow.generateSource())); terminateExecution(execution, trigger, flow);
} }
assertThat(execution.getFlowId(), is(flow.getId())); assertThat(execution.getFlowId(), is(flow.getId()));
queueCount.countDown(); queueCount.countDown();
@@ -430,7 +431,7 @@ public class SchedulerScheduleTest extends AbstractSchedulerTest {
.when(flowListenersServiceSpy) .when(flowListenersServiceSpy)
.flows(); .flows();
flowRepository.create(flow, flow.generateSource(), flow); flowRepository.create(GenericFlow.of(flow));
// to avoid waiting too much before a trigger execution, we add a last trigger with a date now - 1m. // to avoid waiting too much before a trigger execution, we add a last trigger with a date now - 1m.
Trigger lastTrigger = Trigger Trigger lastTrigger = Trigger
.builder() .builder()
@@ -453,7 +454,7 @@ public class SchedulerScheduleTest extends AbstractSchedulerTest {
assertThat(execution.getFlowId(), is(flow.getId())); assertThat(execution.getFlowId(), is(flow.getId()));
if (execution.getState().getCurrent() == State.Type.CREATED) { if (execution.getState().getCurrent() == State.Type.CREATED) {
terminateExecution(execution, lastTrigger, flow.withSource(flow.generateSource())); terminateExecution(execution, lastTrigger, flow);
} }
queueCount.countDown(); queueCount.countDown();
})); }));
@@ -542,7 +543,7 @@ public class SchedulerScheduleTest extends AbstractSchedulerTest {
.build() .build()
) )
); );
flowRepository.create(flow, flow.generateSource(), flow); flowRepository.create(GenericFlow.of(flow));
doReturn(List.of(flow)) doReturn(List.of(flow))
.when(flowListenersServiceSpy) .when(flowListenersServiceSpy)
.flows(); .flows();
@@ -614,7 +615,7 @@ public class SchedulerScheduleTest extends AbstractSchedulerTest {
.build() .build()
) )
); );
flowRepository.create(flow, flow.generateSource(), flow); flowRepository.create(GenericFlow.of(flow));
doReturn(List.of(flow)) doReturn(List.of(flow))
.when(flowListenersServiceSpy) .when(flowListenersServiceSpy)
.flows(); .flows();
@@ -650,7 +651,7 @@ public class SchedulerScheduleTest extends AbstractSchedulerTest {
lastTrigger.getNextExecutionDate().plusMinutes(3).toInstant() lastTrigger.getNextExecutionDate().plusMinutes(3).toInstant()
)))) ))))
.build())); .build()));
terminateExecution(terminated, lastTrigger, flow.withSource(flow.generateSource())); terminateExecution(terminated, lastTrigger, flow);
} }
queueCount.countDown(); queueCount.countDown();
})); }));

View File

@@ -3,8 +3,10 @@ package io.kestra.core.schedulers;
import io.kestra.core.models.Label; import io.kestra.core.models.Label;
import io.kestra.core.models.executions.Execution; import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.State; import io.kestra.core.models.flows.State;
import io.kestra.core.models.triggers.Trigger; import io.kestra.core.models.triggers.Trigger;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.repositories.FlowRepositoryInterface; import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.runners.FlowListeners; import io.kestra.core.runners.FlowListeners;
import io.kestra.core.runners.TestMethodScopedWorker; import io.kestra.core.runners.TestMethodScopedWorker;
@@ -39,8 +41,8 @@ public class SchedulerThreadTest extends AbstractSchedulerTest {
@Test @Test
void thread() throws Exception { void thread() throws Exception {
Flow flow = createThreadFlow(); FlowWithSource flow = createThreadFlow();
flowRepository.create(flow, flow.generateSource(), flow); flowRepository.create(GenericFlow.of(flow));
CountDownLatch queueCount = new CountDownLatch(2); CountDownLatch queueCount = new CountDownLatch(2);
// wait for execution // wait for execution
@@ -50,7 +52,7 @@ public class SchedulerThreadTest extends AbstractSchedulerTest {
assertThat(execution.getFlowId(), is(flow.getId())); assertThat(execution.getFlowId(), is(flow.getId()));
if (execution.getState().getCurrent() != State.Type.SUCCESS) { if (execution.getState().getCurrent() != State.Type.SUCCESS) {
terminateExecution(execution, Trigger.of(flow, flow.getTriggers().getFirst()), flow.withSource(flow.generateSource())); terminateExecution(execution, Trigger.of(flow, flow.getTriggers().getFirst()), flow);
queueCount.countDown(); queueCount.countDown();
} }
})); }));

View File

@@ -6,7 +6,9 @@ import io.kestra.core.models.executions.ExecutionKilled;
import io.kestra.core.models.executions.ExecutionKilledTrigger; import io.kestra.core.models.executions.ExecutionKilledTrigger;
import io.kestra.core.models.executions.LogEntry; import io.kestra.core.models.executions.LogEntry;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithSource; import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.models.property.Property; import io.kestra.core.models.property.Property;
import io.kestra.core.models.triggers.AbstractTrigger; import io.kestra.core.models.triggers.AbstractTrigger;
import io.kestra.core.models.triggers.PollingTriggerInterface; import io.kestra.core.models.triggers.PollingTriggerInterface;
@@ -49,7 +51,7 @@ public class SchedulerTriggerChangeTest extends AbstractSchedulerTest {
@Inject @Inject
@Named(QueueFactoryInterface.FLOW_NAMED) @Named(QueueFactoryInterface.FLOW_NAMED)
protected QueueInterface<FlowWithSource> flowQueue; protected QueueInterface<FlowInterface> flowQueue;
@Inject @Inject
@Named(QueueFactoryInterface.WORKERTASKLOG_NAMED) @Named(QueueFactoryInterface.WORKERTASKLOG_NAMED)
@@ -83,7 +85,7 @@ public class SchedulerTriggerChangeTest extends AbstractSchedulerTest {
) )
.build(); .build();
return FlowWithSource.of(flow, flow.generateSource()); return FlowWithSource.of(flow, flow.getSource());
} }
@Test @Test
@@ -119,7 +121,7 @@ public class SchedulerTriggerChangeTest extends AbstractSchedulerTest {
// emit a flow trigger to be started // emit a flow trigger to be started
FlowWithSource flow = createFlow(Duration.ofSeconds(10)); FlowWithSource flow = createFlow(Duration.ofSeconds(10));
flowRepository.create(flow, flow.generateSource(), flow); flowRepository.create(GenericFlow.of(flow));
flowQueue.emit(flow); flowQueue.emit(flow);
Await.until(() -> STARTED_COUNT == 1, Duration.ofMillis(100), Duration.ofSeconds(30)); Await.until(() -> STARTED_COUNT == 1, Duration.ofMillis(100), Duration.ofSeconds(30));
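The flow queue itself is now declared over the new FlowInterface abstraction rather than the concrete FlowWithSource, so any flow representation can be published to it. A minimal sketch of the declaration and the emit call inside a test class, assuming Micronaut injection and a FlowWithSource fixture such as createFlow(...) above:

import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface;
import jakarta.inject.Inject;
import jakarta.inject.Named;

@Inject
@Named(QueueFactoryInterface.FLOW_NAMED)
protected QueueInterface<FlowInterface> flowQueue;

// FlowWithSource is one implementation of FlowInterface, so the fixture can be emitted as-is
flowQueue.emit(flow);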

View File

@@ -35,9 +35,6 @@ import static org.junit.jupiter.api.Assertions.assertThrows;
class YamlParserTest { class YamlParserTest {
private static final ObjectMapper MAPPER = JacksonMapper.ofJson(); private static final ObjectMapper MAPPER = JacksonMapper.ofJson();
@Inject
private YamlParser yamlParser;
@Inject @Inject
private ModelValidator modelValidator; private ModelValidator modelValidator;
@@ -213,7 +210,7 @@ class YamlParserTest {
TypeReference<Map<String, Object>> TYPE_REFERENCE = new TypeReference<>() {}; TypeReference<Map<String, Object>> TYPE_REFERENCE = new TypeReference<>() {};
Map<String, Object> flow = JacksonMapper.ofYaml().readValue(flowSource, TYPE_REFERENCE); Map<String, Object> flow = JacksonMapper.ofYaml().readValue(flowSource, TYPE_REFERENCE);
Flow parse = yamlParser.parse(flow, Flow.class, false); Flow parse = YamlParser.parse(flow, Flow.class, false);
assertThat(parse.getId(), is("duplicate")); assertThat(parse.getId(), is("duplicate"));
} }
@@ -245,7 +242,7 @@ class YamlParserTest {
File file = new File(resource.getFile()); File file = new File(resource.getFile());
return yamlParser.parse(file, Flow.class); return YamlParser.parse(file, Flow.class);
} }
private Flow parseString(String path) throws IOException { private Flow parseString(String path) throws IOException {
@@ -254,6 +251,6 @@ class YamlParserTest {
String input = Files.readString(Path.of(resource.getPath()), Charset.defaultCharset()); String input = Files.readString(Path.of(resource.getPath()), Charset.defaultCharset());
return yamlParser.parse(input, Flow.class); return YamlParser.parse(input, Flow.class);
} }
} }
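YamlParser is no longer an injected bean; its parse methods are called statically. A minimal sketch of the file, string, and map overloads exercised in this test, inside a method that declares throws IOException; the resource path is illustrative:

import com.fasterxml.jackson.core.type.TypeReference;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.serializers.JacksonMapper;
import io.kestra.core.serializers.YamlParser;
import java.io.File;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Map;

// from a file on disk
Flow fromFile = YamlParser.parse(new File("src/test/resources/flows/valids/full.yaml"), Flow.class);

// from a raw YAML string
String input = Files.readString(Path.of("src/test/resources/flows/valids/full.yaml"), Charset.defaultCharset());
Flow fromString = YamlParser.parse(input, Flow.class);

// from an already-parsed map, without strict validation
TypeReference<Map<String, Object>> typeRef = new TypeReference<>() {};
Map<String, Object> asMap = JacksonMapper.ofYaml().readValue(input, typeRef);
Flow fromMap = YamlParser.parse(asMap, Flow.class, false);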

View File

@@ -1,8 +1,9 @@
package io.kestra.core.services; package io.kestra.core.services;
import io.kestra.core.junit.annotations.KestraTest; import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithSource; import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.models.flows.Type; import io.kestra.core.models.flows.Type;
import io.kestra.core.models.flows.input.StringInput; import io.kestra.core.models.flows.input.StringInput;
import io.kestra.core.models.property.Property; import io.kestra.core.models.property.Property;
@@ -10,7 +11,6 @@ import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.plugin.core.debug.Echo; import io.kestra.plugin.core.debug.Echo;
import io.kestra.plugin.core.debug.Return; import io.kestra.plugin.core.debug.Return;
import jakarta.inject.Inject; import jakarta.inject.Inject;
import jakarta.validation.ConstraintViolationException;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import java.util.Collections; import java.util.Collections;
@@ -21,25 +21,25 @@ import java.util.stream.Stream;
import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThrows;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.assertTrue;
@KestraTest @KestraTest
class FlowServiceTest { class FlowServiceTest {
private static final String TEST_NAMESPACE = "io.kestra.unittest";
@Inject @Inject
private FlowService flowService; private FlowService flowService;
@Inject @Inject
private FlowRepositoryInterface flowRepository; private FlowRepositoryInterface flowRepository;
private static Flow create(String flowId, String taskId, Integer revision) { private static FlowWithSource create(String flowId, String taskId, Integer revision) {
return create(null, flowId, taskId, revision); return create(null, TEST_NAMESPACE, flowId, taskId, revision);
} }
private static Flow create(String tenantId, String flowId, String taskId, Integer revision) { private static FlowWithSource create(String tenantId, String namespace, String flowId, String taskId, Integer revision) {
return Flow.builder() FlowWithSource flow = FlowWithSource.builder()
.id(flowId) .id(flowId)
.namespace("io.kestra.unittest") .namespace(namespace)
.tenantId(tenantId) .tenantId(tenantId)
.revision(revision) .revision(revision)
.tasks(Collections.singletonList(Return.builder() .tasks(Collections.singletonList(Return.builder()
@@ -48,6 +48,8 @@ class FlowServiceTest {
.format(Property.of("test")) .format(Property.of("test"))
.build())) .build()))
.build(); .build();
return flow.toBuilder().source(flow.sourceOrGenerateIfNull()).build();
} }
@Test @Test
@@ -59,7 +61,7 @@ class FlowServiceTest {
- id: task - id: task
type: io.kestra.plugin.core.log.Log type: io.kestra.plugin.core.log.Log
message: Hello"""; message: Hello""";
Flow importFlow = flowService.importFlow("my-tenant", source); FlowWithSource importFlow = flowService.importFlow("my-tenant", source);
assertThat(importFlow.getId(), is("import")); assertThat(importFlow.getId(), is("import"));
assertThat(importFlow.getNamespace(), is("some.namespace")); assertThat(importFlow.getNamespace(), is("some.namespace"));
@@ -93,7 +95,7 @@ class FlowServiceTest {
- id: task - id: task
type: io.kestra.plugin.core.log.Log type: io.kestra.plugin.core.log.Log
message: Hello"""; message: Hello""";
Flow importFlow = flowService.importFlow("my-tenant", oldSource); FlowWithSource importFlow = flowService.importFlow("my-tenant", oldSource);
assertThat(importFlow.getId(), is("import_dry")); assertThat(importFlow.getId(), is("import_dry"));
assertThat(importFlow.getNamespace(), is("some.namespace")); assertThat(importFlow.getNamespace(), is("some.namespace"));
@@ -120,18 +122,14 @@ class FlowServiceTest {
@Test @Test
void sameRevisionWithDeletedOrdered() { void sameRevisionWithDeletedOrdered() {
var flow1 = create("test", "test", 1); Stream<FlowInterface> stream = Stream.of(
var flow2 = create("test", "test2", 2); create("test", "test", 1),
var flow3 = create("test", "test2", 2).toDeleted(); create("test", "test2", 2),
var flow4 = create("test", "test2", 4); create("test", "test2", 2).toDeleted(),
Stream<FlowWithSource> stream = Stream.of( create("test", "test2", 4)
flow1.withSource(flow1.generateSource()),
flow2.withSource(flow2.generateSource()),
flow3.withSource(flow3.generateSource()),
flow4.withSource(flow4.generateSource())
); );
List<FlowWithSource> collect = flowService.keepLastVersion(stream).toList(); List<FlowInterface> collect = flowService.keepLastVersion(stream).toList();
assertThat(collect.size(), is(1)); assertThat(collect.size(), is(1));
assertThat(collect.getFirst().isDeleted(), is(false)); assertThat(collect.getFirst().isDeleted(), is(false));
@@ -140,20 +138,16 @@ class FlowServiceTest {
@Test @Test
void sameRevisionWithDeletedSameRevision() { void sameRevisionWithDeletedSameRevision() {
var flow1 = create("test2", "test2", 1);
var flow2 = create("test", "test", 1); Stream<FlowInterface> stream = Stream.of(
var flow3 = create("test", "test2", 2); create("test2", "test2", 1),
var flow4 = create("test", "test3", 3); create("test", "test", 1),
var flow5 = create("test", "test2", 2).toDeleted(); create("test", "test2", 2),
Stream<FlowWithSource> stream = Stream.of( create("test", "test3", 3),
flow1.withSource(flow1.generateSource()), create("test", "test2", 2).toDeleted()
flow2.withSource(flow2.generateSource()),
flow3.withSource(flow3.generateSource()),
flow4.withSource(flow4.generateSource()),
flow5.withSource(flow5.generateSource())
); );
List<FlowWithSource> collect = flowService.keepLastVersion(stream).toList(); List<FlowInterface> collect = flowService.keepLastVersion(stream).toList();
assertThat(collect.size(), is(1)); assertThat(collect.size(), is(1));
assertThat(collect.getFirst().isDeleted(), is(false)); assertThat(collect.getFirst().isDeleted(), is(false));
@@ -162,18 +156,15 @@ class FlowServiceTest {
@Test @Test
void sameRevisionWithDeletedUnordered() { void sameRevisionWithDeletedUnordered() {
var flow1 = create("test", "test", 1);
var flow2 = create("test", "test2", 2); Stream<FlowInterface> stream = Stream.of(
var flow3 = create("test", "test2", 4); create("test", "test", 1),
var flow4 = create("test", "test2", 2).toDeleted(); create("test", "test2", 2),
Stream<FlowWithSource> stream = Stream.of( create("test", "test2", 4),
flow1.withSource(flow1.generateSource()), create("test", "test2", 2).toDeleted()
flow2.withSource(flow2.generateSource()),
flow3.withSource(flow3.generateSource()),
flow4.withSource(flow4.generateSource())
); );
List<FlowWithSource> collect = flowService.keepLastVersion(stream).toList(); List<FlowInterface> collect = flowService.keepLastVersion(stream).toList();
assertThat(collect.size(), is(1)); assertThat(collect.size(), is(1));
assertThat(collect.getFirst().isDeleted(), is(false)); assertThat(collect.getFirst().isDeleted(), is(false));
@@ -182,22 +173,17 @@ class FlowServiceTest {
@Test @Test
void multipleFlow() { void multipleFlow() {
var flow1 = create("test", "test", 2);
var flow2 = create("test", "test2", 1); Stream<FlowInterface> stream = Stream.of(
var flow3 = create("test2", "test2", 1); create("test", "test", 2),
var flow4 = create("test2", "test3", 3); create("test", "test2", 1),
var flow5 = create("test3", "test1", 2); create("test2", "test2", 1),
var flow6 = create("test3", "test2", 3); create("test2", "test3", 3),
Stream<FlowWithSource> stream = Stream.of( create("test3", "test1", 2),
flow1.withSource(flow1.generateSource()), create("test3", "test2", 3)
flow2.withSource(flow2.generateSource()),
flow3.withSource(flow3.generateSource()),
flow4.withSource(flow4.generateSource()),
flow5.withSource(flow5.generateSource()),
flow6.withSource(flow6.generateSource())
); );
List<FlowWithSource> collect = flowService.keepLastVersion(stream).toList(); List<FlowInterface> collect = flowService.keepLastVersion(stream).toList();
assertThat(collect.size(), is(3)); assertThat(collect.size(), is(3));
assertThat(collect.stream().filter(flow -> flow.getId().equals("test")).findFirst().orElseThrow().getRevision(), is(2)); assertThat(collect.stream().filter(flow -> flow.getId().equals("test")).findFirst().orElseThrow().getRevision(), is(2));
@@ -207,7 +193,7 @@ class FlowServiceTest {
@Test @Test
void warnings() { void warnings() {
Flow flow = create("test", "test", 1).toBuilder() FlowWithSource flow = create("test", "test", 1).toBuilder()
.namespace("system") .namespace("system")
.triggers(List.of( .triggers(List.of(
io.kestra.plugin.core.trigger.Flow.builder() io.kestra.plugin.core.trigger.Flow.builder()
@@ -257,9 +243,9 @@ class FlowServiceTest {
@SuppressWarnings("deprecation") @SuppressWarnings("deprecation")
@Test @Test
void propertyRenamingDeprecation() { void propertyRenamingDeprecation() {
Flow flow = Flow.builder() FlowWithSource flow = FlowWithSource.builder()
.id("flowId") .id("flowId")
.namespace("io.kestra.unittest") .namespace(TEST_NAMESPACE)
.inputs(List.of( .inputs(List.of(
StringInput.builder() StringInput.builder()
.id("inputWithId") .id("inputWithId")
@@ -302,8 +288,8 @@ class FlowServiceTest {
@Test @Test
void delete() { void delete() {
Flow flow = create("deleteTest", "test", 1); FlowWithSource flow = create("deleteTest", "test", 1);
FlowWithSource saved = flowRepository.create(flow, flow.generateSource(), flow); FlowWithSource saved = flowRepository.create(GenericFlow.of(flow));
assertThat(flowRepository.findById(flow.getTenantId(), flow.getNamespace(), flow.getId()).isPresent(), is(true)); assertThat(flowRepository.findById(flow.getTenantId(), flow.getNamespace(), flow.getId()).isPresent(), is(true));
flowService.delete(saved); flowService.delete(saved);
assertThat(flowRepository.findById(flow.getTenantId(), flow.getNamespace(), flow.getId()).isPresent(), is(false)); assertThat(flowRepository.findById(flow.getTenantId(), flow.getNamespace(), flow.getId()).isPresent(), is(false));
@@ -311,26 +297,26 @@ class FlowServiceTest {
@Test @Test
void findByNamespacePrefix() { void findByNamespacePrefix() {
Flow flow = create("findByTest", "test", 1).toBuilder().namespace("some.namespace").build(); FlowWithSource flow = create(null, "some.namespace","findByTest", "test", 1);
flowRepository.create(flow, flow.generateSource(), flow); flowRepository.create(GenericFlow.of(flow));
assertThat(flowService.findByNamespacePrefix(null, "some.namespace").size(), is(1)); assertThat(flowService.findByNamespacePrefix(null, "some.namespace").size(), is(1));
} }
@Test @Test
void findById() { void findById() {
Flow flow = create("findByIdTest", "test", 1); FlowWithSource flow = create("findByIdTest", "test", 1);
FlowWithSource saved = flowRepository.create(flow, flow.generateSource(), flow); FlowWithSource saved = flowRepository.create(GenericFlow.of(flow));
assertThat(flowService.findById(null, saved.getNamespace(), saved.getId()).isPresent(), is(true)); assertThat(flowService.findById(null, saved.getNamespace(), saved.getId()).isPresent(), is(true));
} }
@Test @Test
void checkSubflowNotFound() { void checkSubflowNotFound() {
Flow flow = create("mainFlow", "task", 1).toBuilder() FlowWithSource flow = create("mainFlow", "task", 1).toBuilder()
.tasks(List.of( .tasks(List.of(
io.kestra.plugin.core.flow.Subflow.builder() io.kestra.plugin.core.flow.Subflow.builder()
.id("subflowTask") .id("subflowTask")
.type(io.kestra.plugin.core.flow.Subflow.class.getName()) .type(io.kestra.plugin.core.flow.Subflow.class.getName())
.namespace("io.kestra.unittest") .namespace(TEST_NAMESPACE)
.flowId("nonExistentSubflow") .flowId("nonExistentSubflow")
.build() .build()
)) ))
@@ -344,15 +330,15 @@ class FlowServiceTest {
@Test @Test
void checkValidSubflow() { void checkValidSubflow() {
Flow subflow = create("existingSubflow", "task", 1); FlowWithSource subflow = create("existingSubflow", "task", 1);
flowRepository.create(subflow, subflow.generateSource(), subflow); flowRepository.create(GenericFlow.of(subflow));
Flow flow = create("mainFlow", "task", 1).toBuilder() FlowWithSource flow = create("mainFlow", "task", 1).toBuilder()
.tasks(List.of( .tasks(List.of(
io.kestra.plugin.core.flow.Subflow.builder() io.kestra.plugin.core.flow.Subflow.builder()
.id("subflowTask") .id("subflowTask")
.type(io.kestra.plugin.core.flow.Subflow.class.getName()) .type(io.kestra.plugin.core.flow.Subflow.class.getName())
.namespace("io.kestra.unittest") .namespace(TEST_NAMESPACE)
.flowId("existingSubflow") .flowId("existingSubflow")
.build() .build()
)) ))
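FlowService.keepLastVersion now consumes and returns the FlowInterface abstraction, so the fixtures above no longer pre-generate a source for every revision. A minimal sketch mirroring the sameRevisionWithDeletedOrdered case, assuming the injected FlowService and the private create(...) helper defined in this test:

import io.kestra.core.models.flows.FlowInterface;
import java.util.List;
import java.util.stream.Stream;

Stream<FlowInterface> revisions = Stream.of(
    create("test", "test", 1),
    create("test", "test2", 2),
    create("test", "test2", 2).toDeleted(),
    create("test", "test2", 4)
);

// as asserted above, a single entry remains and it is not marked deleted
List<FlowInterface> latest = flowService.keepLastVersion(revisions).toList();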

View File

@@ -6,7 +6,9 @@ import io.kestra.core.models.annotations.Plugin;
import io.kestra.core.models.conditions.ConditionContext; import io.kestra.core.models.conditions.ConditionContext;
import io.kestra.core.models.executions.Execution; import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithSource; import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.models.flows.PluginDefault; import io.kestra.core.models.flows.PluginDefault;
import io.kestra.core.models.tasks.RunnableTask; import io.kestra.core.models.tasks.RunnableTask;
import io.kestra.core.models.tasks.Task; import io.kestra.core.models.tasks.Task;
@@ -16,13 +18,15 @@ import io.kestra.core.models.triggers.PollingTriggerInterface;
import io.kestra.core.models.triggers.TriggerContext; import io.kestra.core.models.triggers.TriggerContext;
import io.kestra.core.models.triggers.TriggerOutput; import io.kestra.core.models.triggers.TriggerOutput;
import io.kestra.core.runners.RunContext; import io.kestra.core.runners.RunContext;
import io.kestra.core.serializers.YamlParser;
import io.kestra.plugin.core.condition.Expression; import io.kestra.plugin.core.condition.Expression;
import io.kestra.plugin.core.log.Log; import io.kestra.plugin.core.log.Log;
import io.kestra.plugin.core.trigger.Schedule; import io.kestra.plugin.core.trigger.Schedule;
import jakarta.inject.Inject; import jakarta.inject.Inject;
import lombok.Builder;
import lombok.EqualsAndHashCode; import lombok.EqualsAndHashCode;
import lombok.*; import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.ToString;
import lombok.experimental.SuperBuilder; import lombok.experimental.SuperBuilder;
import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
@@ -62,16 +66,13 @@ class PluginDefaultServiceTest {
@Inject @Inject
private PluginDefaultService pluginDefaultService; private PluginDefaultService pluginDefaultService;
@Inject
private YamlParser yamlParser;
@Test @Test
void shouldInjectGivenFlowWithNullSource() { void shouldInjectGivenFlowWithNullSource() {
// Given // Given
FlowWithSource flow = yamlParser.parse(TEST_LOG_FLOW_SOURCE, FlowWithSource.class); FlowInterface flow = GenericFlow.fromYaml(null, TEST_LOG_FLOW_SOURCE);
// When // When
FlowWithSource result = pluginDefaultService.injectDefaults(flow); FlowWithSource result = pluginDefaultService.injectAllDefaults(flow, true);
// Then // Then
Log task = (Log) result.getTasks().getFirst(); Log task = (Log) result.getTasks().getFirst();
@@ -158,7 +159,7 @@ class PluginDefaultServiceTest {
var previousGlobalDefault = pluginDefaultService.pluginGlobalDefault; var previousGlobalDefault = pluginDefaultService.pluginGlobalDefault;
pluginDefaultService.pluginGlobalDefault = pluginGlobalDefaultConfiguration; pluginDefaultService.pluginGlobalDefault = pluginGlobalDefaultConfiguration;
final Flow injected = pluginDefaultService.injectDefaults(flowWithPluginDefault); final Flow injected = pluginDefaultService.injectAllDefaults(flowWithPluginDefault, true);
pluginDefaultService.pluginGlobalDefault = previousGlobalDefault; pluginDefaultService.pluginGlobalDefault = previousGlobalDefault;
assertThat(((DefaultPrecedenceTester) injected.getTasks().getFirst()).getPropFoo(), is(fooValue)); assertThat(((DefaultPrecedenceTester) injected.getTasks().getFirst()).getPropFoo(), is(fooValue));
@@ -176,8 +177,8 @@ class PluginDefaultServiceTest {
} }
@Test @Test
void injectFlowAndGlobals() { public void injectFlowAndGlobals() {
String source = """ String source = String.format("""
id: default-test id: default-test
namespace: io.kestra.tests namespace: io.kestra.tests
@@ -190,27 +191,30 @@ class PluginDefaultServiceTest {
tasks: tasks:
- id: test - id: test
type: io.kestra.core.services.PluginDefaultServiceTest$DefaultTester type: io.kestra.core.services.PluginDefaultServiceTest$DefaultTester
set: 666"""; set: 666
pluginDefaults:
- type: "%s"
forced: false
values:
set: 123
value: 1
arrays: [1]
- type: "%s"
forced: false
values:
set: 123
- type: "%s"
forced: false
values:
expression: "{{ test }}"
""",
DefaultTester.class.getName(),
DefaultTriggerTester.class.getName(),
Expression.class.getName()
);
FlowWithSource flow = yamlParser.parse(source, Flow.class) FlowWithSource injected = pluginDefaultService.parseFlowWithAllDefaults(null, source, false);
.withSource(source)
.toBuilder()
.pluginDefaults(List.of(
new PluginDefault(DefaultTester.class.getName(), false, ImmutableMap.of(
"value", 1,
"set", 123,
"arrays", Collections.singletonList(1)
)),
new PluginDefault(DefaultTriggerTester.class.getName(), false, ImmutableMap.of(
"set", 123
)),
new PluginDefault(Expression.class.getName(), false, ImmutableMap.of(
"expression", "{{ test }}"
))
))
.build();
FlowWithSource injected = pluginDefaultService.injectDefaults(flow);
assertThat(((DefaultTester) injected.getTasks().getFirst()).getValue(), is(1)); assertThat(((DefaultTester) injected.getTasks().getFirst()).getValue(), is(1));
assertThat(((DefaultTester) injected.getTasks().getFirst()).getSet(), is(666)); assertThat(((DefaultTester) injected.getTasks().getFirst()).getSet(), is(666));
@@ -226,41 +230,44 @@ class PluginDefaultServiceTest {
} }
@Test @Test
public void forced() { public void shouldInjectForcedDefaultsGivenForcedTrue() {
// Given
String source = """ String source = """
id: default-test id: default-test
namespace: io.kestra.tests namespace: io.kestra.tests
tasks: tasks:
- id: test - id: test
type: io.kestra.core.services.PluginDefaultServiceTest$DefaultTester type: io.kestra.core.services.PluginDefaultServiceTest$DefaultTester
set: 666"""; set: 1
pluginDefaults:
- type: io.kestra.core.services.PluginDefaultServiceTest$DefaultTester
forced: true
values:
set: 2
- type: io.kestra.core.services.PluginDefaultServiceTest$DefaultTester
forced: true
values:
set: 3
- type: io.kestra.core.services.PluginDefaultServiceTest$DefaultTester
forced: false
values:
set: 4
value: 1
arrays: [1]
""";
FlowWithSource flow = yamlParser.parse(source, Flow.class) // When
.withSource(source) FlowWithSource injected = pluginDefaultService.parseFlowWithAllDefaults(null, source, false);
.toBuilder()
.pluginDefaults(List.of(
new PluginDefault(DefaultTester.class.getName(), true, ImmutableMap.of(
"set", 123
)),
new PluginDefault(DefaultTester.class.getName(), true, ImmutableMap.of(
"set", 789
)),
new PluginDefault(DefaultTester.class.getName(), false, ImmutableMap.of(
"value", 1,
"set", 456,
"arrays", Collections.singletonList(1)
))
))
.build();
FlowWithSource injected = pluginDefaultService.injectDefaults(flow); // Then
assertThat(((DefaultTester) injected.getTasks().getFirst()).getSet(), is(2));
assertThat(((DefaultTester) injected.getTasks().getFirst()).getSet(), is(123));
} }
@Test @Test
public void prefix() { public void shouldInjectDefaultGivenPrefixType() {
// Given
String source = """ String source = """
id: default-test id: default-test
namespace: io.kestra.tests namespace: io.kestra.tests
@@ -274,87 +281,80 @@ class PluginDefaultServiceTest {
tasks: tasks:
- id: test - id: test
type: io.kestra.core.services.PluginDefaultServiceTest$DefaultTester type: io.kestra.core.services.PluginDefaultServiceTest$DefaultTester
set: 666"""; set: 666
pluginDefaults:
- type: io.kestra.core.services.PluginDefaultServiceTest$DefaultTester
values:
set: 789
- type: io.kestra.core.services.
values:
set: 456
value: 2
- type: io.kestra.core.services2.
values:
value: 3
""";
FlowWithSource flow = yamlParser.parse(source, Flow.class) // When
.withSource(source) FlowWithSource injected = pluginDefaultService.parseFlowWithAllDefaults(null, source, false);
.toBuilder()
.pluginDefaults(List.of(
new PluginDefault(DefaultTester.class.getName(), false, ImmutableMap.of(
"set", 789
)),
new PluginDefault("io.kestra.core.services.", false, ImmutableMap.of(
"value", 2,
"set", 456,
"arrays", Collections.singletonList(1)
)),
new PluginDefault("io.kestra.core.services2.", false, ImmutableMap.of(
"value", 3
))
))
.build();
FlowWithSource injected = pluginDefaultService.injectDefaults(flow);
// Then
assertThat(((DefaultTester) injected.getTasks().getFirst()).getSet(), is(666)); assertThat(((DefaultTester) injected.getTasks().getFirst()).getSet(), is(666));
assertThat(((DefaultTester) injected.getTasks().getFirst()).getValue(), is(2)); assertThat(((DefaultTester) injected.getTasks().getFirst()).getValue(), is(2));
} }
@Test @Test
void alias() { void shouldInjectFlowDefaultsGivenAlias() {
String source = """ // Given
id: default-test GenericFlow flow = GenericFlow.fromYaml(null, """
namespace: io.kestra.tests id: default-test
namespace: io.kestra.tests
tasks: tasks:
- id: test - id: test
type: io.kestra.core.services.PluginDefaultServiceTest$DefaultTester type: io.kestra.core.services.PluginDefaultServiceTest$DefaultTester
set: 666"""; set: 666
FlowWithSource flow = yamlParser.parse(source, Flow.class) pluginDefaults:
.withSource(source) - type: io.kestra.core.services.DefaultTesterAlias
.toBuilder() values:
.pluginDefaults(List.of( value: 1
new PluginDefault("io.kestra.core.services.DefaultTesterAlias", false, ImmutableMap.of( """
"value", 1 );
)) // When
)) FlowWithSource injected = pluginDefaultService.injectAllDefaults(flow, true);
.build();
FlowWithSource injected = pluginDefaultService.injectDefaults(flow);
// Then
assertThat(((DefaultTester) injected.getTasks().getFirst()).getValue(), is(1)); assertThat(((DefaultTester) injected.getTasks().getFirst()).getValue(), is(1));
} }
@Test @Test
void defaultOverride() { void shouldInjectFlowDefaultsGivenType() {
String source = """ GenericFlow flow = GenericFlow.fromYaml(null, """
id: default-test id: default-test
namespace: io.kestra.tests namespace: io.kestra.tests
tasks: tasks:
- id: test - id: test
type: io.kestra.core.services.PluginDefaultServiceTest$DefaultTester type: io.kestra.core.services.PluginDefaultServiceTest$DefaultTester
set: 666"""; set: 666
FlowWithSource flow = yamlParser.parse(source, Flow.class) pluginDefaults:
.withSource(source) - type: io.kestra.core.services.PluginDefaultServiceTest$DefaultTester
.toBuilder() values:
.pluginDefaults(List.of( defaultValue: overridden
new PluginDefault(DefaultTester.class.getName(), false, ImmutableMap.of( """
"defaultValue", "overridden" );
))
))
.build();
FlowWithSource injected = pluginDefaultService.injectDefaults(flow);
FlowWithSource injected = pluginDefaultService.injectAllDefaults(flow, true);
assertThat(((DefaultTester) injected.getTasks().getFirst()).getDefaultValue(), is("overridden")); assertThat(((DefaultTester) injected.getTasks().getFirst()).getDefaultValue(), is("overridden"));
} }
@Test @Test
public void taskValueOverTaskDefaults() { public void shouldNotInjectDefaultsGivenExistingTaskValue() {
String source = """ // Given
GenericFlow flow = GenericFlow.fromYaml(null, """
id: default-test id: default-test
namespace: io.kestra.tests namespace: io.kestra.tests
@@ -362,20 +362,19 @@ class PluginDefaultServiceTest {
- id: test - id: test
type: io.kestra.plugin.core.log.Log type: io.kestra.plugin.core.log.Log
message: testing message: testing
level: INFO"""; level: INFO
pluginDefaults:
- type: io.kestra.core.services.PluginDefaultServiceTest$DefaultTester
values:
defaultValue: WARN
"""
);
FlowWithSource flow = yamlParser.parse(source, Flow.class) // When
.withSource(source) FlowWithSource injected = pluginDefaultService.injectAllDefaults(flow, true);
.toBuilder()
.pluginDefaults(List.of(
new PluginDefault(Log.class.getName(), false, ImmutableMap.of(
"level", Level.WARN
))
))
.build();
FlowWithSource injected = pluginDefaultService.injectDefaults(flow);
// Then
assertThat(((Log) injected.getTasks().getFirst()).getLevel().toString(), is(Level.INFO.name())); assertThat(((Log) injected.getTasks().getFirst()).getLevel().toString(), is(Level.INFO.name()));
} }

View File

@@ -5,6 +5,7 @@ import io.kestra.core.models.annotations.Plugin;
import io.kestra.core.models.executions.Execution; import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.TaskRun; import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.runners.RunContext; import io.kestra.core.runners.RunContext;
import io.kestra.core.runners.SubflowExecutionResult; import io.kestra.core.runners.SubflowExecutionResult;
import io.kestra.plugin.core.flow.Subflow; import io.kestra.plugin.core.flow.Subflow;
@@ -38,7 +39,7 @@ import java.util.Optional;
public class BadExecutable extends Subflow { public class BadExecutable extends Subflow {
@Override @Override
public Optional<SubflowExecutionResult> createSubflowExecutionResult(RunContext runContext, TaskRun taskRun, Flow flow, Execution execution) { public Optional<SubflowExecutionResult> createSubflowExecutionResult(RunContext runContext, TaskRun taskRun, FlowInterface flow, Execution execution) {
throw new RuntimeException("An error!"); throw new RuntimeException("An error!");
} }
} }

View File

@@ -1,7 +1,8 @@
package io.kestra.core.services; package io.kestra.core.topologies;
import io.kestra.core.models.flows.FlowWithSource; import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.property.Property; import io.kestra.core.models.property.Property;
import io.kestra.core.serializers.YamlParser;
import io.kestra.plugin.core.condition.ExecutionFlow; import io.kestra.plugin.core.condition.ExecutionFlow;
import io.kestra.plugin.core.condition.ExecutionStatus; import io.kestra.plugin.core.condition.ExecutionStatus;
import io.kestra.plugin.core.condition.MultipleCondition; import io.kestra.plugin.core.condition.MultipleCondition;
@@ -9,18 +10,18 @@ import io.kestra.plugin.core.condition.Expression;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.State; import io.kestra.core.models.flows.State;
import io.kestra.core.models.topologies.FlowRelation; import io.kestra.core.models.topologies.FlowRelation;
import io.kestra.core.serializers.YamlParser;
import io.kestra.plugin.core.debug.Return; import io.kestra.plugin.core.debug.Return;
import io.kestra.plugin.core.flow.Parallel; import io.kestra.plugin.core.flow.Parallel;
import io.kestra.plugin.core.flow.Subflow; import io.kestra.plugin.core.flow.Subflow;
import io.kestra.core.topologies.FlowTopologyService;
import io.kestra.core.utils.TestsUtils; import io.kestra.core.utils.TestsUtils;
import io.kestra.core.junit.annotations.KestraTest; import io.kestra.core.junit.annotations.KestraTest;
import jakarta.inject.Inject; import jakarta.inject.Inject;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import java.io.File; import java.io.File;
import java.io.IOException;
import java.net.URL; import java.net.URL;
import java.nio.file.Files;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@@ -30,15 +31,13 @@ import static org.hamcrest.Matchers.nullValue;
@KestraTest @KestraTest
class FlowTopologyServiceTest { class FlowTopologyServiceTest {
@Inject @Inject
private FlowTopologyService flowTopologyService; private FlowTopologyService flowTopologyService;
@Inject
private YamlParser yamlParser = new YamlParser();
@Test @Test
void flowTask() { void flowTask() {
FlowWithSource parent = Flow.builder() Flow parent = Flow.builder()
.namespace("io.kestra.ee") .namespace("io.kestra.ee")
.id("parent") .id("parent")
.revision(1) .revision(1)
@@ -55,58 +54,54 @@ class FlowTopologyServiceTest {
)) ))
.build() .build()
)) ))
.build() .build();
.withSource(null);
FlowWithSource child = Flow.builder() FlowWithSource child = FlowWithSource.builder()
.namespace("io.kestra.ee") .namespace("io.kestra.ee")
.id("child") .id("child")
.revision(1) .revision(1)
.tasks(List.of(returnTask())) .tasks(List.of(returnTask()))
.build() .build();
.withSource(null);
assertThat(flowTopologyService.isChild(parent, child), is(FlowRelation.FLOW_TASK)); assertThat(flowTopologyService.isChild(parent, child), is(FlowRelation.FLOW_TASK));
} }
@Test @Test
void noRelation() { void noRelation() {
FlowWithSource parent = Flow.builder() Flow parent = Flow.builder()
.namespace("io.kestra.ee") .namespace("io.kestra.ee")
.id("parent") .id("parent")
.revision(1) .revision(1)
.tasks(List.of(returnTask())) .tasks(List.of(returnTask()))
.build() .build();
.withSource(null);
FlowWithSource child = Flow.builder() Flow child = Flow.builder()
.namespace("io.kestra.ee") .namespace("io.kestra.ee")
.id("child") .id("child")
.revision(1) .revision(1)
.tasks(List.of(returnTask())) .tasks(List.of(returnTask()))
.build() .build();
.withSource(null);
assertThat(flowTopologyService.isChild(parent, child), nullValue()); assertThat(flowTopologyService.isChild(parent, child), nullValue());
} }
@Test @Test
void trigger() { void trigger() {
FlowWithSource parent = Flow.builder() Flow parent = Flow.builder()
.namespace("io.kestra.ee") .namespace("io.kestra.ee")
.id("parent") .id("parent")
.revision(1) .revision(1)
.tasks(List.of(returnTask())) .tasks(List.of(returnTask()))
.build() .build();
.withSource(null);
FlowWithSource child = Flow.builder() Flow child = Flow.builder()
.namespace("io.kestra.ee") .namespace("io.kestra.ee")
.id("child") .id("child")
.revision(1) .revision(1)
.tasks(List.of(returnTask())) .tasks(List.of(returnTask()))
.triggers(List.of( .triggers(List.of(
io.kestra.plugin.core.trigger.Flow.builder() io.kestra.plugin.core.trigger.Flow.builder()
.type(io.kestra.plugin.core.trigger.Flow.class.getName())
.conditions(List.of( .conditions(List.of(
ExecutionFlow.builder() ExecutionFlow.builder()
.namespace(Property.of("io.kestra.ee")) .namespace(Property.of("io.kestra.ee"))
@@ -118,42 +113,42 @@ class FlowTopologyServiceTest {
)) ))
.build() .build()
)) ))
.build() .build();
.withSource(null);
assertThat(flowTopologyService.isChild(parent, child), is(FlowRelation.FLOW_TRIGGER)); assertThat(flowTopologyService.isChild(parent, child), is(FlowRelation.FLOW_TRIGGER));
} }
@Test @Test
void multipleCondition() { void multipleCondition() {
FlowWithSource parent = Flow.builder() Flow parent = Flow.builder()
.namespace("io.kestra.ee") .namespace("io.kestra.ee")
.id("parent") .id("parent")
.revision(1) .revision(1)
.tasks(List.of(returnTask())) .tasks(List.of(returnTask()))
.build() .build();
.withSource(null);
FlowWithSource noTrigger = Flow.builder() Flow noTrigger = Flow.builder()
.namespace("io.kestra.exclude") .namespace("io.kestra.exclude")
.id("no") .id("no")
.revision(1) .revision(1)
.tasks(List.of(returnTask())) .tasks(List.of(returnTask()))
.build() .build();
.withSource(null);
FlowWithSource child = Flow.builder() Flow child = Flow.builder()
.namespace("io.kestra.ee") .namespace("io.kestra.ee")
.id("child") .id("child")
.revision(1) .revision(1)
.tasks(List.of(returnTask())) .tasks(List.of(returnTask()))
.triggers(List.of( .triggers(List.of(
io.kestra.plugin.core.trigger.Flow.builder() io.kestra.plugin.core.trigger.Flow.builder()
.type(io.kestra.plugin.core.trigger.Flow.class.getName())
.conditions(List.of( .conditions(List.of(
ExecutionStatus.builder() ExecutionStatus.builder()
.in(Property.of(List.of(State.Type.SUCCESS))) .in(Property.of(List.of(State.Type.SUCCESS)))
.type(ExecutionStatus.class.getName())
.build(), .build(),
MultipleCondition.builder() MultipleCondition.builder()
.type(MultipleCondition.class.getName())
.conditions(Map.of( .conditions(Map.of(
"first", ExecutionFlow.builder() "first", ExecutionFlow.builder()
.namespace(Property.of("io.kestra.ee")) .namespace(Property.of("io.kestra.ee"))
@@ -175,8 +170,7 @@ class FlowTopologyServiceTest {
)) ))
.build() .build()
)) ))
.build() .build();
.withSource(null);
assertThat(flowTopologyService.isChild(parent, child), is(FlowRelation.FLOW_TRIGGER)); assertThat(flowTopologyService.isChild(parent, child), is(FlowRelation.FLOW_TRIGGER));
@@ -185,29 +179,28 @@ class FlowTopologyServiceTest {
@Test @Test
void preconditions() { void preconditions() {
FlowWithSource parent = Flow.builder() Flow parent = Flow.builder()
.namespace("io.kestra.ee") .namespace("io.kestra.ee")
.id("parent") .id("parent")
.revision(1) .revision(1)
.tasks(List.of(returnTask())) .tasks(List.of(returnTask()))
.build() .build();
.withSource(null);
FlowWithSource noTrigger = Flow.builder() Flow noTrigger = Flow.builder()
.namespace("io.kestra.exclude") .namespace("io.kestra.exclude")
.id("no") .id("no")
.revision(1) .revision(1)
.tasks(List.of(returnTask())) .tasks(List.of(returnTask()))
.build() .build();
.withSource(null);
FlowWithSource child = Flow.builder() Flow child = Flow.builder()
.namespace("io.kestra.ee") .namespace("io.kestra.ee")
.id("child") .id("child")
.revision(1) .revision(1)
.tasks(List.of(returnTask())) .tasks(List.of(returnTask()))
.triggers(List.of( .triggers(List.of(
io.kestra.plugin.core.trigger.Flow.builder() io.kestra.plugin.core.trigger.Flow.builder()
.type(io.kestra.plugin.core.trigger.Flow.class.getName())
.preconditions(io.kestra.plugin.core.trigger.Flow.Preconditions.builder() .preconditions(io.kestra.plugin.core.trigger.Flow.Preconditions.builder()
.flows(List.of( .flows(List.of(
io.kestra.plugin.core.trigger.Flow.UpstreamFlow.builder().namespace("io.kestra.ee").flowId("parent").build(), io.kestra.plugin.core.trigger.Flow.UpstreamFlow.builder().namespace("io.kestra.ee").flowId("parent").build(),
@@ -217,8 +210,7 @@ class FlowTopologyServiceTest {
) )
.build() .build()
)) ))
.build() .build();
.withSource(null);
assertThat(flowTopologyService.isChild(parent, child), is(FlowRelation.FLOW_TRIGGER)); assertThat(flowTopologyService.isChild(parent, child), is(FlowRelation.FLOW_TRIGGER));
@@ -226,16 +218,15 @@ class FlowTopologyServiceTest {
} }
@Test @Test
void self1() { void self1() throws IOException {
FlowWithSource flow = parse("flows/valids/trigger-multiplecondition-listener.yaml").toBuilder().revision(1).build().withSource(null); Flow flow = parse("flows/valids/trigger-multiplecondition-listener.yaml").toBuilder().revision(1).build();
assertThat(flowTopologyService.isChild(flow, flow), nullValue()); assertThat(flowTopologyService.isChild(flow, flow), nullValue());
} }
@Test @Test
void self() { void self() throws IOException {
FlowWithSource flow = parse("flows/valids/trigger-flow-listener.yaml").toBuilder().revision(1).build().withSource(null); Flow flow = parse("flows/valids/trigger-flow-listener.yaml").toBuilder().revision(1).build();
assertThat(flowTopologyService.isChild(flow, flow), nullValue()); assertThat(flowTopologyService.isChild(flow, flow), nullValue());
} }
@@ -247,12 +238,12 @@ class FlowTopologyServiceTest {
.build(); .build();
} }
private Flow parse(String path) { private Flow parse(String path) throws IOException {
URL resource = TestsUtils.class.getClassLoader().getResource(path); URL resource = TestsUtils.class.getClassLoader().getResource(path);
assert resource != null; assert resource != null;
File file = new File(resource.getFile()); File file = new File(resource.getFile());
return yamlParser.parse(file, Flow.class); return YamlParser.parse(Files.readString(file.toPath()), Flow.class);
} }
} }

View File

@@ -21,8 +21,6 @@ import static org.hamcrest.Matchers.is;
class FlowValidationTest { class FlowValidationTest {
@Inject @Inject
private ModelValidator modelValidator; private ModelValidator modelValidator;
@Inject
private YamlParser yamlParser;
@Test @Test
void invalidRecursiveFlow() { void invalidRecursiveFlow() {
@@ -57,6 +55,6 @@ class FlowValidationTest {
File file = new File(resource.getFile()); File file = new File(resource.getFile());
return yamlParser.parse(file, Flow.class); return YamlParser.parse(file, Flow.class);
} }
} }

View File

@@ -28,8 +28,6 @@ import org.junit.jupiter.api.Test;
@KestraTest(startRunner = true) @KestraTest(startRunner = true)
public class DagTest { public class DagTest {
@Inject
YamlParser yamlParser = new YamlParser();
@Inject @Inject
ModelValidator modelValidator; ModelValidator modelValidator;
@@ -96,6 +94,6 @@ public class DagTest {
File file = new File(resource.getFile()); File file = new File(resource.getFile());
return yamlParser.parse(file, Flow.class); return YamlParser.parse(file, Flow.class);
} }
} }

View File

@@ -5,13 +5,13 @@ import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.LogEntry; import io.kestra.core.models.executions.LogEntry;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.State; import io.kestra.core.models.flows.State;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.models.property.Property; import io.kestra.core.models.property.Property;
import io.kestra.core.queues.QueueException; import io.kestra.core.queues.QueueException;
import io.kestra.core.queues.QueueFactoryInterface; import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface; import io.kestra.core.queues.QueueInterface;
import io.kestra.core.repositories.FlowRepositoryInterface; import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.runners.RunnerUtils; import io.kestra.core.runners.RunnerUtils;
import io.kestra.core.services.PluginDefaultService;
import io.kestra.core.utils.IdUtils; import io.kestra.core.utils.IdUtils;
import io.kestra.core.utils.TestsUtils; import io.kestra.core.utils.TestsUtils;
import jakarta.inject.Inject; import jakarta.inject.Inject;
@@ -33,9 +33,6 @@ class TimeoutTest {
@Inject @Inject
FlowRepositoryInterface flowRepository; FlowRepositoryInterface flowRepository;
@Inject
PluginDefaultService pluginDefaultService;
@Inject @Inject
@Named(QueueFactoryInterface.WORKERTASKLOG_NAMED) @Named(QueueFactoryInterface.WORKERTASKLOG_NAMED)
private QueueInterface<LogEntry> workerTaskLogQueue; private QueueInterface<LogEntry> workerTaskLogQueue;
@@ -60,7 +57,7 @@ class TimeoutTest {
.build())) .build()))
.build(); .build();
flowRepository.create(flow, flow.generateSource(), pluginDefaultService.injectDefaults(flow.withSource(flow.generateSource()))); flowRepository.create(GenericFlow.of(flow));
Execution execution = runnerUtils.runOne(flow.getTenantId(), flow.getNamespace(), flow.getId()); Execution execution = runnerUtils.runOne(flow.getTenantId(), flow.getNamespace(), flow.getId());

View File

@@ -2,6 +2,7 @@ package io.kestra.repository.h2;
import io.kestra.core.models.QueryFilter; import io.kestra.core.models.QueryFilter;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.jdbc.repository.AbstractJdbcFlowRepository; import io.kestra.jdbc.repository.AbstractJdbcFlowRepository;
import io.micronaut.context.ApplicationContext; import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject; import jakarta.inject.Inject;
@@ -16,7 +17,7 @@ import java.util.Map;
@H2RepositoryEnabled @H2RepositoryEnabled
public class H2FlowRepository extends AbstractJdbcFlowRepository { public class H2FlowRepository extends AbstractJdbcFlowRepository {
@Inject @Inject
public H2FlowRepository(@Named("flows") H2Repository<Flow> repository, public H2FlowRepository(@Named("flows") H2Repository<FlowInterface> repository,
ApplicationContext applicationContext) { ApplicationContext applicationContext) {
super(repository, applicationContext); super(repository, applicationContext);
} }

View File

@@ -2,6 +2,7 @@ package io.kestra.repository.h2;
import io.kestra.core.models.QueryFilter; import io.kestra.core.models.QueryFilter;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.jdbc.AbstractJdbcRepository; import io.kestra.jdbc.AbstractJdbcRepository;
import org.jooq.*; import org.jooq.*;
import org.jooq.impl.DSL; import org.jooq.impl.DSL;
@@ -14,7 +15,7 @@ import static io.kestra.core.models.QueryFilter.Op.EQUALS;
import static io.kestra.jdbc.repository.AbstractJdbcRepository.field; import static io.kestra.jdbc.repository.AbstractJdbcRepository.field;
public abstract class H2FlowRepositoryService { public abstract class H2FlowRepositoryService {
public static Condition findCondition(AbstractJdbcRepository<Flow> jdbcRepository, String query, Map<String, String> labels) { public static Condition findCondition(AbstractJdbcRepository<? extends FlowInterface> jdbcRepository, String query, Map<String, String> labels) {
List<Condition> conditions = new ArrayList<>(); List<Condition> conditions = new ArrayList<>();
if (query != null) { if (query != null) {
@@ -35,7 +36,7 @@ public abstract class H2FlowRepositoryService {
return conditions.isEmpty() ? DSL.trueCondition() : DSL.and(conditions); return conditions.isEmpty() ? DSL.trueCondition() : DSL.and(conditions);
} }
public static Condition findSourceCodeCondition(AbstractJdbcRepository<Flow> jdbcRepository, String query) { public static Condition findSourceCodeCondition(AbstractJdbcRepository<? extends FlowInterface> jdbcRepository, String query) {
return jdbcRepository.fullTextCondition(List.of("source_code"), query); return jdbcRepository.fullTextCondition(List.of("source_code"), query);
} }

View File

@@ -28,7 +28,7 @@ public class H2Queue<T> extends JdbcQueue<T> {
AbstractJdbcRepository.field("offset") AbstractJdbcRepository.field("offset")
) )
.from(this.table) .from(this.table)
.where(AbstractJdbcRepository.field("type").eq(this.cls.getName())) .where(AbstractJdbcRepository.field("type").eq(queueType()))
.and(DSL.or(List.of( .and(DSL.or(List.of(
AbstractJdbcRepository.field("consumers").isNull(), AbstractJdbcRepository.field("consumers").isNull(),
DSL.condition("NOT(ARRAY_CONTAINS(\"consumers\", ?))", queueType) DSL.condition("NOT(ARRAY_CONTAINS(\"consumers\", ?))", queueType)

View File

@@ -4,6 +4,7 @@ import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.ExecutionKilled; import io.kestra.core.models.executions.ExecutionKilled;
import io.kestra.core.models.executions.LogEntry; import io.kestra.core.models.executions.LogEntry;
import io.kestra.core.models.executions.MetricEntry; import io.kestra.core.models.executions.MetricEntry;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithSource; import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.templates.Template; import io.kestra.core.models.templates.Template;
import io.kestra.core.models.triggers.Trigger; import io.kestra.core.models.triggers.Trigger;
@@ -87,8 +88,8 @@ public class H2QueueFactory implements QueueFactoryInterface {
@Singleton @Singleton
@Named(QueueFactoryInterface.FLOW_NAMED) @Named(QueueFactoryInterface.FLOW_NAMED)
@Bean(preDestroy = "close") @Bean(preDestroy = "close")
public QueueInterface<FlowWithSource> flow() { public QueueInterface<FlowInterface> flow() {
return new H2Queue<>(FlowWithSource.class, applicationContext); return new H2Queue<>(FlowInterface.class, applicationContext);
} }
@Override @Override
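With the factory bean retyped, every injection point that previously asked for QueueInterface&lt;FlowWithSource&gt; under the FLOW_NAMED qualifier has to follow suit; the H2FlowListenersTest change below shows exactly this adjustment. A minimal consumer-side sketch using only the qualifier already present in this diff:

// The qualifier is unchanged; only the payload type moves from the concrete
// FlowWithSource to the new FlowInterface abstraction.
@Inject
@Named(QueueFactoryInterface.FLOW_NAMED)
QueueInterface<FlowInterface> flowQueue;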

View File

@@ -0,0 +1,40 @@
ALTER TABLE queues ALTER COLUMN "type" ENUM(
'io.kestra.core.models.executions.Execution',
'io.kestra.core.models.templates.Template',
'io.kestra.core.models.executions.ExecutionKilled',
'io.kestra.core.runners.WorkerJob',
'io.kestra.core.runners.WorkerTaskResult',
'io.kestra.core.runners.WorkerInstance',
'io.kestra.core.runners.WorkerTaskRunning',
'io.kestra.core.models.executions.LogEntry',
'io.kestra.core.models.triggers.Trigger',
'io.kestra.ee.models.audits.AuditLog',
'io.kestra.core.models.executions.MetricEntry',
'io.kestra.core.runners.WorkerTriggerResult',
'io.kestra.core.runners.SubflowExecutionResult',
'io.kestra.core.models.flows.FlowWithSource',
'io.kestra.core.server.ClusterEvent',
'io.kestra.core.runners.SubflowExecutionEnd',
'io.kestra.core.models.flows.FlowInterface'
) NOT NULL;
UPDATE queues set "type" = 'io.kestra.core.models.flows.FlowInterface' WHERE "type" = 'io.kestra.core.models.flows.FlowWithSource';
ALTER TABLE queues ALTER COLUMN "type" ENUM(
'io.kestra.core.models.executions.Execution',
'io.kestra.core.models.templates.Template',
'io.kestra.core.models.executions.ExecutionKilled',
'io.kestra.core.runners.WorkerJob',
'io.kestra.core.runners.WorkerTaskResult',
'io.kestra.core.runners.WorkerInstance',
'io.kestra.core.runners.WorkerTaskRunning',
'io.kestra.core.models.executions.LogEntry',
'io.kestra.core.models.triggers.Trigger',
'io.kestra.ee.models.audits.AuditLog',
'io.kestra.core.models.executions.MetricEntry',
'io.kestra.core.runners.WorkerTriggerResult',
'io.kestra.core.runners.SubflowExecutionResult',
'io.kestra.core.server.ClusterEvent',
'io.kestra.core.runners.SubflowExecutionEnd',
'io.kestra.core.models.flows.FlowInterface'
) NOT NULL;

View File

@@ -1,12 +1,12 @@
package io.kestra.runner.h2; package io.kestra.runner.h2;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.queues.QueueFactoryInterface; import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface; import io.kestra.core.queues.QueueInterface;
import io.kestra.core.repositories.FlowRepositoryInterface; import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.runners.FlowListeners; import io.kestra.core.runners.FlowListeners;
import io.kestra.core.runners.FlowListenersTest; import io.kestra.core.runners.FlowListenersTest;
import io.kestra.core.services.PluginDefaultService;
import io.kestra.jdbc.JdbcTestUtils; import io.kestra.jdbc.JdbcTestUtils;
import io.kestra.jdbc.JooqDSLContextWrapper; import io.kestra.jdbc.JooqDSLContextWrapper;
import jakarta.inject.Inject; import jakarta.inject.Inject;
@@ -26,12 +26,15 @@ class H2FlowListenersTest extends FlowListenersTest {
@Inject @Inject
@Named(QueueFactoryInterface.FLOW_NAMED) @Named(QueueFactoryInterface.FLOW_NAMED)
QueueInterface<FlowWithSource> flowQueue; QueueInterface<FlowInterface> flowQueue;
@Inject
PluginDefaultService pluginDefaultService;
@Test @Test
public void all() { public void all() {
// we don't inject FlowListeners to remove a flaky test // we don't inject FlowListeners to remove a flaky test
this.suite(new FlowListeners(flowRepository, flowQueue)); this.suite(new FlowListeners(flowRepository, flowQueue, pluginDefaultService));
} }
@BeforeEach @BeforeEach

View File

@@ -2,6 +2,7 @@ package io.kestra.repository.mysql;
import io.kestra.core.models.QueryFilter; import io.kestra.core.models.QueryFilter;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.jdbc.repository.AbstractJdbcFlowRepository; import io.kestra.jdbc.repository.AbstractJdbcFlowRepository;
import io.micronaut.context.ApplicationContext; import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject; import jakarta.inject.Inject;
@@ -16,7 +17,7 @@ import java.util.Map;
@MysqlRepositoryEnabled @MysqlRepositoryEnabled
public class MysqlFlowRepository extends AbstractJdbcFlowRepository { public class MysqlFlowRepository extends AbstractJdbcFlowRepository {
@Inject @Inject
public MysqlFlowRepository(@Named("flows") MysqlRepository<Flow> repository, public MysqlFlowRepository(@Named("flows") MysqlRepository<FlowInterface> repository,
ApplicationContext applicationContext) { ApplicationContext applicationContext) {
super(repository, applicationContext); super(repository, applicationContext);
} }

View File

@@ -2,6 +2,7 @@ package io.kestra.repository.mysql;
import io.kestra.core.models.QueryFilter; import io.kestra.core.models.QueryFilter;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.jdbc.AbstractJdbcRepository; import io.kestra.jdbc.AbstractJdbcRepository;
import org.jooq.Condition; import org.jooq.Condition;
import org.jooq.Field; import org.jooq.Field;
@@ -12,7 +13,7 @@ import java.util.*;
import static io.kestra.core.models.QueryFilter.Op.EQUALS; import static io.kestra.core.models.QueryFilter.Op.EQUALS;
public abstract class MysqlFlowRepositoryService { public abstract class MysqlFlowRepositoryService {
public static Condition findCondition(AbstractJdbcRepository<Flow> jdbcRepository, String query, Map<String, String> labels) { public static Condition findCondition(AbstractJdbcRepository<? extends FlowInterface> jdbcRepository, String query, Map<String, String> labels) {
List<Condition> conditions = new ArrayList<>(); List<Condition> conditions = new ArrayList<>();
if (query != null) { if (query != null) {
@@ -29,7 +30,7 @@ public abstract class MysqlFlowRepositoryService {
return conditions.isEmpty() ? DSL.trueCondition() : DSL.and(conditions); return conditions.isEmpty() ? DSL.trueCondition() : DSL.and(conditions);
} }
public static Condition findSourceCodeCondition(AbstractJdbcRepository<Flow> jdbcRepository, String query) { public static Condition findSourceCodeCondition(AbstractJdbcRepository<? extends FlowInterface> jdbcRepository, String query) {
return jdbcRepository.fullTextCondition(Collections.singletonList("source_code"), query); return jdbcRepository.fullTextCondition(Collections.singletonList("source_code"), query);
} }

View File

@@ -16,6 +16,7 @@ import org.jooq.DSLContext;
import org.jooq.Field; import org.jooq.Field;
import org.jooq.Record; import org.jooq.Record;
import org.jooq.RecordMapper; import org.jooq.RecordMapper;
import org.jooq.Result;
import org.jooq.Select; import org.jooq.Select;
import org.jooq.SelectConditionStep; import org.jooq.SelectConditionStep;
import org.jooq.impl.DSL; import org.jooq.impl.DSL;
@@ -58,15 +59,14 @@ public class MysqlRepository<T> extends AbstractJdbcRepository<T> {
return DSL.condition("MATCH (" + String.join(", ", fields) + ") AGAINST (? IN BOOLEAN MODE)", match); return DSL.condition("MATCH (" + String.join(", ", fields) + ") AGAINST (? IN BOOLEAN MODE)", match);
} }
@Override
public <R extends Record, E> ArrayListTotal<E> fetchPage(DSLContext context, SelectConditionStep<R> select, Pageable pageable, RecordMapper<R, E> mapper) { public <R extends Record, E> ArrayListTotal<E> fetchPage(DSLContext context, SelectConditionStep<R> select, Pageable pageable, RecordMapper<R, E> mapper) {
List<E> map = this.pageable(select, pageable) Result<R> records = this.pageable(select, pageable).fetch();
.fetch()
.map(mapper);
return dslContextWrapper.transactionResult(configuration -> new ArrayListTotal<>( return dslContextWrapper.transactionResult(configuration -> {
map, Integer rows = context.fetchOne("SELECT FOUND_ROWS()").into(Integer.class);
DSL.using(configuration).fetchOne("SELECT FOUND_ROWS()").into(Integer.class) return new ArrayListTotal<>(records.map(mapper), rows);
)); });
} }
@Override @Override
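The fetchPage rewrite above changes where the total row count is read. FOUND_ROWS() is connection-scoped in MySQL: it reports the row count of the immediately preceding statement (presumably issued with SQL_CALC_FOUND_ROWS) on the same connection, so resolving it through a freshly obtained DSL.using(configuration) handle inside a new transaction risks hitting a different pooled connection and returning a meaningless value. A reduced sketch of the constraint, with only context.fetchOne(...).into(Integer.class) taken from the diff:

// Illustrative only: the paged select and the FOUND_ROWS() read must share a
// connection, so the count is taken from the same DSLContext that ran the select.
Result<R> records = this.pageable(select, pageable).fetch();
Integer total = context.fetchOne("SELECT FOUND_ROWS()").into(Integer.class);
return new ArrayListTotal<>(records.map(mapper), total);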

View File

@@ -37,7 +37,7 @@ public class MysqlQueue<T> extends JdbcQueue<T> {
) )
// force using the dedicated index, or it made a scan of the PK index // force using the dedicated index, or it made a scan of the PK index
.from(this.table.useIndex("ix_type__consumers")) .from(this.table.useIndex("ix_type__consumers"))
.where(AbstractJdbcRepository.field("type").eq(this.cls.getName())) .where(AbstractJdbcRepository.field("type").eq(queueType()))
.and(DSL.or(List.of( .and(DSL.or(List.of(
AbstractJdbcRepository.field("consumers").isNull(), AbstractJdbcRepository.field("consumers").isNull(),
AbstractJdbcRepository.field("consumers").in(QUEUE_CONSUMERS.allForConsumerNotIn(queueType)) AbstractJdbcRepository.field("consumers").in(QUEUE_CONSUMERS.allForConsumerNotIn(queueType))

View File

@@ -4,6 +4,7 @@ import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.ExecutionKilled; import io.kestra.core.models.executions.ExecutionKilled;
import io.kestra.core.models.executions.LogEntry; import io.kestra.core.models.executions.LogEntry;
import io.kestra.core.models.executions.MetricEntry; import io.kestra.core.models.executions.MetricEntry;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithSource; import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.templates.Template; import io.kestra.core.models.templates.Template;
import io.kestra.core.models.triggers.Trigger; import io.kestra.core.models.triggers.Trigger;
@@ -87,8 +88,8 @@ public class MysqlQueueFactory implements QueueFactoryInterface {
@Singleton @Singleton
@Named(QueueFactoryInterface.FLOW_NAMED) @Named(QueueFactoryInterface.FLOW_NAMED)
@Bean(preDestroy = "close") @Bean(preDestroy = "close")
public QueueInterface<FlowWithSource> flow() { public QueueInterface<FlowInterface> flow() {
return new MysqlQueue<>(FlowWithSource.class, applicationContext); return new MysqlQueue<>(FlowInterface.class, applicationContext);
} }
@Override @Override

View File

@@ -0,0 +1,40 @@
ALTER TABLE queues MODIFY COLUMN `type` ENUM(
'io.kestra.core.models.executions.Execution',
'io.kestra.core.models.templates.Template',
'io.kestra.core.models.executions.ExecutionKilled',
'io.kestra.core.runners.WorkerJob',
'io.kestra.core.runners.WorkerTaskResult',
'io.kestra.core.runners.WorkerInstance',
'io.kestra.core.runners.WorkerTaskRunning',
'io.kestra.core.models.executions.LogEntry',
'io.kestra.core.models.triggers.Trigger',
'io.kestra.ee.models.audits.AuditLog',
'io.kestra.core.models.executions.MetricEntry',
'io.kestra.core.runners.WorkerTriggerResult',
'io.kestra.core.runners.SubflowExecutionResult',
'io.kestra.core.models.flows.FlowWithSource',
'io.kestra.core.server.ClusterEvent',
'io.kestra.core.runners.SubflowExecutionEnd',
'io.kestra.core.models.flows.FlowInterface'
) NOT NULL;
UPDATE queues set `type` = 'io.kestra.core.models.flows.FlowInterface' WHERE `type` = 'io.kestra.core.models.flows.FlowWithSource';
ALTER TABLE queues MODIFY COLUMN `type` ENUM(
'io.kestra.core.models.executions.Execution',
'io.kestra.core.models.templates.Template',
'io.kestra.core.models.executions.ExecutionKilled',
'io.kestra.core.runners.WorkerJob',
'io.kestra.core.runners.WorkerTaskResult',
'io.kestra.core.runners.WorkerInstance',
'io.kestra.core.runners.WorkerTaskRunning',
'io.kestra.core.models.executions.LogEntry',
'io.kestra.core.models.triggers.Trigger',
'io.kestra.ee.models.audits.AuditLog',
'io.kestra.core.models.executions.MetricEntry',
'io.kestra.core.runners.WorkerTriggerResult',
'io.kestra.core.runners.SubflowExecutionResult',
'io.kestra.core.server.ClusterEvent',
'io.kestra.core.runners.SubflowExecutionEnd',
'io.kestra.core.models.flows.FlowInterface'
) NOT NULL;

View File

@@ -2,6 +2,7 @@ package io.kestra.repository.postgres;
import io.kestra.core.models.QueryFilter; import io.kestra.core.models.QueryFilter;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.jdbc.repository.AbstractJdbcFlowRepository; import io.kestra.jdbc.repository.AbstractJdbcFlowRepository;
import io.micronaut.context.ApplicationContext; import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject; import jakarta.inject.Inject;
@@ -16,7 +17,7 @@ import java.util.Map;
@PostgresRepositoryEnabled @PostgresRepositoryEnabled
public class PostgresFlowRepository extends AbstractJdbcFlowRepository { public class PostgresFlowRepository extends AbstractJdbcFlowRepository {
@Inject @Inject
public PostgresFlowRepository(@Named("flows") PostgresRepository<Flow> repository, public PostgresFlowRepository(@Named("flows") PostgresRepository<FlowInterface> repository,
ApplicationContext applicationContext) { ApplicationContext applicationContext) {
super(repository, applicationContext); super(repository, applicationContext);
} }

View File

@@ -2,6 +2,7 @@ package io.kestra.repository.postgres;
import io.kestra.core.models.QueryFilter; import io.kestra.core.models.QueryFilter;
import io.kestra.core.models.flows.Flow; import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowScope; import io.kestra.core.models.flows.FlowScope;
import io.kestra.jdbc.AbstractJdbcRepository; import io.kestra.jdbc.AbstractJdbcRepository;
import org.jooq.Condition; import org.jooq.Condition;
@@ -14,7 +15,7 @@ import static io.kestra.jdbc.repository.AbstractJdbcRepository.field;
import static io.kestra.jdbc.repository.AbstractJdbcTriggerRepository.NAMESPACE_FIELD; import static io.kestra.jdbc.repository.AbstractJdbcTriggerRepository.NAMESPACE_FIELD;
public abstract class PostgresFlowRepositoryService { public abstract class PostgresFlowRepositoryService {
public static Condition findCondition(AbstractJdbcRepository<Flow> jdbcRepository, String query, Map<String, String> labels) { public static Condition findCondition(AbstractJdbcRepository<? extends FlowInterface> jdbcRepository, String query, Map<String, String> labels) {
List<Condition> conditions = new ArrayList<>(); List<Condition> conditions = new ArrayList<>();
if (query != null) { if (query != null) {
@@ -31,7 +32,7 @@ public abstract class PostgresFlowRepositoryService {
return conditions.isEmpty() ? DSL.trueCondition() : DSL.and(conditions); return conditions.isEmpty() ? DSL.trueCondition() : DSL.and(conditions);
} }
public static Condition findSourceCodeCondition(AbstractJdbcRepository<Flow> jdbcRepository, String query) { public static Condition findSourceCodeCondition(AbstractJdbcRepository<? extends FlowInterface> jdbcRepository, String query) {
return jdbcRepository.fullTextCondition(Collections.singletonList("FULLTEXT_INDEX(source_code)"), query); return jdbcRepository.fullTextCondition(Collections.singletonList("FULLTEXT_INDEX(source_code)"), query);
} }

View File

@@ -37,7 +37,7 @@ public class PostgresQueue<T> extends JdbcQueue<T> {
map.put( map.put(
AbstractJdbcRepository.field("type"), AbstractJdbcRepository.field("type"),
DSL.field("CAST(? AS queue_type)", this.cls.getName()) DSL.field("CAST(? AS queue_type)", queueType())
); );
return map; return map;
@@ -59,7 +59,7 @@ public class PostgresQueue<T> extends JdbcQueue<T> {
AbstractJdbcRepository.field("offset") AbstractJdbcRepository.field("offset")
) )
.from(this.table) .from(this.table)
.where(DSL.condition("type = CAST(? AS queue_type)", this.cls.getName())) .where(DSL.condition("type = CAST(? AS queue_type)", queueType()))
.and(AbstractJdbcRepository.field("consumer_" + queueType, Boolean.class).isFalse()); .and(AbstractJdbcRepository.field("consumer_" + queueType, Boolean.class).isFalse());
if (consumerGroup != null) { if (consumerGroup != null) {

View File

@@ -4,6 +4,7 @@ import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.ExecutionKilled; import io.kestra.core.models.executions.ExecutionKilled;
import io.kestra.core.models.executions.LogEntry; import io.kestra.core.models.executions.LogEntry;
import io.kestra.core.models.executions.MetricEntry; import io.kestra.core.models.executions.MetricEntry;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithSource; import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.templates.Template; import io.kestra.core.models.templates.Template;
import io.kestra.core.models.triggers.Trigger; import io.kestra.core.models.triggers.Trigger;
@@ -87,8 +88,8 @@ public class PostgresQueueFactory implements QueueFactoryInterface {
@Singleton @Singleton
@Named(QueueFactoryInterface.FLOW_NAMED) @Named(QueueFactoryInterface.FLOW_NAMED)
@Bean(preDestroy = "close") @Bean(preDestroy = "close")
public QueueInterface<FlowWithSource> flow() { public QueueInterface<FlowInterface> flow() {
return new PostgresQueue<>(FlowWithSource.class, applicationContext); return new PostgresQueue<>(FlowInterface.class, applicationContext);
} }
@Override @Override

View File

@@ -0,0 +1,9 @@
DO $$
BEGIN
BEGIN
ALTER TYPE queue_type RENAME VALUE 'io.kestra.core.models.flows.FlowWithSource' TO 'io.kestra.core.models.flows.FlowInterface';
EXCEPTION
WHEN invalid_parameter_value THEN null;
END;
END;
$$;

Some files were not shown because too many files have changed in this diff.