refactor: add GenericFlow to support un-typed flow deserialization

Add new FlowId, FlowInterface and GenericFlow classes to support
deserialization of flows with un-typed plugins (i.e., tasks and triggers),
so that plugin defaults can be injected prior to strongly-typed deserialization.
Authored by Florian Hussonnois on 2025-03-14 14:33:21 +01:00
Committed by Florian Hussonnois
Parent: fc8732f96e
Commit: 8f29a72df7
124 changed files with 2420 additions and 1609 deletions
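For orientation, here is a minimal sketch of the parsing pipeline this commit introduces, mirroring the LocalFlowRepositoryLoader changes further down in this diff (GenericFlow.fromYaml, PluginDefaultService#injectAllDefaults, ModelValidator#validate, FlowRepositoryInterface#create). It is an illustration only; the injected beans and the path/tenantId variables are assumed to be in scope.

// Sketch: parse the raw YAML into the un-typed GenericFlow first, inject plugin
// defaults, validate the resulting strongly-typed flow, then persist it.
String source = Files.readString(path, Charset.defaultCharset());
GenericFlow parsed = GenericFlow.fromYaml(tenantId, source);
FlowWithSource withDefaults = pluginDefaultService.injectAllDefaults(parsed, false);
modelValidator.validate(withDefaults);
flowRepository.create(parsed);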

View File

@@ -74,10 +74,9 @@ public abstract class AbstractValidateCommand extends AbstractApiCommand {
}
}
// bug in micronaut, we can't inject YamlFlowParser & ModelValidator, so we inject from implementation
// bug in micronaut, we can't inject ModelValidator, so we inject from implementation
public Integer call(
Class<?> cls,
YamlParser yamlParser,
ModelValidator modelValidator,
Function<Object, String> identity,
Function<Object, List<String>> warningsFunction,
@@ -94,7 +93,7 @@ public abstract class AbstractValidateCommand extends AbstractApiCommand {
.filter(YamlParser::isValidExtension)
.forEach(path -> {
try {
Object parse = yamlParser.parse(path.toFile(), cls);
Object parse = YamlParser.parse(path.toFile(), cls);
modelValidator.validate(parse);
stdOut("@|green \u2713|@ - " + identity.apply(parse));
List<String> warnings = warningsFunction.apply(parse);

View File

@@ -29,8 +29,7 @@ public class FlowDotCommand extends AbstractCommand {
public Integer call() throws Exception {
super.call();
YamlParser parser = applicationContext.getBean(YamlParser.class);
Flow flow = parser.parse(file.toFile(), Flow.class);
Flow flow = YamlParser.parse(file.toFile(), Flow.class);
GraphCluster graph = GraphUtils.of(flow, null);

View File

@@ -20,9 +20,6 @@ public class FlowExpandCommand extends AbstractCommand {
@CommandLine.Parameters(index = "0", description = "The flow file to expand")
private Path file;
@Inject
private YamlParser yamlParser;
@Inject
private ModelValidator modelValidator;
@@ -31,7 +28,7 @@ public class FlowExpandCommand extends AbstractCommand {
super.call();
stdErr("Warning, this functionality is deprecated and will be removed at some point.");
String content = IncludeHelperExpander.expand(Files.readString(file), file.getParent());
Flow flow = yamlParser.parse(content, Flow.class);
Flow flow = YamlParser.parse(content, Flow.class);
modelValidator.validate(flow);
stdOut(content);
return 0;

View File

@@ -1,9 +1,8 @@
package io.kestra.cli.commands.flows;
import io.kestra.cli.AbstractValidateCommand;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.validations.ModelValidator;
import io.kestra.core.serializers.YamlParser;
import io.kestra.core.services.FlowService;
import jakarta.inject.Inject;
import picocli.CommandLine;
@@ -16,8 +15,6 @@ import java.util.List;
description = "Validate a flow"
)
public class FlowValidateCommand extends AbstractValidateCommand {
@Inject
private YamlParser yamlParser;
@Inject
private ModelValidator modelValidator;
@@ -28,23 +25,22 @@ public class FlowValidateCommand extends AbstractValidateCommand {
@Override
public Integer call() throws Exception {
return this.call(
Flow.class,
yamlParser,
FlowWithSource.class,
modelValidator,
(Object object) -> {
Flow flow = (Flow) object;
FlowWithSource flow = (FlowWithSource) object;
return flow.getNamespace() + " / " + flow.getId();
},
(Object object) -> {
Flow flow = (Flow) object;
FlowWithSource flow = (FlowWithSource) object;
List<String> warnings = new ArrayList<>();
warnings.addAll(flowService.deprecationPaths(flow).stream().map(deprecation -> deprecation + " is deprecated").toList());
warnings.addAll(flowService.warnings(flow, this.tenantId));
return warnings;
},
(Object object) -> {
Flow flow = (Flow) object;
return flowService.relocations(flow.generateSource()).stream().map(relocation -> relocation.from() + " is replaced by " + relocation.to()).toList();
FlowWithSource flow = (FlowWithSource) object;
return flowService.relocations(flow.sourceOrGenerateIfNull()).stream().map(relocation -> relocation.from() + " is replaced by " + relocation.to()).toList();
}
);
}

View File

@@ -10,7 +10,6 @@ import io.micronaut.http.MediaType;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.netty.DefaultHttpClient;
import jakarta.inject.Inject;
import jakarta.validation.ConstraintViolationException;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
@@ -27,8 +26,6 @@ import java.util.List;
)
@Slf4j
public class FlowNamespaceUpdateCommand extends AbstractServiceNamespaceUpdateCommand {
@Inject
public YamlParser yamlParser;
@CommandLine.Option(names = {"--override-namespaces"}, negatable = true, description = "Replace namespace of all flows by the one provided")
public boolean override = false;

View File

@@ -2,6 +2,7 @@ package io.kestra.cli.commands.sys;
import io.kestra.cli.AbstractCommand;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject;
@@ -9,6 +10,7 @@ import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
import java.util.List;
import java.util.Objects;
@CommandLine.Command(
name = "reindex",
@@ -33,8 +35,8 @@ public class ReindexCommand extends AbstractCommand {
List<Flow> allFlow = flowRepository.findAllForAllTenants();
allFlow.stream()
.map(flow -> flowRepository.findByIdWithSource(flow.getTenantId(), flow.getNamespace(), flow.getId()).orElse(null))
.filter(flow -> flow != null)
.forEach(flow -> flowRepository.update(flow.toFlow(), flow.toFlow(), flow.getSource(), flow.toFlow()));
.filter(Objects::nonNull)
.forEach(flow -> flowRepository.update(GenericFlow.of(flow), flow));
stdOut("Successfully reindexed " + allFlow.size() + " flow(s).");
}

View File

@@ -4,7 +4,6 @@ import io.kestra.cli.AbstractValidateCommand;
import io.kestra.core.models.templates.Template;
import io.kestra.core.models.templates.TemplateEnabled;
import io.kestra.core.models.validations.ModelValidator;
import io.kestra.core.serializers.YamlParser;
import jakarta.inject.Inject;
import picocli.CommandLine;
@@ -16,8 +15,6 @@ import java.util.Collections;
)
@TemplateEnabled
public class TemplateValidateCommand extends AbstractValidateCommand {
@Inject
private YamlParser yamlParser;
@Inject
private ModelValidator modelValidator;
@@ -26,7 +23,6 @@ public class TemplateValidateCommand extends AbstractValidateCommand {
public Integer call() throws Exception {
return this.call(
Template.class,
yamlParser,
modelValidator,
(Object object) -> {
Template template = (Template) object;

View File

@@ -10,7 +10,6 @@ import io.micronaut.http.HttpRequest;
import io.micronaut.http.MutableHttpRequest;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.http.client.netty.DefaultHttpClient;
import jakarta.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import picocli.CommandLine;
@@ -27,8 +26,6 @@ import jakarta.validation.ConstraintViolationException;
@Slf4j
@TemplateEnabled
public class TemplateNamespaceUpdateCommand extends AbstractServiceNamespaceUpdateCommand {
@Inject
public YamlParser yamlParser;
@Override
public Integer call() throws Exception {
@@ -38,7 +35,7 @@ public class TemplateNamespaceUpdateCommand extends AbstractServiceNamespaceUpda
List<Template> templates = files
.filter(Files::isRegularFile)
.filter(YamlParser::isValidExtension)
.map(path -> yamlParser.parse(path.toFile(), Template.class))
.map(path -> YamlParser.parse(path.toFile(), Template.class))
.toList();
if (templates.isEmpty()) {

View File

@@ -1,11 +1,12 @@
package io.kestra.cli.services;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.exceptions.DeserializationException;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithPath;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.models.validations.ModelValidator;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.serializers.YamlParser;
import io.kestra.core.services.FlowListenersInterface;
import io.kestra.core.services.PluginDefaultService;
import io.micronaut.context.annotation.Requires;
@@ -40,9 +41,6 @@ public class FileChangedEventListener {
@Inject
private PluginDefaultService pluginDefaultService;
@Inject
private YamlParser yamlParser;
@Inject
private ModelValidator modelValidator;
@@ -59,7 +57,6 @@ public class FileChangedEventListener {
private boolean isStarted = false;
@Inject
public FileChangedEventListener(@Nullable FileWatchConfiguration fileWatchConfiguration, @Nullable WatchService watchService) {
this.fileWatchConfiguration = fileWatchConfiguration;
@@ -68,7 +65,7 @@ public class FileChangedEventListener {
public void startListeningFromConfig() throws IOException, InterruptedException {
if (fileWatchConfiguration != null && fileWatchConfiguration.isEnabled()) {
this.flowFilesManager = new LocalFlowFileWatcher(flowRepositoryInterface, pluginDefaultService);
this.flowFilesManager = new LocalFlowFileWatcher(flowRepositoryInterface);
List<Path> paths = fileWatchConfiguration.getPaths();
this.setup(paths);
@@ -76,7 +73,7 @@ public class FileChangedEventListener {
// Init existing flows not already in files
flowListeners.listen(flows -> {
if (!isStarted) {
for (FlowWithSource flow : flows) {
for (FlowInterface flow : flows) {
if (this.flows.stream().noneMatch(flowWithPath -> flowWithPath.uidWithoutRevision().equals(flow.uidWithoutRevision()))) {
flowToFile(flow, this.buildPath(flow));
this.flows.add(FlowWithPath.of(flow, this.buildPath(flow).toString()));
@@ -137,7 +134,7 @@ public class FileChangedEventListener {
try {
String content = Files.readString(filePath, Charset.defaultCharset());
Optional<Flow> flow = parseFlow(content, entry);
Optional<FlowWithSource> flow = parseFlow(content, entry);
if (flow.isPresent()) {
if (kind == StandardWatchEventKinds.ENTRY_MODIFY) {
// Check if we already have a file with the given path
@@ -156,7 +153,7 @@ public class FileChangedEventListener {
flows.add(FlowWithPath.of(flow.get(), filePath.toString()));
}
flowFilesManager.createOrUpdateFlow(flow.get(), content);
flowFilesManager.createOrUpdateFlow(GenericFlow.fromYaml(tenantId, content));
log.info("Flow {} from file {} has been created or modified", flow.get().getId(), entry);
}
@@ -207,11 +204,11 @@ public class FileChangedEventListener {
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
if (file.toString().endsWith(".yml") || file.toString().endsWith(".yaml")) {
String content = Files.readString(file, Charset.defaultCharset());
Optional<Flow> flow = parseFlow(content, file);
Optional<FlowWithSource> flow = parseFlow(content, file);
if (flow.isPresent() && flows.stream().noneMatch(flowWithPath -> flowWithPath.uidWithoutRevision().equals(flow.get().uidWithoutRevision()))) {
flows.add(FlowWithPath.of(flow.get(), file.toString()));
flowFilesManager.createOrUpdateFlow(flow.get(), content);
flowFilesManager.createOrUpdateFlow(GenericFlow.fromYaml(tenantId, content));
}
}
return FileVisitResult.CONTINUE;
@@ -223,27 +220,25 @@ public class FileChangedEventListener {
}
}
private void flowToFile(FlowWithSource flow, Path path) {
private void flowToFile(FlowInterface flow, Path path) {
Path defaultPath = path != null ? path : this.buildPath(flow);
try {
Files.writeString(defaultPath, flow.getSource());
Files.writeString(defaultPath, flow.source());
log.info("Flow {} has been written to file {}", flow.getId(), defaultPath);
} catch (IOException e) {
log.error("Error writing file: {}", defaultPath, e);
}
}
private Optional<Flow> parseFlow(String content, Path entry) {
private Optional<FlowWithSource> parseFlow(String content, Path entry) {
try {
Flow flow = yamlParser.parse(content, Flow.class);
FlowWithSource withPluginDefault = pluginDefaultService.injectDefaults(FlowWithSource.of(flow, content));
modelValidator.validate(withPluginDefault);
FlowWithSource flow = pluginDefaultService.parseFlowWithAllDefaults(tenantId, content, false);
modelValidator.validate(flow);
return Optional.of(flow);
} catch (ConstraintViolationException e) {
} catch (DeserializationException | ConstraintViolationException e) {
log.warn("Error while parsing flow: {}", entry, e);
}
return Optional.empty();
}
@@ -259,7 +254,7 @@ public class FileChangedEventListener {
}
}
private Path buildPath(Flow flow) {
private Path buildPath(FlowInterface flow) {
return fileWatchConfiguration.getPaths().getFirst().resolve(flow.uidWithoutRevision() + ".yml");
}
}

View File

@@ -1,11 +1,11 @@
package io.kestra.cli.services;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.GenericFlow;
public interface FlowFilesManager {
FlowWithSource createOrUpdateFlow(Flow flow, String content);
FlowWithSource createOrUpdateFlow(GenericFlow flow);
void deleteFlow(FlowWithSource toDelete);

View File

@@ -1,27 +1,23 @@
package io.kestra.cli.services;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.services.PluginDefaultService;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class LocalFlowFileWatcher implements FlowFilesManager {
private final FlowRepositoryInterface flowRepository;
private final PluginDefaultService pluginDefaultService;
public LocalFlowFileWatcher(FlowRepositoryInterface flowRepository, PluginDefaultService pluginDefaultService) {
public LocalFlowFileWatcher(FlowRepositoryInterface flowRepository) {
this.flowRepository = flowRepository;
this.pluginDefaultService = pluginDefaultService;
}
@Override
public FlowWithSource createOrUpdateFlow(Flow flow, String content) {
FlowWithSource withDefault = pluginDefaultService.injectDefaults(FlowWithSource.of(flow, content));
public FlowWithSource createOrUpdateFlow(final GenericFlow flow) {
return flowRepository.findById(null, flow.getNamespace(), flow.getId())
.map(previous -> flowRepository.update(flow, previous, content, withDefault))
.orElseGet(() -> flowRepository.create(flow, content, withDefault));
.map(previous -> flowRepository.update(flow, previous))
.orElseGet(() -> flowRepository.create(flow));
}
@Override

View File

@@ -1,16 +1,15 @@
package io.kestra.cli.commands.sys.statestore;
import com.devskiller.friendly_id.FriendlyId;
import io.kestra.core.exceptions.MigrationRequiredException;
import io.kestra.core.exceptions.ResourceExpiredException;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.runners.RunContext;
import io.kestra.core.runners.RunContextFactory;
import io.kestra.core.storages.StateStore;
import io.kestra.core.storages.StorageInterface;
import io.kestra.core.utils.Hashing;
import io.kestra.core.utils.IdUtils;
import io.kestra.core.utils.Slugify;
import io.kestra.plugin.core.log.Log;
import io.micronaut.configuration.picocli.PicocliRunner;
@@ -27,7 +26,6 @@ import java.util.List;
import java.util.Map;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.core.Is.is;
class StateStoreMigrateCommandTest {
@@ -45,7 +43,7 @@ class StateStoreMigrateCommandTest {
.namespace("some.valid.namespace." + ((int) (Math.random() * 1000000)))
.tasks(List.of(Log.builder().id("log").type(Log.class.getName()).message("logging").build()))
.build();
flowRepository.create(flow, flow.generateSource(), flow);
flowRepository.create(GenericFlow.of(flow));
StorageInterface storage = ctx.getBean(StorageInterface.class);
String tenantId = flow.getTenantId();

View File

@@ -23,4 +23,5 @@ public class KestraRuntimeException extends RuntimeException {
public KestraRuntimeException(Throwable cause) {
super(cause);
}
}

View File

@@ -1,5 +1,6 @@
package io.kestra.core.models.conditions;
import io.kestra.core.models.flows.FlowInterface;
import lombok.*;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow;
@@ -18,7 +19,7 @@ import jakarta.validation.constraints.NotNull;
@AllArgsConstructor
public class ConditionContext {
@NotNull
private Flow flow;
private FlowInterface flow;
private Execution execution;

View File

@@ -14,6 +14,7 @@ import io.kestra.core.models.DeletedInterface;
import io.kestra.core.models.Label;
import io.kestra.core.models.TenantInterface;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.tasks.ResolvedTask;
import io.kestra.core.runners.FlowableUtils;
@@ -135,8 +136,8 @@ public class Execution implements DeletedInterface, TenantInterface {
* @param labels The Flow labels.
* @return a new {@link Execution}.
*/
public static Execution newExecution(final Flow flow,
final BiFunction<Flow, Execution, Map<String, Object>> inputs,
public static Execution newExecution(final FlowInterface flow,
final BiFunction<FlowInterface, Execution, Map<String, Object>> inputs,
final List<Label> labels,
final Optional<ZonedDateTime> scheduleDate) {
Execution execution = builder()

View File

@@ -1,8 +1,12 @@
package io.kestra.core.models.flows;
import io.kestra.core.models.DeletedInterface;
import io.kestra.core.models.TenantInterface;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import io.kestra.core.models.Label;
import io.kestra.core.serializers.ListOrMapOfLabelDeserializer;
import io.kestra.core.serializers.ListOrMapOfLabelSerializer;
import io.swagger.v3.oas.annotations.Hidden;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.Valid;
import jakarta.validation.constraints.*;
import lombok.Builder;
@@ -11,11 +15,13 @@ import lombok.NoArgsConstructor;
import lombok.experimental.SuperBuilder;
import java.util.List;
import java.util.Map;
@SuperBuilder(toBuilder = true)
@Getter
@NoArgsConstructor
public abstract class AbstractFlow implements DeletedInterface, TenantInterface {
@JsonDeserialize
public abstract class AbstractFlow implements FlowInterface {
@NotNull
@NotBlank
@Pattern(regexp = "^[a-zA-Z0-9][a-zA-Z0-9._-]*")
@@ -33,6 +39,9 @@ public abstract class AbstractFlow implements DeletedInterface, TenantInterface
@Valid
List<Input<?>> inputs;
@Valid
List<Output> outputs;
@NotNull
@Builder.Default
boolean disabled = false;
@@ -46,4 +55,11 @@ public abstract class AbstractFlow implements DeletedInterface, TenantInterface
@Pattern(regexp = "^[a-z0-9][a-z0-9_-]*")
String tenantId;
@JsonSerialize(using = ListOrMapOfLabelSerializer.class)
@JsonDeserialize(using = ListOrMapOfLabelDeserializer.class)
@Schema(implementation = Object.class, oneOf = {List.class, Map.class})
List<Label> labels;
Map<String, Object> variables;
}

View File

@@ -6,28 +6,20 @@ import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.introspect.AnnotatedMember;
import com.fasterxml.jackson.databind.introspect.JacksonAnnotationIntrospector;
import io.kestra.core.exceptions.InternalException;
import io.kestra.core.models.HasUID;
import io.kestra.core.models.Label;
import io.kestra.core.models.annotations.PluginProperty;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.sla.SLA;
import io.kestra.core.models.listeners.Listener;
import io.kestra.core.models.tasks.FlowableTask;
import io.kestra.core.models.tasks.Task;
import io.kestra.core.models.tasks.retrys.AbstractRetry;
import io.kestra.core.models.triggers.AbstractTrigger;
import io.kestra.core.models.triggers.Trigger;
import io.kestra.core.models.validations.ManualConstraintViolation;
import io.kestra.core.serializers.JacksonMapper;
import io.kestra.core.serializers.ListOrMapOfLabelDeserializer;
import io.kestra.core.serializers.ListOrMapOfLabelSerializer;
import io.kestra.core.services.FlowService;
import io.kestra.core.utils.IdUtils;
import io.kestra.core.utils.ListUtils;
import io.kestra.core.validations.FlowValidation;
import io.micronaut.core.annotation.Introspected;
@@ -38,11 +30,18 @@ import jakarta.validation.Valid;
import jakarta.validation.constraints.NotEmpty;
import lombok.*;
import lombok.experimental.SuperBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* A serializable flow with no source.
* <p>
* This class is planned for deprecation - use the {@link FlowWithSource}.
*/
@SuperBuilder(toBuilder = true)
@Getter
@NoArgsConstructor
@@ -67,11 +66,6 @@ public class Flow extends AbstractFlow implements HasUID {
String description;
@JsonSerialize(using = ListOrMapOfLabelSerializer.class)
@JsonDeserialize(using = ListOrMapOfLabelDeserializer.class)
@Schema(implementation = Object.class, oneOf = {List.class, Map.class})
List<Label> labels;
Map<String, Object> variables;
@Valid
@@ -135,61 +129,6 @@ public class Flow extends AbstractFlow implements HasUID {
@PluginProperty(beta = true)
List<SLA> sla;
/** {@inheritDoc **/
@Override
@JsonIgnore
public String uid() {
return Flow.uid(this.getTenantId(), this.getNamespace(), this.getId(), Optional.ofNullable(this.revision));
}
@JsonIgnore
public String uidWithoutRevision() {
return Flow.uidWithoutRevision(this.getTenantId(), this.getNamespace(), this.getId());
}
public static String uid(Execution execution) {
return IdUtils.fromParts(
execution.getTenantId(),
execution.getNamespace(),
execution.getFlowId(),
String.valueOf(execution.getFlowRevision())
);
}
public static String uid(String tenantId, String namespace, String id, Optional<Integer> revision) {
return IdUtils.fromParts(
tenantId,
namespace,
id,
String.valueOf(revision.orElse(-1))
);
}
public static String uidWithoutRevision(String tenantId, String namespace, String id) {
return IdUtils.fromParts(
tenantId,
namespace,
id
);
}
public static String uid(Trigger trigger) {
return IdUtils.fromParts(
trigger.getTenantId(),
trigger.getNamespace(),
trigger.getFlowId()
);
}
public static String uidWithoutRevision(Execution execution) {
return IdUtils.fromParts(
execution.getTenantId(),
execution.getNamespace(),
execution.getFlowId()
);
}
public Stream<String> allTypes() {
return Stream.of(
Optional.ofNullable(triggers).orElse(Collections.emptyList()).stream().map(AbstractTrigger::getType),
@@ -341,7 +280,7 @@ public class Flow extends AbstractFlow implements HasUID {
);
}
public boolean equalsWithoutRevision(Flow o) {
public boolean equalsWithoutRevision(FlowInterface o) {
try {
return WITHOUT_REVISION_OBJECT_MAPPER.writeValueAsString(this).equals(WITHOUT_REVISION_OBJECT_MAPPER.writeValueAsString(o));
} catch (JsonProcessingException e) {
@@ -381,14 +320,6 @@ public class Flow extends AbstractFlow implements HasUID {
}
}
/**
* Convenience method to generate the source of a flow.
* Equivalent to <code>FlowService.generateSource(this);</code>
*/
public String generateSource() {
return FlowService.generateSource(this);
}
public Flow toDeleted() {
return this.toBuilder()
.revision(this.revision + 1)
@@ -396,7 +327,13 @@ public class Flow extends AbstractFlow implements HasUID {
.build();
}
public FlowWithSource withSource(String source) {
return FlowWithSource.of(this, source);
/**
* {@inheritDoc}
* To be conservative, a Flow MUST not return any source.
*/
@Override
@JsonIgnore
public String getSource() {
return null;
}
}

View File

@@ -1,7 +1,7 @@
package io.kestra.core.models.flows;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import io.kestra.core.models.tasks.Task;
import io.kestra.core.models.tasks.TaskForExecution;
import io.kestra.core.models.triggers.AbstractTriggerForExecution;
import io.kestra.core.utils.ListUtils;
@@ -52,4 +52,10 @@ public class FlowForExecution extends AbstractFlow {
.deleted(flow.isDeleted())
.build();
}
@JsonIgnore
@Override
public String getSource() {
return null;
}
}

View File

@@ -0,0 +1,71 @@
package io.kestra.core.models.flows;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.triggers.Trigger;
import io.kestra.core.utils.IdUtils;
import lombok.AllArgsConstructor;
import lombok.Getter;
import java.util.Optional;
/**
* Represents a unique and global identifier for a flow.
*/
public interface FlowId {
String getId();
String getNamespace();
Integer getRevision();
String getTenantId();
static String uid(FlowId flow) {
return uid(flow.getTenantId(), flow.getNamespace(), flow.getId(), Optional.ofNullable(flow.getRevision()));
}
static String uid(String tenantId, String namespace, String id, Optional<Integer> revision) {
return of(tenantId, namespace, id, revision.orElse(-1)).toString();
}
static String uidWithoutRevision(FlowId flow) {
return of(flow.getTenantId(), flow.getNamespace(), flow.getId(), null).toString();
}
static String uidWithoutRevision(String tenantId, String namespace, String id) {
return of(tenantId, namespace, id, null).toString();
}
static String uid(Trigger trigger) {
return of(trigger.getTenantId(), trigger.getNamespace(), trigger.getFlowId(), null).toString();
}
static String uidWithoutRevision(Execution execution) {
return of(execution.getTenantId(), execution.getNamespace(), execution.getFlowId(), null).toString();
}
/**
* Static helper method for constructing a new {@link FlowId}.
*
* @return a new {@link FlowId}.
*/
static FlowId of(String tenantId, String namespace, String id, Integer revision) {
return new Default(tenantId, namespace, id, revision);
}
@Getter
@AllArgsConstructor
class Default implements FlowId {
private final String tenantId;
private final String namespace;
private final String id;
private final Integer revision;
@Override
public String toString() {
return IdUtils.fromParts(tenantId, namespace, id, Optional.ofNullable(revision).map(String::valueOf).orElse(null));
}
}
}
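As a quick illustration (hypothetical values, not from the commit), these static helpers take over from the uid builders removed from Flow further down in this diff:

// Build flow identifiers through FlowId instead of the removed Flow.uid* methods.
String uid = FlowId.uid(null, "company.team", "hello", Optional.of(2));
String uidWithoutRevision = FlowId.uidWithoutRevision(null, "company.team", "hello");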

View File

@@ -0,0 +1,194 @@
package io.kestra.core.models.flows;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import io.kestra.core.models.DeletedInterface;
import io.kestra.core.models.HasSource;
import io.kestra.core.models.HasUID;
import io.kestra.core.models.Label;
import io.kestra.core.models.TenantInterface;
import io.kestra.core.models.flows.sla.SLA;
import io.kestra.core.serializers.JacksonMapper;
import java.util.AbstractMap;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
/**
* The base interface for flows.
*/
@JsonDeserialize(as = GenericFlow.class)
public interface FlowInterface extends FlowId, DeletedInterface, TenantInterface, HasUID, HasSource {
Pattern YAML_REVISION_MATCHER = Pattern.compile("(?m)^revision: \\d+\n?");
boolean isDisabled();
boolean isDeleted();
List<Label> getLabels();
List<Input<?>> getInputs();
List<Output> getOutputs();
Map<String, Object> getVariables();
default Concurrency getConcurrency() {
return null;
}
default List<SLA> getSla() {
return List.of();
}
String getSource();
@Override
@JsonIgnore
default String source() {
return getSource();
}
@Override
@JsonIgnore
default String uid() {
return FlowId.uid(this);
}
@JsonIgnore
default String uidWithoutRevision() {
return FlowId.uidWithoutRevision(this);
}
/**
* Checks whether this flow is equal to the given flow.
* <p>
* This method is used to compare if two flow revisions are equal.
*
* @param flow The flow to compare.
* @return {@code true} if both flows are the same. Otherwise {@code false}
*/
@JsonIgnore
default boolean isSameWithSource(final FlowInterface flow) {
return
Objects.equals(this.uidWithoutRevision(), flow.uidWithoutRevision()) &&
Objects.equals(this.isDeleted(), flow.isDeleted()) &&
Objects.equals(this.isDisabled(), flow.isDisabled()) &&
Objects.equals(sourceWithoutRevision(this.getSource()), sourceWithoutRevision(flow.getSource()));
}
/**
* Checks whether this flow matches the given {@link FlowId}.
*
* @param that The {@link FlowId}.
* @return {@code true} if the passed id matches this flow.
*/
@JsonIgnore
default boolean isSameId(FlowId that) {
if (that == null) return false;
return
Objects.equals(this.getTenantId(), that.getTenantId()) &&
Objects.equals(this.getNamespace(), that.getNamespace()) &&
Objects.equals(this.getId(), that.getId());
}
/**
* Static method for removing the 'revision' field from a flow.
*
* @param source The source.
* @return The source without revision.
*/
static String sourceWithoutRevision(final String source) {
return YAML_REVISION_MATCHER.matcher(source).replaceFirst("");
}
/**
* Returns the source code for this flow or generates one if {@code null}.
* <p>
* This method must only be used for testing purposes or for handling backward-compatibility.
*
* @return the source code.
*/
default String sourceOrGenerateIfNull() {
return getSource() != null ? getSource() : SourceGenerator.generate(this);
}
/**
* Static helper class for generating source code from a {@link FlowInterface} object.
*
* <p>
* This class must only be used for testing purposes or for handling backward-compatibility.
*/
class SourceGenerator {
private static final ObjectMapper NON_DEFAULT_OBJECT_MAPPER = JacksonMapper.ofJson()
.copy()
.setSerializationInclusion(JsonInclude.Include.NON_DEFAULT);
static String generate(final FlowInterface flow) {
try {
String json = NON_DEFAULT_OBJECT_MAPPER.writeValueAsString(flow);
Object map = SourceGenerator.fixSnakeYaml(JacksonMapper.toMap(json));
String source = JacksonMapper.ofYaml().writeValueAsString(map);
// remove the revision from the generated source
return sourceWithoutRevision(source);
} catch (JsonProcessingException e) {
return null;
}
}
/**
* Dirty hack that only concerns previous flows with no source code, caused by org.yaml.snakeyaml.emitter.Emitter:
* <pre>
* if (previousSpace) {
* spaceBreak = true;
* }
* </pre>
* This check treats ` \n` as an invalid entry in a string and breaks the multiline scalar, collapsing it into a single line
*
* @param object the object to fix
* @return the modified object
*/
private static Object fixSnakeYaml(Object object) {
if (object instanceof Map<?, ?> mapValue) {
return mapValue
.entrySet()
.stream()
.map(entry -> new AbstractMap.SimpleEntry<>(
fixSnakeYaml(entry.getKey()),
fixSnakeYaml(entry.getValue())
))
.filter(entry -> entry.getValue() != null)
.collect(Collectors.toMap(
Map.Entry::getKey,
Map.Entry::getValue,
(u, v) -> {
throw new IllegalStateException(String.format("Duplicate key %s", u));
},
LinkedHashMap::new
));
} else if (object instanceof Collection<?> collectionValue) {
return collectionValue
.stream()
.map(SourceGenerator::fixSnakeYaml)
.toList();
} else if (object instanceof String item) {
if (item.contains("\n")) {
return item.replaceAll("\\s+\\n", "\\\n");
}
}
return object;
}
}
}
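A small hypothetical check of the revision-stripping helper defined above:

String a = "id: hello\nnamespace: company.team\nrevision: 3\n";
String b = "id: hello\nnamespace: company.team\n";
// YAML_REVISION_MATCHER drops the whole "revision: <n>" line, so both sources compare equal.
assert FlowInterface.sourceWithoutRevision(a).equals(FlowInterface.sourceWithoutRevision(b));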

View File

@@ -1,14 +1,16 @@
package io.kestra.core.models.flows;
import com.fasterxml.jackson.databind.JsonNode;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.serializers.JacksonMapper;
import io.micronaut.core.annotation.Introspected;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.ToString;
import lombok.experimental.SuperBuilder;
import org.slf4j.Logger;
import java.io.IOException;
import java.util.List;
import java.util.Optional;
@@ -21,11 +23,48 @@ import java.util.Optional;
public class FlowWithException extends FlowWithSource {
String exception;
public static FlowWithException from(final FlowInterface flow, final Exception exception) {
return FlowWithException.builder()
.id(flow.getId())
.tenantId(flow.getTenantId())
.namespace(flow.getNamespace())
.revision(flow.getRevision())
.deleted(flow.isDeleted())
.exception(exception.getMessage())
.tasks(List.of())
.source(flow.getSource())
.build();
}
public static Optional<FlowWithException> from(final String source, final Exception exception, final Logger log) {
log.error("Unable to deserialize a flow: {}", exception.getMessage());
try {
var jsonNode = JacksonMapper.ofJson().readTree(source);
return FlowWithException.from(jsonNode, exception);
} catch (IOException e) {
// if we cannot create a FlowWithException, ignore the message
log.error("Unexpected exception when trying to handle a deserialization error", e);
return Optional.empty();
}
}
public static Optional<FlowWithException> from(JsonNode jsonNode, Exception exception) {
if (jsonNode.hasNonNull("id") && jsonNode.hasNonNull("namespace")) {
final String tenantId;
if (jsonNode.hasNonNull("tenant_id")) {
// JsonNode is from database
tenantId = jsonNode.get("tenant_id").asText();
} else if (jsonNode.hasNonNull("tenantId")) {
// JsonNode is from queue
tenantId = jsonNode.get("tenantId").asText();
} else {
tenantId = null;
}
var flow = FlowWithException.builder()
.id(jsonNode.get("id").asText())
.tenantId(jsonNode.hasNonNull("tenant_id") ? jsonNode.get("tenant_id").asText() : null)
.tenantId(tenantId)
.namespace(jsonNode.get("namespace").asText())
.revision(jsonNode.hasNonNull("revision") ? jsonNode.get("revision").asInt() : 1)
.deleted(jsonNode.hasNonNull("deleted") && jsonNode.get("deleted").asBoolean())
@@ -39,4 +78,10 @@ public class FlowWithException extends FlowWithSource {
// if there is no id and namespace, we return an empty Optional as we cannot create a meaningful FlowWithException
return Optional.empty();
}
/** {@inheritDoc} **/
@Override
public Flow toFlow() {
return this;
}
}

View File

@@ -18,22 +18,14 @@ import lombok.experimental.SuperBuilder;
@EqualsAndHashCode
@FlowValidation
public class FlowWithPath {
private FlowWithSource flow;
private FlowInterface flow;
@Nullable
private String tenantId;
private String id;
private String namespace;
private String path;
public static FlowWithPath of(FlowWithSource flow, String path) {
return FlowWithPath.builder()
.id(flow.getId())
.namespace(flow.getNamespace())
.path(path)
.build();
}
public static FlowWithPath of(Flow flow, String path) {
public static FlowWithPath of(FlowInterface flow, String path) {
return FlowWithPath.builder()
.id(flow.getId())
.namespace(flow.getNamespace())

View File

@@ -1,18 +1,22 @@
package io.kestra.core.models.flows;
import io.kestra.core.models.HasSource;
import com.fasterxml.jackson.annotation.JsonIgnore;
import io.micronaut.core.annotation.Introspected;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.ToString;
import lombok.experimental.SuperBuilder;
import java.util.Objects;
import java.util.regex.Pattern;
@SuperBuilder(toBuilder = true)
@Getter
@NoArgsConstructor
@Introspected
@ToString
public class FlowWithSource extends Flow implements HasSource {
public class FlowWithSource extends Flow {
String source;
@SuppressWarnings("deprecation")
@@ -42,15 +46,13 @@ public class FlowWithSource extends Flow implements HasSource {
.build();
}
private static String cleanupSource(String source) {
return source.replaceFirst("(?m)^revision: \\d+\n?","");
}
public boolean equals(Flow flow, String flowSource) {
return this.equalsWithoutRevision(flow) &&
this.source.equals(cleanupSource(flowSource));
@Override
@JsonIgnore(value = false)
public String getSource() {
return this.source;
}
@Override
public FlowWithSource toDeleted() {
return this.toBuilder()
.revision(this.revision + 1)
@@ -85,10 +87,4 @@ public class FlowWithSource extends Flow implements HasSource {
.sla(flow.sla)
.build();
}
/** {@inheritDoc} **/
@Override
public String source() {
return getSource();
}
}

View File

@@ -0,0 +1,124 @@
package io.kestra.core.models.flows;
import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.google.common.annotations.VisibleForTesting;
import io.kestra.core.exceptions.DeserializationException;
import io.kestra.core.models.HasUID;
import io.kestra.core.models.Label;
import io.kestra.core.models.flows.sla.SLA;
import io.kestra.core.models.tasks.GenericTask;
import io.kestra.core.models.triggers.GenericTrigger;
import io.kestra.core.serializers.ListOrMapOfLabelDeserializer;
import io.kestra.core.serializers.ListOrMapOfLabelSerializer;
import io.kestra.core.serializers.YamlParser;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Builder;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.experimental.SuperBuilder;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
/**
* Represents an un-typed {@link FlowInterface} implementation for which
* most properties are backed by a {@link Map}.
*
* <p>
* This implementation should be preferred over other implementations when
* no direct access to tasks and triggers is required.
*/
@SuperBuilder(toBuilder = true)
@Getter
@NoArgsConstructor
@JsonDeserialize
public class GenericFlow extends AbstractFlow implements HasUID {
private String id;
private String namespace;
private Integer revision;
private List<Input<?>> inputs;
private Map<String, Object> variables;
@Builder.Default
private boolean disabled = false;
@Builder.Default
private boolean deleted = false;
@JsonSerialize(using = ListOrMapOfLabelSerializer.class)
@JsonDeserialize(using = ListOrMapOfLabelDeserializer.class)
@Schema(implementation = Object.class, oneOf = {List.class, Map.class})
private List<Label> labels;
private String tenantId;
private String source;
private List<SLA> sla;
private Concurrency concurrency;
private List<GenericTask> tasks;
private List<GenericTrigger> triggers;
@JsonIgnore
@Builder.Default
private Map<String, Object> additionalProperties = new HashMap<>();
/**
* Static helper method for constructing a {@link GenericFlow} from {@link FlowInterface}.
*
* @param flow The flow.
* @return a new {@link GenericFlow}
* @throws DeserializationException if source cannot be deserialized.
*/
@VisibleForTesting
public static GenericFlow of(final FlowInterface flow) throws DeserializationException {
return fromYaml(flow.getTenantId(), flow.sourceOrGenerateIfNull());
}
/**
* Static helper method for constructing a {@link GenericFlow} from a YAML source.
*
* @param source The flow YAML source.
* @return a new {@link GenericFlow}
* @throws DeserializationException if source cannot be deserialized.
*/
public static GenericFlow fromYaml(final String tenantId, final String source) throws DeserializationException {
GenericFlow parsed = YamlParser.parse(source, GenericFlow.class);
return parsed.toBuilder()
.tenantId(tenantId)
.source(source)
.build();
}
@JsonAnyGetter
public Map<String, Object> getAdditionalProperties() {
return this.additionalProperties;
}
@JsonAnySetter
public void setAdditionalProperty(String name, Object value) {
this.additionalProperties.put(name, value);
}
public List<GenericTask> getTasks() {
return Optional.ofNullable(tasks).orElse(List.of());
}
public List<GenericTrigger> getTriggers() {
return Optional.ofNullable(triggers).orElse(List.of());
}
}
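A hypothetical illustration (not part of the commit) of the un-typed behaviour: task properties unknown to GenericTask, such as the message below, are captured by the @JsonAnySetter hook instead of failing strongly-typed binding.

// Assumed example source; tenantId is null for brevity.
GenericFlow flow = GenericFlow.fromYaml(null, """
    id: hello
    namespace: company.team
    tasks:
      - id: log
        type: io.kestra.plugin.core.log.Log
        message: Hello World
    """);
GenericTask task = flow.getTasks().getFirst();
task.getType();                  // "io.kestra.plugin.core.log.Log"
task.getAdditionalProperties();  // {message=Hello World}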

View File

@@ -5,6 +5,7 @@ import io.kestra.core.exceptions.InternalException;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.tasks.*;
import io.kestra.core.runners.FlowExecutorInterface;
import io.kestra.core.runners.RunContext;
@@ -52,7 +53,7 @@ public class SubflowGraphTask extends AbstractGraphTask {
}
@Override
public Optional<SubflowExecutionResult> createSubflowExecutionResult(RunContext runContext, TaskRun taskRun, Flow flow, Execution execution) {
public Optional<SubflowExecutionResult> createSubflowExecutionResult(RunContext runContext, TaskRun taskRun, FlowInterface flow, Execution execution) {
return subflowTask.createSubflowExecutionResult(runContext, taskRun, flow, execution);
}

View File

@@ -4,6 +4,8 @@ import io.kestra.core.exceptions.InternalException;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowId;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.runners.FlowExecutorInterface;
import io.kestra.core.runners.RunContext;
import io.kestra.core.runners.SubflowExecution;
@@ -30,7 +32,7 @@ public interface ExecutableTask<T extends Output>{
*/
Optional<SubflowExecutionResult> createSubflowExecutionResult(RunContext runContext,
TaskRun taskRun,
Flow flow,
FlowInterface flow,
Execution execution);
/**
@@ -51,12 +53,12 @@ public interface ExecutableTask<T extends Output>{
record SubflowId(String namespace, String flowId, Optional<Integer> revision) {
public String flowUid() {
// as the Flow task can only be used in the same tenant we can hardcode null here
return Flow.uid(null, this.namespace, this.flowId, this.revision);
return FlowId.uid(null, this.namespace, this.flowId, this.revision);
}
public String flowUidWithoutRevision() {
// as the Flow task can only be used in the same tenant we can hardcode null here
return Flow.uidWithoutRevision(null, this.namespace, this.flowId);
return FlowId.uidWithoutRevision(null, this.namespace, this.flowId);
}
}

View File

@@ -0,0 +1,39 @@
package io.kestra.core.models.tasks;
import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import lombok.Builder;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.experimental.SuperBuilder;
import java.util.HashMap;
import java.util.Map;
@SuperBuilder(toBuilder = true)
@Getter
@NoArgsConstructor
@JsonDeserialize
public class GenericTask implements TaskInterface {
private String version;
private String id;
private String type;
private WorkerGroup workerGroup;
@JsonIgnore
@Builder.Default
private Map<String, Object> additionalProperties = new HashMap<>();
@JsonAnyGetter
public Map<String, Object> getAdditionalProperties() {
return this.additionalProperties;
}
@JsonAnySetter
public void setAdditionalProperty(String name, Object value) {
this.additionalProperties.put(name, value);
}
}

View File

@@ -2,6 +2,7 @@ package io.kestra.core.models.topologies;
import io.kestra.core.models.TenantInterface;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.swagger.v3.oas.annotations.Hidden;
import lombok.AllArgsConstructor;
import lombok.Getter;
@@ -25,7 +26,7 @@ public class FlowNode implements TenantInterface {
String id;
public static FlowNode of(Flow flow) {
public static FlowNode of(FlowInterface flow) {
return FlowNode.builder()
.uid(flow.uidWithoutRevision())
.tenantId(flow.getTenantId())

View File

@@ -0,0 +1,40 @@
package io.kestra.core.models.triggers;
import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import io.kestra.core.models.tasks.WorkerGroup;
import lombok.Builder;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.experimental.SuperBuilder;
import java.util.HashMap;
import java.util.Map;
@SuperBuilder(toBuilder = true)
@Getter
@NoArgsConstructor
@JsonDeserialize
public class GenericTrigger implements TriggerInterface {
private String version;
private String id;
private String type;
private WorkerGroup workerGroup;
@JsonIgnore
@Builder.Default
private Map<String, Object> additionalProperties = new HashMap<>();
@JsonAnyGetter
public Map<String, Object> getAdditionalProperties() {
return this.additionalProperties;
}
@JsonAnySetter
public void setAdditionalProperty(String name, Object value) {
this.additionalProperties.put(name, value);
}
}

View File

@@ -4,6 +4,8 @@ import io.kestra.core.models.HasUID;
import io.kestra.core.models.conditions.ConditionContext;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowId;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.State;
import io.kestra.core.utils.IdUtils;
import io.kestra.plugin.core.trigger.Schedule;
@@ -81,13 +83,13 @@ public class Trigger extends TriggerContext implements HasUID {
}
public String flowUid() {
return Flow.uidWithoutRevision(this.getTenantId(), this.getNamespace(), this.getFlowId());
return FlowId.uidWithoutRevision(this.getTenantId(), this.getNamespace(), this.getFlowId());
}
/**
* Create a new Trigger with no execution information and no evaluation lock.
*/
public static Trigger of(Flow flow, AbstractTrigger abstractTrigger) {
public static Trigger of(FlowInterface flow, AbstractTrigger abstractTrigger) {
return Trigger.builder()
.tenantId(flow.getTenantId())
.namespace(flow.getNamespace())
@@ -163,7 +165,7 @@ public class Trigger extends TriggerContext implements HasUID {
}
// Used to update trigger in flowListeners
public static Trigger of(Flow flow, AbstractTrigger abstractTrigger, ConditionContext conditionContext, Optional<Trigger> lastTrigger) throws Exception {
public static Trigger of(FlowInterface flow, AbstractTrigger abstractTrigger, ConditionContext conditionContext, Optional<Trigger> lastTrigger) throws Exception {
ZonedDateTime nextDate = null;
if (abstractTrigger instanceof PollingTriggerInterface pollingTriggerInterface) {

View File

@@ -1,6 +1,7 @@
package io.kestra.core.models.triggers.multipleflows;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowId;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.triggers.TimeWindow;
import org.apache.commons.lang3.tuple.Pair;
@@ -15,11 +16,11 @@ import java.util.Optional;
import static io.kestra.core.models.triggers.TimeWindow.Type.DURATION_WINDOW;
public interface MultipleConditionStorageInterface {
Optional<MultipleConditionWindow> get(Flow flow, String conditionId);
Optional<MultipleConditionWindow> get(FlowId flow, String conditionId);
List<MultipleConditionWindow> expired(String tenantId);
default MultipleConditionWindow getOrCreate(Flow flow, MultipleCondition multipleCondition, Map<String, Object> outputs) {
default MultipleConditionWindow getOrCreate(FlowId flow, MultipleCondition multipleCondition, Map<String, Object> outputs) {
ZonedDateTime now = ZonedDateTime.now().withNano(0);
TimeWindow timeWindow = multipleCondition.getTimeWindow() != null ? multipleCondition.getTimeWindow() : TimeWindow.builder().build();

View File

@@ -3,6 +3,7 @@ package io.kestra.core.models.triggers.multipleflows;
import com.fasterxml.jackson.annotation.JsonIgnore;
import io.kestra.core.models.HasUID;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowId;
import io.kestra.core.utils.IdUtils;
import lombok.Builder;
import lombok.Value;
@@ -44,7 +45,7 @@ public class MultipleConditionWindow implements HasUID {
);
}
public static String uid(Flow flow, String conditionId) {
public static String uid(FlowId flow, String conditionId) {
return IdUtils.fromParts(
flow.getTenantId(),
flow.getNamespace(),

View File

@@ -4,6 +4,7 @@ import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.ExecutionKilled;
import io.kestra.core.models.executions.LogEntry;
import io.kestra.core.models.executions.MetricEntry;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.triggers.Trigger;
import io.kestra.core.runners.*;
@@ -42,7 +43,7 @@ public interface QueueFactoryInterface {
QueueInterface<MetricEntry> metricEntry();
QueueInterface<FlowWithSource> flow();
QueueInterface<FlowInterface> flow();
QueueInterface<ExecutionKilled> kill();

View File

@@ -5,8 +5,10 @@ import io.kestra.core.models.SearchResult;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowForExecution;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowScope;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.GenericFlow;
import io.micronaut.data.model.Pageable;
import jakarta.annotation.Nullable;
@@ -176,9 +178,9 @@ public interface FlowRepositoryInterface {
.toList();
}
FlowWithSource create(Flow flow, String flowSource, Flow flowWithDefaults);
FlowWithSource create(GenericFlow flow);
FlowWithSource update(Flow flow, Flow previous, String flowSource, Flow flowWithDefaults) throws ConstraintViolationException;
FlowWithSource update(GenericFlow flow, FlowInterface previous) throws ConstraintViolationException;
FlowWithSource delete(FlowWithSource flow);
FlowWithSource delete(FlowInterface flow);
}

View File

@@ -1,11 +1,16 @@
package io.kestra.core.repositories;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowId;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.models.validations.ModelValidator;
import io.kestra.core.serializers.YamlParser;
import io.kestra.core.services.PluginDefaultService;
import io.kestra.core.utils.Rethrow;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import jakarta.validation.ConstraintViolationException;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
@@ -15,22 +20,22 @@ import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.charset.Charset;
import java.nio.file.*;
import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
import jakarta.validation.ConstraintViolationException;
import java.util.stream.Stream;
import static io.kestra.core.utils.Rethrow.throwConsumer;
@Singleton
@Slf4j
public class LocalFlowRepositoryLoader {
@Inject
private YamlParser yamlParser;
@Inject
private FlowRepositoryInterface flowRepository;
@@ -68,47 +73,32 @@ public class LocalFlowRepositoryLoader {
}
public void load(File basePath) throws IOException {
Map<String, Flow> flowByUidInRepository = flowRepository.findAllForAllTenants().stream()
.collect(Collectors.toMap(Flow::uidWithoutRevision, Function.identity()));
List<Path> list = Files.walk(basePath.toPath())
.filter(YamlParser::isValidExtension)
.toList();
Map<String, FlowInterface> flowByUidInRepository = flowRepository.findAllForAllTenants().stream()
.collect(Collectors.toMap(FlowId::uidWithoutRevision, Function.identity()));
for (Path file : list) {
try (Stream<Path> pathStream = Files.walk(basePath.toPath())) {
pathStream.filter(YamlParser::isValidExtension)
.forEach(Rethrow.throwConsumer(file -> {
try {
String flowSource = Files.readString(Path.of(file.toFile().getPath()), Charset.defaultCharset());
Flow parse = yamlParser.parse(file.toFile(), Flow.class);
modelValidator.validate(parse);
String source = Files.readString(Path.of(file.toFile().getPath()), Charset.defaultCharset());
GenericFlow parsed = GenericFlow.fromYaml(null, source);
Flow inRepository = flowByUidInRepository.get(parse.uidWithoutRevision());
FlowWithSource flowWithSource = pluginDefaultService.injectAllDefaults(parsed, false);
modelValidator.validate(flowWithSource);
if (inRepository == null) {
this.createFlow(flowSource, parse);
FlowInterface existing = flowByUidInRepository.get(flowWithSource.uidWithoutRevision());
if (existing == null) {
flowRepository.create(parsed);
log.trace("Created flow {}.{}", parsed.getNamespace(), parsed.getId());
} else {
this.udpateFlow(flowSource, parse, inRepository);
flowRepository.update(parsed, existing);
log.trace("Updated flow {}.{}", parsed.getNamespace(), parsed.getId());
}
} catch (ConstraintViolationException e) {
log.warn("Unable to create flow {}", file, e);
}
}));
}
}
private void createFlow(String flowSource, Flow parse) {
flowRepository.create(
parse,
flowSource,
parse
);
log.trace("Created flow {}.{}", parse.getNamespace(), parse.getId());
}
private void udpateFlow(String flowSource, Flow parse, Flow previous) {
flowRepository.update(
parse,
previous,
flowSource,
parse
);
log.trace("Updated flow {}.{}", parse.getNamespace(), parse.getId());
}
}

View File

@@ -1,5 +1,6 @@
package io.kestra.core.runners;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.services.FlowListenersInterface;
@@ -20,8 +21,7 @@ public class DefaultFlowExecutor implements FlowExecutorInterface {
public DefaultFlowExecutor(FlowListenersInterface flowListeners, FlowRepositoryInterface flowRepository) {
this.flowRepository = flowRepository;
flowListeners.listen(flows -> this.allFlows = flows);
flowListeners.listen(flows -> allFlows = flows);
}
@Override
@@ -30,20 +30,22 @@ public class DefaultFlowExecutor implements FlowExecutorInterface {
}
@Override
public Optional<FlowWithSource> findById(String tenantId, String namespace, String id, Optional<Integer> revision) {
Optional<FlowWithSource> find = this.allFlows
@SuppressWarnings({"unchecked", "rawtypes"})
public Optional<FlowInterface> findById(String tenantId, String namespace, String id, Optional<Integer> revision) {
Optional<FlowInterface> find = this.allFlows
.stream()
.filter(flow -> ((flow.getTenantId() == null && tenantId == null) || Objects.equals(flow.getTenantId(), tenantId)) &&
flow.getNamespace().equals(namespace) &&
flow.getId().equals(id) &&
(revision.isEmpty() || revision.get().equals(flow.getRevision()))
)
.map(it -> (FlowInterface)it)
.findFirst();
if (find.isPresent()) {
return find;
} else {
return flowRepository.findByIdWithSource(tenantId, namespace, id, revision);
return (Optional) flowRepository.findByIdWithSource(tenantId, namespace, id, revision);
}
}

View File

@@ -6,6 +6,7 @@ import io.kestra.core.exceptions.InternalException;
import io.kestra.core.models.Label;
import io.kestra.core.models.executions.*;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithException;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.property.Property;
@@ -143,7 +144,7 @@ public final class ExecutableUtils {
String subflowId = runContext.render(currentTask.subflowId().flowId());
Optional<Integer> subflowRevision = currentTask.subflowId().revision();
Flow flow = flowExecutorInterface.findByIdFromTask(
FlowInterface flow = flowExecutorInterface.findByIdFromTask(
currentExecution.getTenantId(),
subflowNamespace,
subflowId,
@@ -212,7 +213,7 @@ public final class ExecutableUtils {
}));
}
private static List<Label> filterLabels(List<Label> labels, Flow flow) {
private static List<Label> filterLabels(List<Label> labels, FlowInterface flow) {
if (ListUtils.isEmpty(flow.getLabels())) {
return labels;
}
@@ -304,7 +305,7 @@ public final class ExecutableUtils {
return State.Type.SUCCESS;
}
public static SubflowExecutionResult subflowExecutionResultFromChildExecution(RunContext runContext, Flow flow, Execution execution, ExecutableTask<?> executableTask, TaskRun taskRun) {
public static SubflowExecutionResult subflowExecutionResultFromChildExecution(RunContext runContext, FlowInterface flow, Execution execution, ExecutableTask<?> executableTask, TaskRun taskRun) {
try {
return executableTask
.createSubflowExecutionResult(runContext, taskRun, flow, execution)

View File

@@ -2,7 +2,6 @@ package io.kestra.core.runners;
import com.fasterxml.jackson.annotation.JsonIgnore;
import io.kestra.core.models.executions.*;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowWithException;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.State;

View File

@@ -6,6 +6,7 @@ import io.kestra.core.metrics.MetricRegistry;
import io.kestra.core.models.Label;
import io.kestra.core.models.executions.*;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.flows.sla.Violation;
@@ -92,7 +93,7 @@ public class ExecutorService {
return this.flowExecutorInterface;
}
public Executor checkConcurrencyLimit(Executor executor, Flow flow, Execution execution, long count) {
public Executor checkConcurrencyLimit(Executor executor, FlowInterface flow, Execution execution, long count) {
// if above the limit, handle concurrency limit based on its behavior
if (count >= flow.getConcurrency().getLimit()) {
return switch (flow.getConcurrency().getBehavior()) {
@@ -902,7 +903,7 @@ public class ExecutorService {
);
} else {
executions.addAll(subflowExecutions);
Optional<FlowWithSource> flow = flowExecutorInterface.findByExecution(subflowExecutions.getFirst().getExecution());
Optional<FlowInterface> flow = flowExecutorInterface.findByExecution(subflowExecutions.getFirst().getExecution());
if (flow.isPresent()) {
// add SubflowExecutionResults to notify parents
for (SubflowExecution<?> subflowExecution : subflowExecutions) {

View File

@@ -1,7 +1,7 @@
package io.kestra.core.runners;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithSource;
import java.util.Collection;
@@ -18,7 +18,7 @@ public interface FlowExecutorInterface {
* Find a flow.
* WARNING: this method will NOT check if the namespace is allowed, so it should not be used inside a task.
*/
Optional<FlowWithSource> findById(String tenantId, String namespace, String id, Optional<Integer> revision);
Optional<FlowInterface> findById(String tenantId, String namespace, String id, Optional<Integer> revision);
/**
* Whether the FlowExecutorInterface is ready to be used.
@@ -29,20 +29,15 @@ public interface FlowExecutorInterface {
* Find a flow.
* This method will check if the namespace is allowed, so it can be used inside a task.
*/
default Optional<FlowWithSource> findByIdFromTask(String tenantId, String namespace, String id, Optional<Integer> revision, String fromTenant, String fromNamespace, String fromId) {
return this.findById(
tenantId,
namespace,
id,
revision
);
default Optional<FlowInterface> findByIdFromTask(String tenantId, String namespace, String id, Optional<Integer> revision, String fromTenant, String fromNamespace, String fromId) {
return this.findById(tenantId, namespace, id, revision);
}
/**
* Find a flow from an execution.
* WARNING: this method will NOT check if the namespace is allowed, so it should not be used inside a task.
*/
default Optional<FlowWithSource> findByExecution(Execution execution) {
default Optional<FlowInterface> findByExecution(Execution execution) {
if (execution.getFlowRevision() == null) {
return Optional.empty();
}
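For callers, the narrower return type is transparent as long as they only rely on the identity accessors exposed by FlowInterface. A minimal caller-side sketch (flowExecutor, tenantId, log and the flow coordinates are placeholders; the accessors match the ones used elsewhere in this diff):

    Optional<FlowInterface> flow = flowExecutor.findById(tenantId, "company.team", "hello-world", Optional.empty());
    flow.ifPresent(f -> log.info("Resolved flow {}.{} (revision {})", f.getNamespace(), f.getId(), f.getRevision()));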

View File

@@ -10,6 +10,7 @@ import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Data;
import io.kestra.core.models.flows.DependsOn;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.Input;
import io.kestra.core.models.flows.RenderableInput;
import io.kestra.core.models.flows.Type;
@@ -110,7 +111,7 @@ public class FlowInputOutput {
* @param data The Execution's inputs data.
* @return The Map of typed inputs.
*/
public Mono<Map<String, Object>> readExecutionInputs(final Flow flow,
public Mono<Map<String, Object>> readExecutionInputs(final FlowInterface flow,
final Execution execution,
final Publisher<CompletedPart> data) {
return this.readExecutionInputs(flow.getInputs(), flow, execution, data);
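Beyond the signature widening, input resolution itself is unchanged; the map-based overload shown further down can be called against any FlowInterface. A minimal sketch (flowInputOutput, flow and execution are placeholders, and "name" is a hypothetical input id):

    Map<String, Object> typedInputs = flowInputOutput.readExecutionInputs(flow, execution, Map.of("name", "kestra"));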
@@ -125,7 +126,7 @@ public class FlowInputOutput {
* @return The Map of typed inputs.
*/
public Mono<Map<String, Object>> readExecutionInputs(final List<Input<?>> inputs,
final Flow flow,
final FlowInterface flow,
final Execution execution,
final Publisher<CompletedPart> data) {
return readData(inputs, execution, data, true).map(inputData -> this.readExecutionInputs(inputs, flow, execution, inputData));
@@ -189,7 +190,7 @@ public class FlowInputOutput {
* @return The Map of typed inputs.
*/
public Map<String, Object> readExecutionInputs(
final Flow flow,
final FlowInterface flow,
final Execution execution,
final Map<String, ?> data
) {
@@ -198,7 +199,7 @@ public class FlowInputOutput {
private Map<String, Object> readExecutionInputs(
final List<Input<?>> inputs,
final Flow flow,
final FlowInterface flow,
final Execution execution,
final Map<String, ?> data
) {
@@ -227,7 +228,7 @@ public class FlowInputOutput {
@VisibleForTesting
public List<InputAndValue> resolveInputs(
final List<Input<?>> inputs,
final Flow flow,
final FlowInterface flow,
final Execution execution,
final Map<String, ?> data
) {
@@ -251,7 +252,7 @@ public class FlowInputOutput {
@SuppressWarnings({"unchecked", "rawtypes"})
private InputAndValue resolveInputValue(
final @NotNull ResolvableInput resolvable,
final Flow flow,
final FlowInterface flow,
final @NotNull Execution execution,
final @NotNull Map<String, ResolvableInput> inputs) {
@@ -329,7 +330,7 @@ public class FlowInputOutput {
return resolvable.get();
}
private RunContext buildRunContextForExecutionAndInputs(final Flow flow, final Execution execution, Map<String, InputAndValue> dependencies) {
private RunContext buildRunContextForExecutionAndInputs(final FlowInterface flow, final Execution execution, Map<String, InputAndValue> dependencies) {
Map<String, Object> flattenInputs = MapUtils.flattenToNestedMap(dependencies.entrySet()
.stream()
.collect(HashMap::new, (m, v) -> m.put(v.getKey(), v.getValue().value()), HashMap::putAll)
@@ -337,7 +338,7 @@ public class FlowInputOutput {
return runContextFactory.of(flow, execution, vars -> vars.withInputs(flattenInputs));
}
private Map<String, InputAndValue> resolveAllDependentInputs(final Input<?> input, final Flow flow, final Execution execution, final Map<String, ResolvableInput> inputs) {
private Map<String, InputAndValue> resolveAllDependentInputs(final Input<?> input, final FlowInterface flow, final Execution execution, final Map<String, ResolvableInput> inputs) {
return Optional.ofNullable(input.getDependsOn())
.map(DependsOn::inputs)
.stream()
@@ -350,7 +351,7 @@ public class FlowInputOutput {
}
public Map<String, Object> typedOutputs(
final Flow flow,
final FlowInterface flow,
final Execution execution,
final Map<String, Object> in
) {

View File

@@ -1,9 +1,9 @@
package io.kestra.core.runners;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithException;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.serializers.JacksonMapper;
import io.kestra.core.services.PluginDefaultService;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import io.kestra.core.queues.QueueFactoryInterface;
@@ -11,12 +11,9 @@ import io.kestra.core.queues.QueueInterface;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.services.FlowListenersInterface;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
@@ -28,22 +25,24 @@ import jakarta.inject.Singleton;
@Singleton
@Slf4j
public class FlowListeners implements FlowListenersInterface {
private static final ObjectMapper MAPPER = JacksonMapper.ofJson();
private final AtomicBoolean isStarted = new AtomicBoolean(false);
private final QueueInterface<FlowWithSource> flowQueue;
private final QueueInterface<FlowInterface> flowQueue;
private final List<FlowWithSource> flows;
private final List<Consumer<List<FlowWithSource>>> consumers = new CopyOnWriteArrayList<>();
private final List<Consumer<List<FlowWithSource>>> consumers = new ArrayList<>();
private final List<BiConsumer<FlowWithSource, FlowWithSource>> consumersEach = new ArrayList<>();
private final List<BiConsumer<FlowWithSource, FlowWithSource>> consumersEach = new CopyOnWriteArrayList<>();
private final PluginDefaultService pluginDefaultService;
@Inject
public FlowListeners(
FlowRepositoryInterface flowRepository,
@Named(QueueFactoryInterface.FLOW_NAMED) QueueInterface<FlowWithSource> flowQueue
@Named(QueueFactoryInterface.FLOW_NAMED) QueueInterface<FlowInterface> flowQueue,
PluginDefaultService pluginDefaultService
) {
this.flowQueue = flowQueue;
this.flows = flowRepository.findAllWithSourceForAllTenants();
this.flows = new ArrayList<>(flowRepository.findAllWithSourceForAllTenants());
this.pluginDefaultService = pluginDefaultService;
}
@Override
@@ -53,19 +52,14 @@ public class FlowListeners implements FlowListenersInterface {
this.flowQueue.receive(either -> {
FlowWithSource flow;
if (either.isRight()) {
log.error("Unable to deserialize a flow: {}", either.getRight().getMessage());
try {
var jsonNode = MAPPER.readTree(either.getRight().getRecord());
flow = FlowWithException.from(jsonNode, either.getRight()).orElseThrow(IOException::new);
} catch (IOException e) {
// if we cannot create a FlowWithException, ignore the message
log.error("Unexpected exception when trying to handle a deserialization error", e);
flow = FlowWithException.from(either.getRight().getRecord(), either.getRight(), log).orElse(null);
if (flow == null) {
return;
}
} else {
flow = pluginDefaultService.injectVersionDefaults(either.getLeft(), true);
}
else {
flow = either.getLeft();
}
Optional<FlowWithSource> previous = this.previous(flow);
if (flow.isDeleted()) {
@@ -96,17 +90,14 @@ public class FlowListeners implements FlowListenersInterface {
}
}
private Optional<FlowWithSource> previous(FlowWithSource flow) {
private Optional<FlowWithSource> previous(final FlowWithSource flow) {
List<FlowWithSource> copy = new ArrayList<>(flows);
return copy
.stream()
.filter(r -> Objects.equals(r.getTenantId(), flow.getTenantId()) && r.getNamespace().equals(flow.getNamespace()) && r.getId().equals(flow.getId()))
.findFirst();
return copy.stream().filter(r -> r.isSameId(flow)).findFirst();
}
private boolean remove(FlowWithSource flow) {
private boolean remove(FlowInterface flow) {
synchronized (this) {
boolean remove = flows.removeIf(r -> Objects.equals(r.getTenantId(), flow.getTenantId()) && r.getNamespace().equals(flow.getNamespace()) && r.getId().equals(flow.getId()));
boolean remove = flows.removeIf(r -> r.isSameId(flow));
if (!remove && flow.isDeleted()) {
log.warn("Can't remove flow {}.{}", flow.getNamespace(), flow.getId());
}
@@ -125,8 +116,7 @@ public class FlowListeners implements FlowListenersInterface {
private void notifyConsumers() {
synchronized (this) {
this.consumers
.forEach(consumer -> consumer.accept(new ArrayList<>(this.flows)));
this.consumers.forEach(consumer -> consumer.accept(new ArrayList<>(this.flows)));
}
}
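The tenant/namespace/id comparisons previously inlined in previous() and remove() are now delegated to isSameId, keeping both call sites consistent. A minimal sketch of the same lookup, assuming isSameId compares tenant, namespace and id exactly as the removed predicates did:

    Optional<FlowWithSource> previous = flows.stream()
        .filter(existing -> existing.isSameId(incoming))
        .findFirst();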

View File

@@ -5,6 +5,7 @@ import io.kestra.core.metrics.MetricRegistry;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.Type;
import io.kestra.core.models.tasks.Task;
import io.kestra.core.models.triggers.AbstractTrigger;
@@ -75,11 +76,11 @@ public class RunContextFactory {
return applicationContext.getBean(RunContextInitializer.class);
}
public RunContext of(Flow flow, Execution execution) {
public RunContext of(FlowInterface flow, Execution execution) {
return of(flow, execution, Function.identity());
}
public RunContext of(Flow flow, Execution execution, Function<RunVariables.Builder, RunVariables.Builder> runVariableModifier) {
public RunContext of(FlowInterface flow, Execution execution, Function<RunVariables.Builder, RunVariables.Builder> runVariableModifier) {
RunContextLogger runContextLogger = runContextLoggerFactory.create(execution);
return newBuilder()
@@ -100,11 +101,11 @@ public class RunContextFactory {
.build();
}
public RunContext of(Flow flow, Task task, Execution execution, TaskRun taskRun) {
public RunContext of(FlowInterface flow, Task task, Execution execution, TaskRun taskRun) {
return this.of(flow, task, execution, taskRun, true);
}
public RunContext of(Flow flow, Task task, Execution execution, TaskRun taskRun, boolean decryptVariables) {
public RunContext of(FlowInterface flow, Task task, Execution execution, TaskRun taskRun, boolean decryptVariables) {
RunContextLogger runContextLogger = runContextLoggerFactory.create(taskRun, task);
return newBuilder()
@@ -202,7 +203,7 @@ public class RunContextFactory {
return of(Map.of());
}
private List<String> secretInputsFromFlow(Flow flow) {
private List<String> secretInputsFromFlow(FlowInterface flow) {
if (flow == null || flow.getInputs() == null) {
return Collections.emptyList();
}
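Since RunContextFactory now only requires a FlowInterface, a run context can be built for anything carrying the flow identity and inputs. A minimal sketch (runContextFactory, flow and execution are placeholders, and "name" is a hypothetical input):

    RunContext runContext = runContextFactory.of(flow, execution);
    RunContext withInputs = runContextFactory.of(flow, execution, builder -> builder.withInputs(Map.of("name", "kestra")));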

View File

@@ -5,6 +5,7 @@ import io.kestra.core.models.Label;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.Input;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.flows.input.SecretInput;
@@ -73,7 +74,7 @@ public final class RunVariables {
* @param flow The flow from which to create variables.
* @return a new immutable {@link Map}.
*/
static Map<String, Object> of(final Flow flow) {
static Map<String, Object> of(final FlowInterface flow) {
ImmutableMap.Builder<String, Object> builder = ImmutableMap.builder();
builder.put("id", flow.getId())
.put("namespace", flow.getNamespace());
@@ -105,7 +106,7 @@ public final class RunVariables {
*/
public interface Builder {
Builder withFlow(Flow flow);
Builder withFlow(FlowInterface flow);
Builder withInputs(Map<String, Object> inputs);
@@ -147,7 +148,7 @@ public final class RunVariables {
@With
public static class DefaultBuilder implements RunVariables.Builder {
protected Flow flow;
protected FlowInterface flow;
protected Task task;
protected Execution execution;
protected TaskRun taskRun;

View File

@@ -4,6 +4,7 @@ import com.google.common.annotations.VisibleForTesting;
import io.kestra.core.models.Label;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.queues.QueueException;
import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface;
@@ -47,7 +48,7 @@ public class RunnerUtils {
return this.runOne(tenantId, namespace, flowId, revision, null, null, null);
}
public Execution runOne(String tenantId, String namespace, String flowId, Integer revision, BiFunction<Flow, Execution, Map<String, Object>> inputs) throws TimeoutException, QueueException {
public Execution runOne(String tenantId, String namespace, String flowId, Integer revision, BiFunction<FlowInterface, Execution, Map<String, Object>> inputs) throws TimeoutException, QueueException {
return this.runOne(tenantId, namespace, flowId, revision, inputs, null, null);
}
@@ -55,11 +56,11 @@ public class RunnerUtils {
return this.runOne(tenantId, namespace, flowId, null, null, duration, null);
}
public Execution runOne(String tenantId, String namespace, String flowId, Integer revision, BiFunction<Flow, Execution, Map<String, Object>> inputs, Duration duration) throws TimeoutException, QueueException {
public Execution runOne(String tenantId, String namespace, String flowId, Integer revision, BiFunction<FlowInterface, Execution, Map<String, Object>> inputs, Duration duration) throws TimeoutException, QueueException {
return this.runOne(tenantId, namespace, flowId, revision, inputs, duration, null);
}
public Execution runOne(String tenantId, String namespace, String flowId, Integer revision, BiFunction<Flow, Execution, Map<String, Object>> inputs, Duration duration, List<Label> labels) throws TimeoutException, QueueException {
public Execution runOne(String tenantId, String namespace, String flowId, Integer revision, BiFunction<FlowInterface, Execution, Map<String, Object>> inputs, Duration duration, List<Label> labels) throws TimeoutException, QueueException {
return this.runOne(
flowRepository
.findById(tenantId, namespace, flowId, revision != null ? Optional.of(revision) : Optional.empty())
@@ -69,15 +70,15 @@ public class RunnerUtils {
labels);
}
public Execution runOne(Flow flow, BiFunction<Flow, Execution, Map<String, Object>> inputs) throws TimeoutException, QueueException {
public Execution runOne(Flow flow, BiFunction<FlowInterface, Execution, Map<String, Object>> inputs) throws TimeoutException, QueueException {
return this.runOne(flow, inputs, null, null);
}
public Execution runOne(Flow flow, BiFunction<Flow, Execution, Map<String, Object>> inputs, Duration duration) throws TimeoutException, QueueException {
public Execution runOne(Flow flow, BiFunction<FlowInterface, Execution, Map<String, Object>> inputs, Duration duration) throws TimeoutException, QueueException {
return this.runOne(flow, inputs, duration, null);
}
public Execution runOne(Flow flow, BiFunction<Flow, Execution, Map<String, Object>> inputs, Duration duration, List<Label> labels) throws TimeoutException, QueueException {
public Execution runOne(Flow flow, BiFunction<FlowInterface, Execution, Map<String, Object>> inputs, Duration duration, List<Label> labels) throws TimeoutException, QueueException {
if (duration == null) {
duration = Duration.ofSeconds(15);
}
@@ -93,7 +94,7 @@ public class RunnerUtils {
return this.runOneUntilPaused(tenantId, namespace, flowId, null, null, null);
}
public Execution runOneUntilPaused(String tenantId, String namespace, String flowId, Integer revision, BiFunction<Flow, Execution, Map<String, Object>> inputs, Duration duration) throws TimeoutException, QueueException {
public Execution runOneUntilPaused(String tenantId, String namespace, String flowId, Integer revision, BiFunction<FlowInterface, Execution, Map<String, Object>> inputs, Duration duration) throws TimeoutException, QueueException {
return this.runOneUntilPaused(
flowRepository
.findById(tenantId, namespace, flowId, revision != null ? Optional.of(revision) : Optional.empty())
@@ -103,7 +104,7 @@ public class RunnerUtils {
);
}
public Execution runOneUntilPaused(Flow flow, BiFunction<Flow, Execution, Map<String, Object>> inputs, Duration duration) throws TimeoutException, QueueException {
public Execution runOneUntilPaused(Flow flow, BiFunction<FlowInterface, Execution, Map<String, Object>> inputs, Duration duration) throws TimeoutException, QueueException {
if (duration == null) {
duration = DEFAULT_MAX_WAIT_DURATION;
}
@@ -119,7 +120,7 @@ public class RunnerUtils {
return this.runOneUntilRunning(tenantId, namespace, flowId, null, null, null);
}
public Execution runOneUntilRunning(String tenantId, String namespace, String flowId, Integer revision, BiFunction<Flow, Execution, Map<String, Object>> inputs, Duration duration) throws TimeoutException, QueueException {
public Execution runOneUntilRunning(String tenantId, String namespace, String flowId, Integer revision, BiFunction<FlowInterface, Execution, Map<String, Object>> inputs, Duration duration) throws TimeoutException, QueueException {
return this.runOneUntilRunning(
flowRepository
.findById(tenantId, namespace, flowId, revision != null ? Optional.of(revision) : Optional.empty())
@@ -129,7 +130,7 @@ public class RunnerUtils {
);
}
public Execution runOneUntilRunning(Flow flow, BiFunction<Flow, Execution, Map<String, Object>> inputs, Duration duration) throws TimeoutException, QueueException {
public Execution runOneUntilRunning(Flow flow, BiFunction<FlowInterface, Execution, Map<String, Object>> inputs, Duration duration) throws TimeoutException, QueueException {
if (duration == null) {
duration = DEFAULT_MAX_WAIT_DURATION;
}
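In tests, the inputs callback now receives the flow as a FlowInterface, which is enough to derive input values from the flow identity. A minimal sketch (runnerUtils and the flow coordinates are placeholders):

    Execution execution = runnerUtils.runOne(
        null,
        "company.team",
        "hello-world",
        null,
        (flow, exec) -> Map.of("name", flow.getId()),
        Duration.ofSeconds(30)
    );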

View File

@@ -13,6 +13,8 @@ import io.kestra.core.models.conditions.ConditionContext;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.ExecutionKilled;
import io.kestra.core.models.executions.ExecutionKilledTrigger;
import io.kestra.core.models.flows.FlowId;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithException;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.State;
@@ -32,7 +34,6 @@ import io.kestra.core.utils.Await;
import io.kestra.core.utils.Either;
import io.kestra.core.utils.IdUtils;
import io.kestra.core.utils.ListUtils;
import io.kestra.core.models.flows.Flow;
import io.micronaut.context.ApplicationContext;
import io.micronaut.context.event.ApplicationEventPublisher;
import io.micronaut.inject.qualifiers.Qualifiers;
@@ -172,6 +173,7 @@ public abstract class AbstractScheduler implements Scheduler, Service {
// remove trigger on flow update, update local triggers store, and stop the trigger on the worker
this.flowListeners.listen((flow, previous) -> {
if (flow.isDeleted() || previous != null) {
List<AbstractTrigger> triggersDeleted = flow.isDeleted() ?
ListUtils.emptyOnNull(flow.getTriggers()) :
@@ -287,7 +289,7 @@ public abstract class AbstractScheduler implements Scheduler, Service {
flows
.stream()
.map(flow -> pluginDefaultService.injectDefaults(flow, log))
.map(flow -> pluginDefaultService.injectAllDefaults(flow, log))
.filter(Objects::nonNull)
.filter(flow -> flow.getTriggers() != null && !flow.getTriggers().isEmpty())
.flatMap(flow -> flow.getTriggers().stream().filter(trigger -> trigger instanceof WorkerTriggerInterface).map(trigger -> new FlowAndTrigger(flow, trigger)))
@@ -430,7 +432,7 @@ public abstract class AbstractScheduler implements Scheduler, Service {
List<String> flowToKeep = triggerContextsToEvaluate.stream().map(Trigger::getFlowId).toList();
triggerContextsToEvaluate.stream()
.filter(trigger -> !flows.stream().map(FlowWithSource::uidWithoutRevision).toList().contains(Flow.uid(trigger)))
.filter(trigger -> !flows.stream().map(FlowId::uidWithoutRevision).toList().contains(FlowId.uid(trigger)))
.forEach(trigger -> {
try {
this.triggerState.delete(trigger);
@@ -441,8 +443,6 @@ public abstract class AbstractScheduler implements Scheduler, Service {
return flows
.stream()
.map(flow -> pluginDefaultService.injectDefaults(flow, log))
.filter(Objects::nonNull)
.filter(flow -> flowToKeep.contains(flow.getId()))
.filter(flow -> flow.getTriggers() != null && !flow.getTriggers().isEmpty())
.filter(flow -> !flow.isDisabled() && !(flow instanceof FlowWithException))
@@ -493,9 +493,8 @@ public abstract class AbstractScheduler implements Scheduler, Service {
abstract public void handleNext(List<FlowWithSource> flows, ZonedDateTime now, BiConsumer<List<Trigger>, ScheduleContextInterface> consumer);
public List<FlowWithTriggers> schedulerTriggers() {
Map<String, FlowWithSource> flows = this.flowListeners.flows()
.stream()
.collect(Collectors.toMap(FlowWithSource::uidWithoutRevision, Function.identity()));
Map<String, FlowWithSource> flows = getFlowsWithDefaults().stream()
.collect(Collectors.toMap(FlowInterface::uidWithoutRevision, Function.identity()));
return this.triggerState.findAllForAllTenants().stream()
.filter(trigger -> flows.containsKey(trigger.flowUid()))
@@ -521,7 +520,9 @@ public abstract class AbstractScheduler implements Scheduler, Service {
ZonedDateTime now = now();
this.handleNext(this.flowListeners.flows(), now, (triggers, scheduleContext) -> {
final List<FlowWithSource> flowWithDefaults = getFlowsWithDefaults();
this.handleNext(flowWithDefaults, now, (triggers, scheduleContext) -> {
if (triggers.isEmpty()) {
return;
}
@@ -530,7 +531,7 @@ public abstract class AbstractScheduler implements Scheduler, Service {
.filter(trigger -> Boolean.FALSE.equals(trigger.getDisabled()))
.toList();
List<FlowWithTriggers> schedulable = this.computeSchedulable(flowListeners.flows(), triggerContextsToEvaluate, scheduleContext);
List<FlowWithTriggers> schedulable = this.computeSchedulable(flowWithDefaults, triggerContextsToEvaluate, scheduleContext);
metricRegistry
.counter(MetricRegistry.SCHEDULER_LOOP_COUNT)
@@ -661,6 +662,13 @@ public abstract class AbstractScheduler implements Scheduler, Service {
});
}
private List<FlowWithSource> getFlowsWithDefaults() {
return this.flowListeners.flows().stream()
.map(flow -> pluginDefaultService.injectAllDefaults(flow, log))
.filter(Objects::nonNull)
.toList();
}
private void handleEvaluateWorkerTriggerResult(SchedulerExecutionWithTrigger result, ZonedDateTime
nextExecutionDate) {
Optional.ofNullable(result)
@@ -815,35 +823,31 @@ public abstract class AbstractScheduler implements Scheduler, Service {
private Optional<SchedulerExecutionWithTrigger> evaluateScheduleTrigger(FlowWithWorkerTrigger flowWithTrigger) {
try {
FlowWithWorkerTrigger flowWithWorkerTrigger = flowWithTrigger.from(pluginDefaultService.injectDefaults(
flowWithTrigger.getFlow(),
flowWithTrigger.getConditionContext().getRunContext().logger()
));
// mutability dirty hack that forces the creation of a new triggerExecutionId
DefaultRunContext runContext = (DefaultRunContext) flowWithWorkerTrigger.getConditionContext().getRunContext();
DefaultRunContext runContext = (DefaultRunContext) flowWithTrigger.getConditionContext().getRunContext();
runContextInitializer.forScheduler(
runContext,
flowWithWorkerTrigger.getTriggerContext(),
flowWithWorkerTrigger.getAbstractTrigger()
flowWithTrigger.getTriggerContext(),
flowWithTrigger.getAbstractTrigger()
);
Optional<Execution> evaluate = ((Schedulable) flowWithWorkerTrigger.getAbstractTrigger()).evaluate(
flowWithWorkerTrigger.getConditionContext(),
flowWithWorkerTrigger.getTriggerContext()
Optional<Execution> evaluate = ((Schedulable) flowWithTrigger.getAbstractTrigger()).evaluate(
flowWithTrigger.getConditionContext(),
flowWithTrigger.getTriggerContext()
);
if (log.isDebugEnabled()) {
logService.logTrigger(
flowWithWorkerTrigger.getTriggerContext(),
flowWithTrigger.getTriggerContext(),
Level.DEBUG,
"[type: {}] {}",
flowWithWorkerTrigger.getAbstractTrigger().getType(),
flowWithTrigger.getAbstractTrigger().getType(),
evaluate.map(execution -> "New execution '" + execution.getId() + "'").orElse("Empty evaluation")
);
}
flowWithWorkerTrigger.getConditionContext().getRunContext().cleanup();
flowWithTrigger.getConditionContext().getRunContext().cleanup();
return evaluate.map(execution -> new SchedulerExecutionWithTrigger(
execution,
@@ -890,11 +894,6 @@ public abstract class AbstractScheduler implements Scheduler, Service {
}
private void sendWorkerTriggerToWorker(FlowWithWorkerTrigger flowWithTrigger) throws InternalException {
FlowWithWorkerTrigger flowWithTriggerWithDefault = flowWithTrigger.from(
pluginDefaultService.injectDefaults(flowWithTrigger.getFlow(),
flowWithTrigger.getConditionContext().getRunContext().logger())
);
if (log.isDebugEnabled()) {
logService.logTrigger(
flowWithTrigger.getTriggerContext(),
@@ -906,23 +905,23 @@ public abstract class AbstractScheduler implements Scheduler, Service {
var workerTrigger = WorkerTrigger
.builder()
.trigger(flowWithTriggerWithDefault.abstractTrigger)
.triggerContext(flowWithTriggerWithDefault.triggerContext)
.conditionContext(flowWithTriggerWithDefault.conditionContext)
.trigger(flowWithTrigger.abstractTrigger)
.triggerContext(flowWithTrigger.triggerContext)
.conditionContext(flowWithTrigger.conditionContext)
.build();
try {
Optional<WorkerGroup> workerGroup = workerGroupService.resolveGroupFromJob(workerTrigger);
if (workerGroup.isPresent()) {
// Check if the worker group exists
String tenantId = flowWithTrigger.getFlow().getTenantId();
RunContext runContext = flowWithTriggerWithDefault.conditionContext.getRunContext();
RunContext runContext = flowWithTrigger.conditionContext.getRunContext();
String workerGroupKey = runContext.render(workerGroup.get().getKey());
if (workerGroupExecutorInterface.isWorkerGroupExistForKey(workerGroupKey, tenantId)) {
// Check whether at least one worker is available
if (workerGroupExecutorInterface.isWorkerGroupAvailableForKey(workerGroupKey)) {
this.workerJobQueue.emit(workerGroupKey, workerTrigger);
} else {
WorkerGroup.Fallback fallback = workerGroup.map(wg -> wg.getFallback()).orElse(WorkerGroup.Fallback.WAIT);
WorkerGroup.Fallback fallback = workerGroup.map(WorkerGroup::getFallback).orElse(WorkerGroup.Fallback.WAIT);
switch(fallback) {
case FAIL -> runContext.logger()
.error("No workers are available for worker group '{}', ignoring the trigger.", workerGroupKey);

View File

@@ -7,7 +7,6 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.exc.InvalidTypeIdException;
import com.fasterxml.jackson.databind.exc.UnrecognizedPropertyException;
import io.kestra.core.models.validations.ManualConstraintViolation;
import jakarta.inject.Singleton;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOUtils;
@@ -20,8 +19,7 @@ import java.util.Collections;
import java.util.Map;
import java.util.Set;
@Singleton
public class YamlParser {
public final class YamlParser {
private static final ObjectMapper STRICT_MAPPER = JacksonMapper.ofYaml()
.enable(JsonParser.Feature.STRICT_DUPLICATE_DETECTION)
.disable(DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE);
@@ -33,12 +31,11 @@ public class YamlParser {
return FilenameUtils.getExtension(path.toFile().getAbsolutePath()).equals("yaml") || FilenameUtils.getExtension(path.toFile().getAbsolutePath()).equals("yml");
}
public <T> T parse(String input, Class<T> cls) {
public static <T> T parse(String input, Class<T> cls) {
return read(input, cls, type(cls));
}
public <T> T parse(Map<String, Object> input, Class<T> cls, Boolean strict) {
public static <T> T parse(Map<String, Object> input, Class<T> cls, Boolean strict) {
ObjectMapper currentMapper = strict ? STRICT_MAPPER : NON_STRICT_MAPPER;
try {
@@ -56,7 +53,7 @@ public class YamlParser {
return cls.getSimpleName().toLowerCase();
}
public <T> T parse(File file, Class<T> cls) throws ConstraintViolationException {
public static <T> T parse(File file, Class<T> cls) throws ConstraintViolationException {
try {
String input = IOUtils.toString(file.toURI(), StandardCharsets.UTF_8);
return read(input, cls, type(cls));
@@ -77,13 +74,12 @@ public class YamlParser {
}
}
private <T> T read(String input, Class<T> objectClass, String resource) {
private static <T> T read(String input, Class<T> objectClass, String resource) {
try {
return STRICT_MAPPER.readValue(input, objectClass);
} catch (JsonProcessingException e) {
jsonProcessingExceptionHandler(input, resource, e);
}
return null;
}
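With YamlParser now final and exposing only static methods, callers no longer inject or instantiate a parser; they call it directly. A minimal sketch (the file path and the source variable are placeholders):

    Flow fromFile = YamlParser.parse(new File("flows/hello-world.yaml"), Flow.class);
    Flow fromString = YamlParser.parse(source, Flow.class);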

View File

@@ -7,6 +7,7 @@ import io.kestra.core.models.conditions.ConditionContext;
import io.kestra.core.models.conditions.ScheduleCondition;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.tasks.ResolvedTask;
import io.kestra.core.models.triggers.AbstractTrigger;
import io.kestra.core.models.triggers.multipleflows.MultipleCondition;
@@ -32,7 +33,7 @@ public class ConditionService {
private RunContextFactory runContextFactory;
@VisibleForTesting
public boolean isValid(Condition condition, Flow flow, @Nullable Execution execution, MultipleConditionStorageInterface multipleConditionStorage) {
public boolean isValid(Condition condition, FlowInterface flow, @Nullable Execution execution, MultipleConditionStorageInterface multipleConditionStorage) {
ConditionContext conditionContext = this.conditionContext(
runContextFactory.of(flow, execution),
flow,
@@ -43,11 +44,11 @@ public class ConditionService {
return this.valid(flow, Collections.singletonList(condition), conditionContext);
}
public boolean isValid(Condition condition, Flow flow, @Nullable Execution execution) {
public boolean isValid(Condition condition, FlowInterface flow, @Nullable Execution execution) {
return this.isValid(condition, flow, execution, null);
}
private void logException(Flow flow, Object condition, ConditionContext conditionContext, Exception e) {
private void logException(FlowInterface flow, Object condition, ConditionContext conditionContext, Exception e) {
conditionContext.getRunContext().logger().warn(
"[namespace: {}] [flow: {}] [condition: {}] Evaluate Condition Failed with error '{}'",
flow.getNamespace(),
@@ -116,7 +117,7 @@ public class ConditionService {
}
}
public ConditionContext conditionContext(RunContext runContext, Flow flow, @Nullable Execution execution, MultipleConditionStorageInterface multipleConditionStorage) {
public ConditionContext conditionContext(RunContext runContext, FlowInterface flow, @Nullable Execution execution, MultipleConditionStorageInterface multipleConditionStorage) {
return ConditionContext.builder()
.flow(flow)
.execution(execution)
@@ -129,7 +130,7 @@ public class ConditionService {
return this.conditionContext(runContext, flow, execution, null);
}
boolean valid(Flow flow, List<Condition> list, ConditionContext conditionContext) {
boolean valid(FlowInterface flow, List<Condition> list, ConditionContext conditionContext) {
return list
.stream()
.allMatch(condition -> {

View File

@@ -11,6 +11,7 @@ import io.kestra.core.models.executions.ExecutionKilledExecution;
import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.executions.TaskRunAttempt;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.flows.input.InputAndValue;
@@ -319,7 +320,7 @@ public class ExecutionService {
}
@SuppressWarnings("deprecation")
private Execution markAs(final Execution execution, Flow flow, String taskRunId, State.Type newState, @Nullable Map<String, Object> onResumeInputs) throws Exception {
private Execution markAs(final Execution execution, FlowInterface flow, String taskRunId, State.Type newState, @Nullable Map<String, Object> onResumeInputs) throws Exception {
Set<String> taskRunToRestart = this.taskRunToRestart(
execution,
taskRun -> taskRun.getId().equals(taskRunId)
@@ -327,9 +328,11 @@ public class ExecutionService {
Execution newExecution = execution.withMetadata(execution.getMetadata().nextAttempt());
final FlowWithSource flowWithSource = pluginDefaultService.injectVersionDefaults(flow, false);
for (String s : taskRunToRestart) {
TaskRun originalTaskRun = newExecution.findTaskRunByTaskRunId(s);
Task task = flow.findTaskByTaskId(originalTaskRun.getTaskId());
Task task = flowWithSource.findTaskByTaskId(originalTaskRun.getTaskId());
boolean isFlowable = task.isFlowable();
if (!isFlowable || s.equals(taskRunId)) {
@@ -477,7 +480,7 @@ public class ExecutionService {
* @return the execution in the new state.
* @throws Exception if the state of the execution cannot be updated
*/
public Execution resume(Execution execution, Flow flow, State.Type newState) throws Exception {
public Execution resume(Execution execution, FlowInterface flow, State.Type newState) throws Exception {
return this.resume(execution, flow, newState, (Map<String, Object>) null);
}
@@ -490,7 +493,7 @@ public class ExecutionService {
* @param flow the flow of the execution
* @return the execution in the new state.
*/
public Mono<List<InputAndValue>> validateForResume(final Execution execution, Flow flow) {
public Mono<List<InputAndValue>> validateForResume(final Execution execution, FlowInterface flow) {
return getFirstPausedTaskOr(execution, flow)
.flatMap(task -> {
if (task.isPresent() && task.get() instanceof Pause pauseTask) {
@@ -532,7 +535,7 @@ public class ExecutionService {
* @param inputs the onResume inputs
* @return the execution in the new state.
*/
public Mono<Execution> resume(final Execution execution, Flow flow, State.Type newState, @Nullable Publisher<CompletedPart> inputs) {
public Mono<Execution> resume(final Execution execution, FlowInterface flow, State.Type newState, @Nullable Publisher<CompletedPart> inputs) {
return getFirstPausedTaskOr(execution, flow)
.flatMap(task -> {
if (task.isPresent() && task.get() instanceof Pause pauseTask) {
@@ -550,12 +553,14 @@ public class ExecutionService {
});
}
private static Mono<Optional<Task>> getFirstPausedTaskOr(Execution execution, Flow flow){
private Mono<Optional<Task>> getFirstPausedTaskOr(Execution execution, FlowInterface flow){
final FlowWithSource flowWithSource = pluginDefaultService.injectVersionDefaults(flow, false);
return Mono.create(sink -> {
try {
var runningTaskRun = execution
.findFirstByState(State.Type.PAUSED)
.map(throwFunction(task -> flow.findTaskByTaskId(task.getTaskId())));
.map(throwFunction(task -> flowWithSource.findTaskByTaskId(task.getTaskId())));
sink.success(runningTaskRun);
} catch (InternalException e) {
sink.error(e);
@@ -574,7 +579,7 @@ public class ExecutionService {
* @return the execution in the new state.
* @throws Exception if the state of the execution cannot be updated
*/
public Execution resume(final Execution execution, Flow flow, State.Type newState, @Nullable Map<String, Object> inputs) throws Exception {
public Execution resume(final Execution execution, FlowInterface flow, State.Type newState, @Nullable Map<String, Object> inputs) throws Exception {
var pausedTaskRun = execution
.findFirstByState(State.Type.PAUSED);

View File

@@ -1,20 +1,23 @@
package io.kestra.core.services;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowId;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithException;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.models.triggers.AbstractTrigger;
import io.kestra.core.models.validations.ModelValidator;
import io.kestra.core.models.validations.ValidateConstraintViolation;
import io.kestra.core.plugins.PluginRegistry;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.serializers.JacksonMapper;
import io.kestra.core.serializers.YamlParser;
import io.kestra.core.utils.ListUtils;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import jakarta.validation.ConstraintViolationException;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.ClassUtils;
import org.apache.commons.lang3.builder.EqualsBuilder;
@@ -22,7 +25,17 @@ import org.apache.commons.lang3.builder.EqualsBuilder;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
@@ -30,27 +43,99 @@ import java.util.stream.Stream;
import java.util.stream.StreamSupport;
/**
* Provides business logic to manipulate {@link Flow}
* Provides business logic for manipulating flow objects.
*/
@Singleton
@Slf4j
public class FlowService {
private static final ObjectMapper NON_DEFAULT_OBJECT_MAPPER = JacksonMapper.ofJson()
.copy()
.setSerializationInclusion(JsonInclude.Include.NON_DEFAULT);
@Inject
Optional<FlowRepositoryInterface> flowRepository;
@Inject
YamlParser yamlParser;
@Inject
PluginDefaultService pluginDefaultService;
@Inject
PluginRegistry pluginRegistry;
@Inject
ModelValidator modelValidator;
/**
* Validates and creates the given flow.
* <p>
* The validation of the flow is done from the source after injecting all plugin default values.
*
* @param flow The flow.
* @param strictValidation Specifies whether to perform a strict validation of the flow.
* @return The created {@link FlowWithSource}.
*/
public FlowWithSource create(final GenericFlow flow, final boolean strictValidation) {
Objects.requireNonNull(flow, "Cannot create null flow");
if (flow.getSource() == null || flow.getSource().isBlank()) {
throw new IllegalArgumentException("Cannot create flow with null or blank source");
}
// Check Flow with defaults
FlowWithSource flowWithDefault = pluginDefaultService.injectAllDefaults(flow, strictValidation);
modelValidator.validate(flowWithDefault);
return repository().create(flow);
}
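A minimal usage sketch of this creation path, assuming GenericFlow.fromYaml(tenantId, source) as used further down in importFlow (the tenant id, namespace and task type are illustrative):

    GenericFlow genericFlow = GenericFlow.fromYaml("my-tenant", """
        id: hello-world
        namespace: company.team
        tasks:
          - id: hello
            type: io.kestra.plugin.core.log.Log
            message: Hello World!
        """);
    FlowWithSource created = flowService.create(genericFlow, true);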
private FlowRepositoryInterface repository() {
return flowRepository
.orElseThrow(() -> new IllegalStateException("Cannot perform operation on flow. Cause: No FlowRepository"));
}
/**
* Validates the given flow source.
* <p>
* The YAML source can contain one or many objects.
*
* @param tenantId The tenant identifier.
* @param flows The YAML source.
* @return The list of validation constraint violations.
*/
public List<ValidateConstraintViolation> validate(final String tenantId, final String flows) {
AtomicInteger index = new AtomicInteger(0);
return Stream
.of(flows.split("\\n+---\\n*?"))
.map(source -> {
ValidateConstraintViolation.ValidateConstraintViolationBuilder<?, ?> validateConstraintViolationBuilder = ValidateConstraintViolation.builder();
validateConstraintViolationBuilder.index(index.getAndIncrement());
try {
FlowWithSource flow = pluginDefaultService.parseFlowWithAllDefaults(tenantId, source, true);
Integer sentRevision = flow.getRevision();
if (sentRevision != null) {
Integer lastRevision = Optional.ofNullable(repository().lastRevision(tenantId, flow.getNamespace(), flow.getId()))
.orElse(0);
validateConstraintViolationBuilder.outdated(!sentRevision.equals(lastRevision + 1));
}
validateConstraintViolationBuilder.deprecationPaths(deprecationPaths(flow));
validateConstraintViolationBuilder.warnings(warnings(flow, tenantId));
validateConstraintViolationBuilder.infos(relocations(source).stream().map(relocation -> relocation.from() + " is replaced by " + relocation.to()).toList());
validateConstraintViolationBuilder.flow(flow.getId());
validateConstraintViolationBuilder.namespace(flow.getNamespace());
modelValidator.validate(flow);
} catch (ConstraintViolationException e) {
validateConstraintViolationBuilder.constraints(e.getMessage());
} catch (RuntimeException re) {
// In case of any error, we add a validation violation so the error is displayed in the UI.
// We may change that by throwing an internal error and handling it in the UI, but this should not occur except for rare cases
// in dev like incompatible plugin versions.
log.error("Unable to validate the flow", re);
validateConstraintViolationBuilder.constraints("Unable to validate the flow: " + re.getMessage());
}
return validateConstraintViolationBuilder.build();
})
.collect(Collectors.toList());
}
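Because the source is split on '---' separators, a single call can validate several flow documents at once, each reported with its own index. A minimal sketch (tenantId and multiDocSource are placeholders; multiDocSource holds two YAML flow definitions separated by a line containing only '---'):

    List<ValidateConstraintViolation> results = flowService.validate(tenantId, multiDocSource);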
public FlowWithSource importFlow(String tenantId, String source) {
return this.importFlow(tenantId, source, false);
}
@@ -60,29 +145,33 @@ public class FlowService {
throw noRepositoryException();
}
FlowWithSource withTenant = yamlParser.parse(source, Flow.class).toBuilder()
.tenantId(tenantId)
.build()
.withSource(source);
final GenericFlow flow = GenericFlow.fromYaml(tenantId, source);
FlowRepositoryInterface flowRepository = this.flowRepository.get();
Optional<FlowWithSource> flowWithSource = flowRepository
.findByIdWithSource(withTenant.getTenantId(), withTenant.getNamespace(), withTenant.getId(), Optional.empty(), true);
if (dryRun) {
return flowWithSource
.map(previous -> {
if (previous.equals(withTenant, source) && !previous.isDeleted()) {
return previous;
} else {
return FlowWithSource.of(withTenant.toBuilder().revision(previous.getRevision() + 1).build(), source);
}
})
.orElseGet(() -> FlowWithSource.of(withTenant, source).toBuilder().revision(1).build());
}
Optional<FlowWithSource> maybeExisting = flowRepository.findByIdWithSource(
flow.getTenantId(),
flow.getNamespace(),
flow.getId(),
Optional.empty(),
true
);
return flowWithSource
.map(previous -> flowRepository.update(withTenant, previous, source, pluginDefaultService.injectDefaults(withTenant)))
.orElseGet(() -> flowRepository.create(withTenant, source, pluginDefaultService.injectDefaults(withTenant)));
// Inject default plugin 'version' props before converting
// to a flow to correctly resolve all plugin types.
FlowWithSource flowToImport = pluginDefaultService.injectVersionDefaults(flow, false);
if (dryRun) {
return maybeExisting
.map(previous -> previous.isSameWithSource(flowToImport) && !previous.isDeleted() ?
previous :
FlowWithSource.of(flowToImport.toBuilder().revision(previous.getRevision() + 1).build(), source)
)
.orElseGet(() -> FlowWithSource.of(flowToImport, source).toBuilder().revision(1).build());
} else {
return maybeExisting
.map(previous -> flowRepository.update(flow, previous))
.orElseGet(() -> flowRepository.create(flow));
}
}
public List<FlowWithSource> findByNamespaceWithSource(String tenantId, String namespace) {
@@ -117,7 +206,7 @@ public class FlowService {
return flowRepository.get().findById(tenantId, namespace, flowId);
}
public Stream<FlowWithSource> keepLastVersion(Stream<FlowWithSource> stream) {
public Stream<FlowInterface> keepLastVersion(Stream<FlowInterface> stream) {
return keepLastVersionCollector(stream);
}
@@ -262,17 +351,17 @@ public class FlowService {
.filter(method -> !Modifier.isStatic(method.getModifiers()));
}
public Collection<FlowWithSource> keepLastVersion(List<FlowWithSource> flows) {
public Collection<FlowInterface> keepLastVersion(List<FlowInterface> flows) {
return keepLastVersionCollector(flows.stream()).toList();
}
public Stream<FlowWithSource> keepLastVersionCollector(Stream<FlowWithSource> stream) {
public Stream<FlowInterface> keepLastVersionCollector(Stream<FlowInterface> stream) {
// Use a Map to track the latest version of each flow
Map<String, FlowWithSource> latestFlows = new HashMap<>();
Map<String, FlowInterface> latestFlows = new HashMap<>();
stream.forEach(flow -> {
String uid = flow.uidWithoutRevision();
FlowWithSource existing = latestFlows.get(uid);
FlowInterface existing = latestFlows.get(uid);
// Update only if the current flow has a higher revision
if (existing == null || flow.getRevision() > existing.getRevision()) {
@@ -289,7 +378,7 @@ public class FlowService {
protected boolean removeUnwanted(Flow f, Execution execution) {
// we don't allow recursive flow triggers
return !f.uidWithoutRevision().equals(Flow.uidWithoutRevision(execution));
return !f.uidWithoutRevision().equals(FlowId.uidWithoutRevision(execution));
}
public static List<AbstractTrigger> findRemovedTrigger(Flow flow, Flow previous) {
@@ -327,22 +416,6 @@ public class FlowService {
return source + String.format("\ndisabled: %s", disabled);
}
public static String generateSource(Flow flow) {
try {
String json = NON_DEFAULT_OBJECT_MAPPER.writeValueAsString(flow);
Object map = fixSnakeYaml(JacksonMapper.toMap(json));
String source = JacksonMapper.ofYaml().writeValueAsString(map);
// remove the revision from the generated source
return source.replaceFirst("(?m)^revision: \\d+\n?","");
} catch (JsonProcessingException e) {
log.warn("Unable to convert flow json '{}' '{}'({})", flow.getNamespace(), flow.getId(), flow.getRevision(), e);
return null;
}
}
// Used in Git plugin
public List<Flow> findByNamespacePrefix(String tenantId, String namespacePrefix) {
if (flowRepository.isEmpty()) {
@@ -361,50 +434,6 @@ public class FlowService {
return flowRepository.get().delete(flow);
}
/**
* Dirty hack that only concerns previous flows with no source code, needed because of org.yaml.snakeyaml.emitter.Emitter:
* <pre>
* if (previousSpace) {
* spaceBreak = true;
* }
* </pre>
* This check will detect ` \n` as an invalid entry in a string and will break the multiline value, turning it into a single line
*
* @param object the object to fix
* @return the modified object
*/
private static Object fixSnakeYaml(Object object) {
if (object instanceof Map<?, ?> mapValue) {
return mapValue
.entrySet()
.stream()
.map(entry -> new AbstractMap.SimpleEntry<>(
fixSnakeYaml(entry.getKey()),
fixSnakeYaml(entry.getValue())
))
.filter(entry -> entry.getValue() != null)
.collect(Collectors.toMap(
Map.Entry::getKey,
Map.Entry::getValue,
(u, v) -> {
throw new IllegalStateException(String.format("Duplicate key %s", u));
},
LinkedHashMap::new
));
} else if (object instanceof Collection<?> collectionValue) {
return collectionValue
.stream()
.map(FlowService::fixSnakeYaml)
.toList();
} else if (object instanceof String item) {
if (item.contains("\n")) {
return item.replaceAll("\\s+\\n", "\\\n");
}
}
return object;
}
/**
* Return true if the namespace is allowed from the namespace denoted by 'fromTenant' and 'fromNamespace'.
* As namespace restriction is an EE feature, this will always return true in OSS.

View File

@@ -49,7 +49,7 @@ public class FlowTriggerService {
.map(io.kestra.plugin.core.trigger.Flow.class::cast);
}
public List<Execution> computeExecutionsFromFlowTriggers(Execution execution, List<Flow> allFlows, Optional<MultipleConditionStorageInterface> multipleConditionStorage) {
public List<Execution> computeExecutionsFromFlowTriggers(Execution execution, List<? extends Flow> allFlows, Optional<MultipleConditionStorageInterface> multipleConditionStorage) {
List<FlowWithFlowTrigger> validTriggersBeforeMultipleConditionEval = allFlows.stream()
// prevent recursive flow triggers
.filter(flow -> flowService.removeUnwanted(flow, execution))

View File

@@ -56,7 +56,7 @@ public class GraphService {
public GraphCluster of(GraphCluster baseGraph, FlowWithSource flow, List<String> expandedSubflows, Map<String, FlowWithSource> flowByUid, Execution execution) throws IllegalVariableEvaluationException {
String tenantId = flow.getTenantId();
flow = pluginDefaultService.injectDefaults(flow);
flow = pluginDefaultService.injectAllDefaults(flow, false);
List<Trigger> triggers = null;
if (flow.getTriggers() != null) {
triggers = triggerRepository.find(Pageable.UNPAGED, null, tenantId, flow.getNamespace(), flow.getId(), null);
@@ -120,7 +120,7 @@ public class GraphService {
));
}
);
subflow = pluginDefaultService.injectDefaults(subflow);
subflow = pluginDefaultService.injectAllDefaults(subflow, false);
SubflowGraphTask finalSubflowGraphTask = subflowGraphTask;
return new TaskToClusterReplacer(

View File

@@ -2,7 +2,7 @@ package io.kestra.core.services;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.models.Label;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.triggers.AbstractTrigger;
import io.kestra.core.runners.RunContext;
import io.kestra.core.utils.ListUtils;
@@ -17,7 +17,7 @@ public final class LabelService {
/**
* Return flow labels excluding system labels.
*/
public static List<Label> labelsExcludingSystem(Flow flow) {
public static List<Label> labelsExcludingSystem(FlowInterface flow) {
return ListUtils.emptyOnNull(flow.getLabels()).stream().filter(label -> !label.key().startsWith(Label.SYSTEM_PREFIX)).toList();
}
@@ -27,7 +27,7 @@ public final class LabelService {
* Trigger labels will be rendered via the run context but not flow labels.
* In case rendering is not possible, the label will be omitted.
*/
public static List<Label> fromTrigger(RunContext runContext, Flow flow, AbstractTrigger trigger) {
public static List<Label> fromTrigger(RunContext runContext, FlowInterface flow, AbstractTrigger trigger) {
final List<Label> labels = new ArrayList<>();
if (flow.getLabels() != null) {

View File

@@ -3,7 +3,8 @@ package io.kestra.core.services;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.LogEntry;
import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowId;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.triggers.TriggerContext;
import io.kestra.core.repositories.LogRepositoryInterface;
import io.micronaut.context.annotation.Value;
@@ -39,7 +40,7 @@ public class LogService {
@Inject
private LogRepositoryInterface logRepository;
public void logExecution(Flow flow, Logger logger, Level level, String message, Object... args) {
public void logExecution(FlowId flow, Logger logger, Level level, String message, Object... args) {
String finalMsg = tenantEnabled ? FLOW_PREFIX_WITH_TENANT + message : FLOW_PREFIX_NO_TENANT + message;
Object[] executionArgs = tenantEnabled ?
new Object[] { flow.getTenantId(), flow.getNamespace(), flow.getId() } :

View File

@@ -2,13 +2,17 @@ package io.kestra.core.services;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Lists;
import io.kestra.core.exceptions.KestraRuntimeException;
import io.kestra.core.models.Plugin;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.LogEntry;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithException;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.PluginDefault;
import io.kestra.core.plugins.PluginRegistry;
@@ -19,22 +23,34 @@ import io.kestra.core.runners.RunContextLogger;
import io.kestra.core.serializers.JacksonMapper;
import io.kestra.core.serializers.YamlParser;
import io.kestra.core.utils.MapUtils;
import io.kestra.plugin.core.flow.Template;
import io.micronaut.core.annotation.Nullable;
import jakarta.annotation.PostConstruct;
import jakarta.inject.Inject;
import jakarta.inject.Named;
import jakarta.inject.Provider;
import jakarta.inject.Singleton;
import jakarta.validation.ConstraintViolationException;
import lombok.extern.slf4j.Slf4j;
import org.slf4j.Logger;
import org.slf4j.event.Level;
import java.util.*;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import jakarta.inject.Inject;
import jakarta.inject.Named;
import jakarta.inject.Singleton;
import jakarta.validation.ConstraintViolationException;
/**
* Service for parsing flows and injecting plugin default values.
*/
@Singleton
@Slf4j
public class PluginDefaultService {
@@ -44,6 +60,10 @@ public class PluginDefaultService {
private static final ObjectMapper OBJECT_MAPPER = JacksonMapper.ofYaml().copy()
.setSerializationInclusion(JsonInclude.Include.NON_NULL);
private static final String PLUGIN_DEFAULTS_FIELD = "pluginDefaults";
private static final TypeReference<List<PluginDefault>> PLUGIN_DEFAULTS_TYPE_REF = new TypeReference<>() {
};
@Nullable
@Inject
@@ -53,16 +73,16 @@ public class PluginDefaultService {
@Inject
protected PluginGlobalDefaultConfiguration pluginGlobalDefault;
@Inject
protected YamlParser yamlParser;
@Inject
@Named(QueueFactoryInterface.WORKERTASKLOG_NAMED)
@Nullable
protected QueueInterface<LogEntry> logQueue;
@Inject
private PluginRegistry pluginRegistry;
protected PluginRegistry pluginRegistry;
@Inject
protected Provider<LogService> logService; // lazy-init
private final AtomicBoolean warnOnce = new AtomicBoolean(false);
@@ -83,38 +103,69 @@ public class PluginDefaultService {
}
/**
* Gets all the default values for the given flow.
*
* @param flow the flow to extract defaults from
* @return list of {@code PluginDefault} ordered by most important first
*/
protected List<PluginDefault> mergeAllDefaults(Flow flow) {
List<PluginDefault> list = new ArrayList<>();
if (flow.getPluginDefaults() != null) {
list.addAll(flow.getPluginDefaults());
protected List<PluginDefault> getAllDefaults(final String tenantId,
final String namespace,
final Map<String, Object> flow) {
List<PluginDefault> defaults = new ArrayList<>();
defaults.addAll(getFlowDefaults(flow));
defaults.addAll(getGlobalDefaults());
return defaults;
}
/**
* Gets the flow-level default values.
*
* @param flow the flow to extract defaults from
* @return list of {@code PluginDefault} ordered by most important first
*/
protected List<PluginDefault> getFlowDefaults(final Map<String, Object> flow) {
Object defaults = flow.get(PLUGIN_DEFAULTS_FIELD);
if (defaults != null) {
return OBJECT_MAPPER.convertValue(defaults, PLUGIN_DEFAULTS_TYPE_REF);
} else {
return List.of();
}
}
/**
* Gets the global default values.
*
* @return list of {@code PluginDefault} ordered by most important first
*/
protected List<PluginDefault> getGlobalDefaults() {
List<PluginDefault> defaults = new ArrayList<>();
if (taskGlobalDefault != null && taskGlobalDefault.getDefaults() != null) {
if (warnOnce.compareAndSet(false, true)) {
log.warn("Global Task Defaults are deprecated, please use Global Plugin Defaults instead via the 'kestra.plugins.defaults' configuration property.");
}
list.addAll(taskGlobalDefault.getDefaults());
defaults.addAll(taskGlobalDefault.getDefaults());
}
if (pluginGlobalDefault != null && pluginGlobalDefault.getDefaults() != null) {
list.addAll(pluginGlobalDefault.getDefaults());
defaults.addAll(pluginGlobalDefault.getDefaults());
}
return list;
return defaults;
}
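// Illustrative note (not part of this change): given a raw flow map such as
//   Map.of("id", "hello", "namespace", "company.team",
//          "pluginDefaults", List.of(Map.of("type", "io.kestra.plugin.core.log.Log",
//                                           "values", Map.of("level", "WARN"))))
// getFlowDefaults(...) converts the 'pluginDefaults' entry into a List<PluginDefault>
// through OBJECT_MAPPER.convertValue(defaults, PLUGIN_DEFAULTS_TYPE_REF), while a flow
// without that field simply yields an empty list.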
/**
* Inject plugin defaults into a Flow.
* In case of exception, the flow is returned as is,
* then a logger is created based on the execution to be able to log an exception in the execution logs.
* Parses the given abstract flow and injects all default values, returning a parsed {@link FlowWithSource}.
*
* <p>
* If an exception occurs during parsing, the original flow is returned unchanged, and the exception is logged
* for the passed {@code execution}.
* </p>
*
* @return a parsed {@link FlowWithSource}, or a {@link FlowWithException} if parsing fails
*/
public FlowWithSource injectDefaults(FlowWithSource flow, Execution execution) {
public FlowWithSource injectDefaults(FlowInterface flow, Execution execution) {
try {
return this.injectDefaults(flow);
return this.injectAllDefaults(flow, false);
} catch (Exception e) {
RunContextLogger
.logEntries(
@@ -128,17 +179,22 @@ public class PluginDefaultService {
// silently do nothing
}
});
return flow;
return readWithoutDefaultsOrThrow(flow);
}
}
/**
* @deprecated use {@link #injectDefaults(FlowWithSource, Logger)} instead
* Parses the given abstract flow and injects all default values, returning a parsed {@link FlowWithSource}.
*
* <p>
* If an exception occurs during parsing, the original flow is returned unchanged, and the exception is logged.
* </p>
*
* @return a parsed {@link FlowWithSource}, or a {@link FlowWithException} if parsing fails
*/
@Deprecated(forRemoval = true, since = "0.20")
public Flow injectDefaults(Flow flow, Logger logger) {
public FlowWithSource injectAllDefaults(FlowInterface flow, Logger logger) {
try {
return this.injectDefaults(flow);
return this.injectAllDefaults(flow, false);
} catch (Exception e) {
logger.warn(
"Can't inject plugin defaults on tenant {}, namespace '{}', flow '{}' with errors '{}'",
@@ -148,80 +204,207 @@ public class PluginDefaultService {
e.getMessage(),
e
);
return flow;
return readWithoutDefaultsOrThrow(flow);
}
}
/**
* Inject plugin defaults into a Flow.
* In case of exception, the flow is returned as is, then the logger is used to log the exception.
*/
public FlowWithSource injectDefaults(FlowWithSource flow, Logger logger) {
private static FlowWithSource readWithoutDefaultsOrThrow(final FlowInterface flow) {
if (flow instanceof FlowWithSource item) {
return item;
}
if (flow instanceof Flow item) {
return FlowWithSource.of(item, item.sourceOrGenerateIfNull());
}
// The block below should only be reached during testing for failure scenarios
try {
return this.injectDefaults(flow);
} catch (Exception e) {
logger.warn(
"Can't inject plugin defaults on tenant {}, namespace '{}', flow '{}' with errors '{}'",
flow.getTenantId(),
flow.getNamespace(),
flow.getId(),
e.getMessage(),
e
);
return flow;
Flow parsed = NON_DEFAULT_OBJECT_MAPPER.readValue(flow.getSource(), Flow.class);
return FlowWithSource.of(parsed, flow.getSource());
} catch (JsonProcessingException e) {
throw new KestraRuntimeException("Failed to read flow from source", e);
}
}
/**
* @deprecated use {@link #injectDefaults(FlowWithSource)} instead
* Parses the given abstract flow and injects all default values, returning a parsed {@link FlowWithSource}.
*
* <p>
* If {@code strictParsing} is {@code true}, the parsing will fail in the following cases:
* </p>
* <ul>
* <li>The source contains duplicate properties.</li>
* <li>The source contains unknown properties.</li>
* </ul>
*
* @param flow the flow to be parsed
* @return a parsed {@link FlowWithSource}
*
* @throws ConstraintViolationException if {@code strictParsing} is {@code true} and the source does not meet strict validation requirements
* @throws KestraRuntimeException if an error occurs while parsing the flow and it cannot be processed
*/
@Deprecated(forRemoval = true, since = "0.20")
public Flow injectDefaults(Flow flow) throws ConstraintViolationException {
if (flow instanceof FlowWithSource flowWithSource) {
return this.injectDefaults(flowWithSource);
}
public FlowWithSource injectAllDefaults(final FlowInterface flow, final boolean strictParsing) {
Map<String, Object> flowAsMap = NON_DEFAULT_OBJECT_MAPPER.convertValue(flow, JacksonMapper.MAP_TYPE_REFERENCE);
return innerInjectDefault(flow, flowAsMap);
}
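// Usage sketch (hypothetical caller): inject all defaults into an un-typed flow before
// strongly-typed deserialization; only the service methods below come from this change,
// the surrounding variable names are assumptions.
//   GenericFlow generic = GenericFlow.fromYaml(tenantId, sourceYaml);
//   FlowWithSource typed = pluginDefaultService.injectAllDefaults(generic, true);
// With strictParsing=true, a source containing duplicate or unknown properties is rejected
// with a ConstraintViolationException instead of being silently accepted.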
/**
* Inject plugin defaults into a Flow.
*/
public FlowWithSource injectDefaults(FlowWithSource flow) throws ConstraintViolationException {
try {
String source = flow.getSource();
if (source == null) {
// Flow revisions created from older Kestra versions may not be linked to their original source.
// In such cases, fall back to the generated source approach to enable plugin default injection.
source = flow.generateSource();
}
String source = flow.sourceOrGenerateIfNull();
if (source == null) {
// return immediately if source is still null (should never happen)
return flow;
// This should never happen
String error = "Cannot apply plugin defaults. Cause: flow has no defined source.";
logService.get().logExecution(flow, log, Level.ERROR, error);
throw new IllegalArgumentException(error);
}
Map<String, Object> flowAsMap = OBJECT_MAPPER.readValue(source, JacksonMapper.MAP_TYPE_REFERENCE);
return parseFlowWithAllDefaults(
flow.getTenantId(),
flow.getNamespace(),
flow.getRevision(),
flow.isDeleted(),
source,
false,
strictParsing
);
}
Flow withDefault = innerInjectDefault(flow, flowAsMap);
/**
* Parses the given abstract flow and injects default plugin versions, returning a parsed {@link FlowWithSource}.
*
* <p>
* If the provided flow already represents a concrete {@link FlowWithSource}, it is returned as is.
* </p>
*
* <p>
* If {@code safe} is set to {@code true} and the given flow cannot be parsed,
* this method returns a {@link FlowWithException} instead of throwing an error.
* </p>
*
* @param flow the flow to be parsed
* @param safe whether parsing errors should be handled gracefully
* @return a parsed {@link FlowWithSource}, or a {@link FlowWithException} if parsing fails and {@code safe} is {@code true}
*/
public FlowWithSource injectVersionDefaults(final FlowInterface flow, final boolean safe) {
if (flow instanceof FlowWithSource flowWithSource) {
// shortcut - if the flow is already fully parsed, return it immediately.
return flowWithSource;
}
// revision and tenants are not in the source, so we copy them manually
return withDefault.toBuilder()
.tenantId(flow.getTenantId())
.revision(flow.getRevision())
.build()
.withSource(source);
FlowWithSource result;
String source = flow.getSource();
try {
if (source == null) {
source = OBJECT_MAPPER.writeValueAsString(flow);
}
result = parseFlowWithAllDefaults(flow.getTenantId(), flow.getNamespace(), flow.getRevision(), flow.isDeleted(), source, true, false);
} catch (Exception e) {
if (safe) {
logService.get().logExecution(flow, log, Level.ERROR, "Failed to read flow.", e);
result = FlowWithException.from(flow, e);
// deleted is not part of the original 'source'
result = result.toBuilder().deleted(flow.isDeleted()).build();
} else {
throw new KestraRuntimeException(e);
}
}
return result;
}
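// Usage sketch (hypothetical caller): only default plugin versions are injected here, which
// is cheaper than full default injection when loading many flows at once.
//   FlowWithSource withVersions = pluginDefaultService.injectVersionDefaults(genericFlow, true);
// With safe=true, an unparseable flow is returned as a FlowWithException (with the deleted
// flag copied over) rather than aborting the whole load.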
public Map<String, Object> injectVersionDefaults(@Nullable final String tenantId,
final String namespace,
final Map<String, Object> mapFlow) {
return innerInjectDefault(tenantId, namespace, mapFlow, true);
}
/**
* Parses and injects defaults into the given flow.
*
* @param tenantId the Tenant ID.
* @param source the flow source.
* @param strict {@code true} to fail on duplicate or unknown properties.
* @return a new {@link FlowWithSource}.
*
* @throws ConstraintViolationException if the flow fails validation while parsing.
*/
public FlowWithSource parseFlowWithAllDefaults(@Nullable final String tenantId, final String source, final boolean strict) throws ConstraintViolationException {
return parseFlowWithAllDefaults(tenantId, null, null, false, source, false, strict);
}
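// Usage sketch (assumed caller, e.g. validating user-submitted YAML before saving it):
//   FlowWithSource parsed = pluginDefaultService.parseFlowWithAllDefaults(tenantId, sourceYaml, true);
// The returned FlowWithSource keeps the submitted source string verbatim while its tasks and
// triggers are deserialized with the merged flow-level and global plugin defaults applied.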
/**
* Parses and injects defaults into the given flow.
*
* @param tenant the tenant identifier.
* @param namespace the namespace.
* @param revision the flow revision.
* @param source the flow source.
* @return a new {@link FlowWithSource}.
*
* @throws ConstraintViolationException if the flow fails validation while parsing.
*/
private FlowWithSource parseFlowWithAllDefaults(@Nullable final String tenant,
@Nullable String namespace,
@Nullable Integer revision,
final boolean isDeleted,
final String source,
final boolean onlyVersions,
final boolean strictParsing) throws ConstraintViolationException {
try {
Map<String, Object> mapFlow = OBJECT_MAPPER.readValue(source, JacksonMapper.MAP_TYPE_REFERENCE);
namespace = namespace == null ? (String) mapFlow.get("namespace") : namespace;
revision = revision == null ? (Integer) mapFlow.get("revision") : revision;
mapFlow = innerInjectDefault(tenant, namespace, mapFlow, onlyVersions);
FlowWithSource withDefault = YamlParser.parse(mapFlow, FlowWithSource.class, strictParsing);
// revision, tenant, and deleted are not in the 'source', so we copy them manually
FlowWithSource full = withDefault.toBuilder()
.tenantId(tenant)
.revision(revision)
.deleted(isDeleted)
.source(source)
.build();
if (tenant != null) {
// This is a hack to set the tenant in template tasks.
// When using the Template task, we need the tenant to fetch the Template from the database.
// However, as the task is executed on the Executor, we cannot retrieve it from the tenant service and have no other option.
// So we save it at flow creation/updating time.
full.allTasksWithChilds().stream().filter(task -> task instanceof Template).forEach(task -> ((Template) task).setTenantId(tenant));
}
return full;
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
throw new KestraRuntimeException(e);
}
}
@SuppressWarnings("unchecked")
private Flow innerInjectDefault(Flow flow, Map<String, Object> flowAsMap) {
List<PluginDefault> allDefaults = mergeAllDefaults(flow);
private Map<String, Object> innerInjectDefault(final String tenantId, final String namespace, Map<String, Object> flowAsMap, final boolean onlyVersions) {
List<PluginDefault> allDefaults = getAllDefaults(tenantId, namespace, flowAsMap);
if (onlyVersions) {
// keep only the default 'version' property
allDefaults = allDefaults.stream()
.map(defaults -> {
Map<String, Object> filtered = defaults.getValues().entrySet()
.stream().filter(entry -> entry.getKey().equals("version"))
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
return filtered.isEmpty() ? null : defaults.toBuilder().values(filtered).build();
})
.filter(Objects::nonNull)
.collect(Collectors.toCollection(ArrayList::new));
}
if (allDefaults.isEmpty()) {
// no defaults to inject - return immediately.
return flowAsMap;
}
addAliases(allDefaults);
Map<Boolean, List<PluginDefault>> allDefaultsGroup = allDefaults
.stream()
.collect(Collectors.groupingBy(PluginDefault::isForced, Collectors.toList()));
@@ -232,9 +415,9 @@ public class PluginDefaultService {
// forced plugin defaults need to be reversed, lower wins
Map<String, List<PluginDefault>> forced = pluginDefaultsToMap(Lists.reverse(allDefaultsGroup.getOrDefault(true, Collections.emptyList())));
Object pluginDefaults = flowAsMap.get("pluginDefaults");
Object pluginDefaults = flowAsMap.get(PLUGIN_DEFAULTS_FIELD);
if (pluginDefaults != null) {
flowAsMap.remove("pluginDefaults");
flowAsMap.remove(PLUGIN_DEFAULTS_FIELD);
}
// we apply defaults and overwrite with forced ones
@@ -247,10 +430,11 @@ public class PluginDefaultService {
}
if (pluginDefaults != null) {
flowAsMap.put("pluginDefaults", pluginDefaults);
flowAsMap.put(PLUGIN_DEFAULTS_FIELD, pluginDefaults);
}
return yamlParser.parse(flowAsMap, Flow.class, false);
return flowAsMap;
}
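// Illustrative note on onlyVersions: a default such as
//   type: io.kestra.plugin.core.log.Log, values: { version: "0.20.0", level: "WARN" }
// is trimmed to only its 'version' entry before being applied, and defaults that define no
// 'version' at all map to null in the stream above and are filtered out entirely.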
/**
@@ -260,7 +444,7 @@ public class PluginDefaultService {
* validation will be disabled, as we cannot differentiate between a prefix and an unknown type.
*/
public List<String> validateDefault(PluginDefault pluginDefault) {
Class<? extends Plugin> classByIdentifier = pluginRegistry.findClassByIdentifier(pluginDefault.getType());
Class<? extends Plugin> classByIdentifier = getClassByIdentifier(pluginDefault);
if (classByIdentifier == null) {
// this can either be a prefix or a non-existing plugin; in both cases we cannot validate in detail
return Collections.emptyList();
@@ -283,6 +467,10 @@ public class PluginDefaultService {
.toList();
}
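// Illustrative note: a default whose 'type' is only a package prefix (e.g. "io.kestra.plugin.core.log")
// resolves to no class in the registry, so validateDefault(...) returns an empty list of warnings
// because detailed property validation is impossible for a prefix or an unknown type.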
protected Class<? extends Plugin> getClassByIdentifier(PluginDefault pluginDefault) {
return pluginRegistry.findClassByIdentifier(pluginDefault.getType());
}
private Map<String, List<PluginDefault>> pluginDefaultsToMap(List<PluginDefault> pluginDefaults) {
return pluginDefaults
.stream()
@@ -292,7 +480,7 @@ public class PluginDefaultService {
private void addAliases(List<PluginDefault> allDefaults) {
List<PluginDefault> aliasedPluginDefault = allDefaults.stream()
.map(pluginDefault -> {
Class<? extends Plugin> classByIdentifier = pluginRegistry.findClassByIdentifier(pluginDefault.getType());
Class<? extends Plugin> classByIdentifier = getClassByIdentifier(pluginDefault);
return classByIdentifier != null && !pluginDefault.getType().equals(classByIdentifier.getTypeName()) ? pluginDefault.toBuilder().type(classByIdentifier.getTypeName()).build() : null;
})
.filter(Objects::nonNull)
@@ -357,4 +545,42 @@ public class PluginDefaultService {
return result;
}
// -----------------------------------------------------------------------------------------------------------------
// DEPRECATED
// -----------------------------------------------------------------------------------------------------------------
/**
* @deprecated use {@link #injectAllDefaults(FlowInterface, Logger)} instead
*/
@Deprecated(forRemoval = true, since = "0.20")
public Flow injectDefaults(Flow flow, Logger logger) {
try {
return this.injectDefaults(flow);
} catch (Exception e) {
logger.warn(
"Can't inject plugin defaults on tenant {}, namespace '{}', flow '{}' with errors '{}'",
flow.getTenantId(),
flow.getNamespace(),
flow.getId(),
e.getMessage(),
e
);
return flow;
}
}
/**
* @deprecated use {@link #injectAllDefaults(FlowInterface, boolean)} instead
*/
@Deprecated(forRemoval = true, since = "0.20")
public Flow injectDefaults(Flow flow) throws ConstraintViolationException {
if (flow instanceof FlowWithSource flowWithSource) {
return this.injectAllDefaults(flowWithSource, false);
}
Map<String, Object> mapFlow = NON_DEFAULT_OBJECT_MAPPER.convertValue(flow, JacksonMapper.MAP_TYPE_REFERENCE);
mapFlow = innerInjectDefault(flow.getTenantId(), flow.getNamespace(), mapFlow, false);
return YamlParser.parse(mapFlow, Flow.class, false);
}
}

View File

@@ -1,8 +1,11 @@
package io.kestra.core.topologies;
import com.google.common.annotations.VisibleForTesting;
import io.kestra.core.models.Label;
import io.kestra.core.models.conditions.Condition;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.hierarchies.Graph;
import io.kestra.core.models.tasks.ExecutableTask;
@@ -140,7 +143,8 @@ public class FlowTopologyService {
}
@Nullable
public FlowRelation isChild(FlowWithSource parent, FlowWithSource child) {
@VisibleForTesting
public FlowRelation isChild(Flow parent, Flow child) {
if (this.isFlowTaskChild(parent, child)) {
return FlowRelation.FLOW_TASK;
}
@@ -152,7 +156,7 @@ public class FlowTopologyService {
return null;
}
protected boolean isFlowTaskChild(FlowWithSource parent, FlowWithSource child) {
protected boolean isFlowTaskChild(Flow parent, Flow child) {
try {
return parent
.allTasksWithChilds()
@@ -168,7 +172,7 @@ public class FlowTopologyService {
}
}
protected boolean isTriggerChild(FlowWithSource parent, FlowWithSource child) {
protected boolean isTriggerChild(Flow parent, Flow child) {
List<AbstractTrigger> triggers = ListUtils.emptyOnNull(child.getTriggers());
// simulated execution: we add a "simulated" label so conditions can know that the evaluation is for a simulated execution
@@ -196,7 +200,7 @@ public class FlowTopologyService {
return conditionMatch && preconditionMatch;
}
private boolean validateCondition(Condition condition, FlowWithSource child, Execution execution) {
private boolean validateCondition(Condition condition, FlowInterface child, Execution execution) {
if (isFilterCondition(condition)) {
return true;
}
@@ -208,7 +212,7 @@ public class FlowTopologyService {
return this.conditionService.isValid(condition, child, execution);
}
private boolean validateMultipleConditions(Map<String, Condition> multipleConditions, FlowWithSource child, Execution execution) {
private boolean validateMultipleConditions(Map<String, Condition> multipleConditions, FlowInterface child, Execution execution) {
List<Condition> conditions = multipleConditions
.values()
.stream()

View File

@@ -6,6 +6,7 @@ import io.kestra.core.models.annotations.Plugin;
import io.kestra.core.models.annotations.PluginProperty;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.property.Property;
import io.kestra.core.models.tasks.RunnableTask;
@@ -92,7 +93,7 @@ public class Resume extends Task implements RunnableTask<VoidOutput> {
Execution execution = executionRepository.findById(executionInfo.tenantId(), executionInfo.id())
.orElseThrow(() -> new IllegalArgumentException("No execution found for execution id " + executionInfo.id()));
Flow flow = flowExecutor.findByExecution(execution).orElseThrow(() -> new IllegalArgumentException("Flow not found for execution id " + executionInfo.id()));
FlowInterface flow = flowExecutor.findByExecution(execution).orElseThrow(() -> new IllegalArgumentException("Flow not found for execution id " + executionInfo.id()));
Map<String, Object> renderedInputs = runContext.render(this.inputs).asMap(String.class, Object.class);
renderedInputs = !renderedInputs.isEmpty() ? renderedInputs : null;

View File

@@ -14,6 +14,7 @@ import io.kestra.core.models.executions.NextTaskRun;
import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.executions.TaskRunAttempt;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.hierarchies.GraphCluster;
import io.kestra.core.models.hierarchies.RelationType;
@@ -531,7 +532,7 @@ public class ForEachItem extends Task implements FlowableTask<VoidOutput>, Child
public Optional<SubflowExecutionResult> createSubflowExecutionResult(
RunContext runContext,
TaskRun taskRun,
Flow flow,
FlowInterface flow,
Execution execution
) {

View File

@@ -8,6 +8,7 @@ import io.kestra.core.models.annotations.Example;
import io.kestra.core.models.annotations.Plugin;
import io.kestra.core.models.annotations.PluginProperty;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.property.Property;
import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.executions.TaskRunAttempt;
@@ -197,7 +198,7 @@ public class Subflow extends Task implements ExecutableTask<Subflow.Output>, Chi
public Optional<SubflowExecutionResult> createSubflowExecutionResult(
RunContext runContext,
TaskRun taskRun,
io.kestra.core.models.flows.Flow flow,
FlowInterface flow,
Execution execution
) {
// we only create a worker task result when the execution is terminated

View File

@@ -11,6 +11,7 @@ import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.NextTaskRun;
import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.hierarchies.GraphCluster;
import io.kestra.core.models.hierarchies.RelationType;
import io.kestra.core.models.tasks.FlowableTask;
@@ -248,7 +249,7 @@ public class Template extends Task implements FlowableTask<Template.Output> {
}
@SuppressWarnings("deprecated")
public static Flow injectTemplate(Flow flow, Execution execution, TriFunction<String, String, String, io.kestra.core.models.templates.Template> provider) throws InternalException {
public static FlowWithSource injectTemplate(Flow flow, Execution execution, TriFunction<String, String, String, io.kestra.core.models.templates.Template> provider) throws InternalException {
AtomicReference<Flow> flowReference = new AtomicReference<>(flow);
boolean haveTemplate = true;
@@ -282,7 +283,8 @@ public class Template extends Task implements FlowableTask<Template.Output> {
haveTemplate = !templates.isEmpty();
}
return flowReference.get();
Flow f = flowReference.get();
return FlowWithSource.of(f, f.sourceOrGenerateIfNull());
}
/**

View File

@@ -0,0 +1,29 @@
package io.kestra.core.models.flows;
import org.junit.jupiter.api.Test;
import java.util.Optional;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
class FlowIdTest {
@Test
void shouldGetUidWithoutRevision() {
String id = FlowId.uidWithoutRevision("tenant", "io.kestra.unittest", "flow-id");
assertThat(id, is("tenant_io.kestra.unittest_flow-id"));
}
@Test
void shouldGetUidGivenEmptyRevision() {
String id = FlowId.uid("tenant", "io.kestra.unittest", "flow-id", Optional.empty());
assertThat(id, is("tenant_io.kestra.unittest_flow-id_-1"));
}
@Test
void shouldGetUidGivenRevision() {
String id = FlowId.uid("tenant", "io.kestra.unittest", "flow-id", Optional.of(42));
assertThat(id, is("tenant_io.kestra.unittest_flow-id_42"));
}
}

View File

@@ -25,8 +25,6 @@ import static org.hamcrest.Matchers.*;
@KestraTest
class FlowTest {
@Inject
YamlParser yamlParser = new YamlParser();
@Inject
ModelValidator modelValidator;
@@ -197,6 +195,6 @@ class FlowTest {
File file = new File(resource.getFile());
return yamlParser.parse(file, Flow.class);
return YamlParser.parse(file, Flow.class);
}
}

View File

@@ -22,7 +22,7 @@ import static org.hamcrest.Matchers.*;
class FlowWithSourceTest {
@Test
void source() throws JsonProcessingException {
var flow = Flow.builder()
FlowWithSource flow = FlowWithSource.builder()
.id(IdUtils.create())
.namespace("io.kestra.unittest")
.tasks(List.of(
@@ -37,9 +37,9 @@ class FlowWithSourceTest {
))
.build();
FlowWithSource flowWithSource = FlowWithSource.of(flow, flow.generateSource());
flow = flow.toBuilder().source(flow.sourceOrGenerateIfNull()).build();
String source = flowWithSource.getSource();
String source = flow.getSource();
assertThat(source, not(containsString("deleted: false")));
assertThat(source, containsString("format: |\n"));
@@ -60,7 +60,7 @@ class FlowWithSourceTest {
.triggers(List.of(Schedule.builder().id("schedule").cron("0 1 9 * * *").build()));
FlowWithSource flow = builder
.source(JacksonMapper.ofYaml().writeValueAsString(builder.build().toFlow()))
.source(JacksonMapper.ofYaml().writeValueAsString(builder.build()))
.build();
String source = flow.getSource();
@@ -73,7 +73,7 @@ class FlowWithSourceTest {
@Test
void of() {
// test that all fields are transmitted to FlowWithSource
Flow flow = Flow.builder()
FlowWithSource flow = FlowWithSource.builder()
.tenantId("tenantId")
.id(IdUtils.create())
.namespace("io.kestra.unittest")
@@ -132,7 +132,7 @@ class FlowWithSourceTest {
.build()
)
.build();
String expectedSource = flow.generateSource() + " # additional comment";
String expectedSource = flow.sourceOrGenerateIfNull() + " # additional comment";
FlowWithSource of = FlowWithSource.of(flow, expectedSource);
assertThat(of.equalsWithoutRevision(flow), is(true));

View File

@@ -36,8 +36,6 @@ import static org.hamcrest.Matchers.*;
@KestraTest(startRunner = true)
class FlowGraphTest {
@Inject
private YamlParser yamlParser = new YamlParser();
@Inject
private GraphService graphService;
@@ -379,7 +377,7 @@ class FlowGraphTest {
File file = new File(resource.getFile());
return yamlParser.parse(file, Flow.class).withSource(Files.readString(file.toPath()));
return YamlParser.parse(file, FlowWithSource.class).toBuilder().source(Files.readString(file.toPath())).build();
}
private static AbstractGraph node(FlowGraph flowGraph, String taskId) {

View File

@@ -1,10 +1,9 @@
package io.kestra.core.repositories;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.google.common.collect.ImmutableList;
import io.kestra.core.Helpers;
import io.kestra.core.events.CrudEvent;
import io.kestra.core.events.CrudEventType;
import io.kestra.core.models.QueryFilter;
import io.kestra.core.models.SearchResult;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.*;
@@ -12,12 +11,9 @@ import io.kestra.core.models.flows.input.StringInput;
import io.kestra.core.models.property.Property;
import io.kestra.core.queues.QueueException;
import io.kestra.core.schedulers.AbstractSchedulerTest;
import io.kestra.core.serializers.JacksonMapper;
import io.kestra.core.services.FlowService;
import io.kestra.core.services.PluginDefaultService;
import io.kestra.plugin.core.debug.Return;
import io.kestra.plugin.core.flow.Template;
import io.kestra.plugin.core.log.Log;
import io.kestra.core.utils.Await;
import io.kestra.core.utils.IdUtils;
import io.kestra.core.utils.TestsUtils;
@@ -50,6 +46,9 @@ import static org.mockito.Mockito.spy;
@KestraTest
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
public abstract class AbstractFlowRepositoryTest {
public static final String TEST_TENANT_ID = "tenant";
public static final String TEST_NAMESPACE = "io.kestra.unittest";
public static final String TEST_FLOW_ID = "test";
@Inject
protected FlowRepositoryInterface flowRepository;
@@ -59,32 +58,29 @@ public abstract class AbstractFlowRepositoryTest {
@Inject
private LocalFlowRepositoryLoader repositoryLoader;
@Inject
protected PluginDefaultService pluginDefaultService;
@BeforeEach
protected void init() throws IOException, URISyntaxException {
TestsUtils.loads(repositoryLoader);
FlowListener.reset();
}
private static Flow.FlowBuilder<?, ?> builder() {
return builder(IdUtils.create(), "test");
private static FlowWithSource.FlowWithSourceBuilder<?, ?> builder() {
return builder(IdUtils.create(), TEST_FLOW_ID);
}
private static Flow.FlowBuilder<?, ?> builder(String flowId, String taskId) {
return Flow.builder()
private static FlowWithSource.FlowWithSourceBuilder<?, ?> builder(String flowId, String taskId) {
return FlowWithSource.builder()
.id(flowId)
.namespace("io.kestra.unittest")
.tasks(Collections.singletonList(Return.builder().id(taskId).type(Return.class.getName()).format(Property.of("test")).build()));
.namespace(TEST_NAMESPACE)
.tasks(Collections.singletonList(Return.builder().id(taskId).type(Return.class.getName()).format(Property.of(TEST_FLOW_ID)).build()));
}
@Test
void findById() {
Flow flow = builder()
FlowWithSource flow = builder()
.revision(3)
.build();
flow = flowRepository.create(flow, flow.generateSource(), pluginDefaultService.injectDefaults(flow.withSource(flow.generateSource())));
flow = flowRepository.create(GenericFlow.of(flow));
try {
Optional<Flow> full = flowRepository.findById(null, flow.getNamespace(), flow.getId());
assertThat(full.isPresent(), is(true));
@@ -99,10 +95,10 @@ public abstract class AbstractFlowRepositoryTest {
@Test
void findByIdWithoutAcl() {
Flow flow = builder()
FlowWithSource flow = builder()
.revision(3)
.build();
flow = flowRepository.create(flow, flow.generateSource(), pluginDefaultService.injectDefaults(flow.withSource(flow.generateSource())));
flow = flowRepository.create(GenericFlow.of(flow));
try {
Optional<Flow> full = flowRepository.findByIdWithoutAcl(null, flow.getNamespace(), flow.getId(), Optional.empty());
assertThat(full.isPresent(), is(true));
@@ -117,10 +113,11 @@ public abstract class AbstractFlowRepositoryTest {
@Test
void findByIdWithSource() {
Flow flow = builder()
FlowWithSource flow = builder()
.revision(3)
.build();
flow = flowRepository.create(flow, "# comment\n" + flow.generateSource(), pluginDefaultService.injectDefaults(flow.withSource(flow.generateSource())));
String source = "# comment\n" + flow.sourceOrGenerateIfNull();
flow = flowRepository.create(GenericFlow.fromYaml(null, source));
try {
Optional<FlowWithSource> full = flowRepository.findByIdWithSource(null, flow.getNamespace(), flow.getId());
@@ -136,96 +133,10 @@ public abstract class AbstractFlowRepositoryTest {
}
}
@Test
protected void revision() throws JsonProcessingException {
String flowId = IdUtils.create();
// create with builder
Flow first = Flow.builder()
.id(flowId)
.namespace("io.kestra.unittest")
.tasks(Collections.singletonList(Return.builder().id("test").type(Return.class.getName()).format(Property.of("test")).build()))
.inputs(ImmutableList.of(StringInput.builder().type(Type.STRING).id("a").build()))
.build();
// create with repository
FlowWithSource flow = flowRepository.create(first, first.generateSource(), pluginDefaultService.injectDefaults(first.withSource(first.generateSource())));
List<FlowWithSource> revisions;
try {
// submit new one, no change
Flow notSaved = flowRepository.update(flow, flow, first.generateSource(), pluginDefaultService.injectDefaults(flow));
assertThat(notSaved.getRevision(), is(flow.getRevision()));
// submit new one with change
Flow flowRev2 = Flow.builder()
.id(flowId)
.namespace("io.kestra.unittest")
.tasks(Collections.singletonList(
Log.builder()
.id(IdUtils.create())
.type(Log.class.getName())
.message("Hello World")
.build()
))
.inputs(ImmutableList.of(StringInput.builder().type(Type.STRING).id("b").build()))
.build();
// revision is incremented
FlowWithSource incremented = flowRepository.update(flowRev2, flow, flowRev2.generateSource(), pluginDefaultService.injectDefaults(flowRev2.withSource(flowRev2.generateSource())));
assertThat(incremented.getRevision(), is(2));
// revision is well saved
revisions = flowRepository.findRevisions(null, flow.getNamespace(), flow.getId());
assertThat(revisions.size(), is(2));
// submit the same one serialized, no changed
FlowWithSource incremented2 = flowRepository.update(
JacksonMapper.ofJson().readValue(JacksonMapper.ofJson().writeValueAsString(flowRev2), Flow.class),
flowRev2,
JacksonMapper.ofJson().readValue(JacksonMapper.ofJson().writeValueAsString(flowRev2), Flow.class).generateSource(),
pluginDefaultService.injectDefaults(flowRev2.withSource(flowRev2.generateSource()))
);
assertThat(incremented2.getRevision(), is(2));
// resubmit first one, revision is incremented
FlowWithSource incremented3 = flowRepository.update(
JacksonMapper.ofJson().readValue(JacksonMapper.ofJson().writeValueAsString(flow.toFlow()), Flow.class),
flowRev2,
JacksonMapper.ofJson().readValue(JacksonMapper.ofJson().writeValueAsString(flow.toFlow()), Flow.class).generateSource(),
pluginDefaultService.injectDefaults(JacksonMapper.ofJson().readValue(JacksonMapper.ofJson().writeValueAsString(flow.toFlow()), Flow.class).withSource(flow.getSource()))
);
assertThat(incremented3.getRevision(), is(3));
} finally {
deleteFlow(flow);
}
// revisions is still findable after delete
revisions = flowRepository.findRevisions(null, flow.getNamespace(), flow.getId());
assertThat(revisions.size(), is(4));
Optional<Flow> findDeleted = flowRepository.findById(
null,
flow.getNamespace(),
flow.getId(),
Optional.of(flow.getRevision())
);
assertThat(findDeleted.isPresent(), is(true));
assertThat(findDeleted.get().getRevision(), is(flow.getRevision()));
// recreate the first one, we have a new revision
Flow incremented4 = flowRepository.create(flow, flow.generateSource(), pluginDefaultService.injectDefaults(flow));
try {
assertThat(incremented4.getRevision(), is(5));
} finally {
deleteFlow(incremented4);
}
}
@Test
void save() {
Flow flow = builder().revision(12).build();
Flow save = flowRepository.create(flow, flow.generateSource(), pluginDefaultService.injectDefaults(flow.withSource(flow.generateSource())));
FlowWithSource flow = builder().revision(12).build();
FlowWithSource save = flowRepository.create(GenericFlow.of(flow));
try {
assertThat(save.getRevision(), is(1));
@@ -236,8 +147,8 @@ public abstract class AbstractFlowRepositoryTest {
@Test
void saveNoRevision() {
Flow flow = builder().build();
Flow save = flowRepository.create(flow, flow.generateSource(), pluginDefaultService.injectDefaults(flow.withSource(flow.generateSource())));
FlowWithSource flow = builder().build();
FlowWithSource save = flowRepository.create(GenericFlow.of(flow));
try {
assertThat(save.getRevision(), is(1));
@@ -304,8 +215,8 @@ public abstract class AbstractFlowRepositoryTest {
Flow flow = builder()
.revision(3)
.build();
String flowSource = "# comment\n" + flow.generateSource();
flow = flowRepository.create(flow, flowSource, pluginDefaultService.injectDefaults(flow.withSource(flowSource)));
String flowSource = "# comment\n" + flow.sourceOrGenerateIfNull();
flow = flowRepository.create(GenericFlow.fromYaml(null, flowSource));
try {
List<FlowWithSource> save = flowRepository.findByNamespaceWithSource(null, flow.getNamespace());
@@ -360,7 +271,7 @@ public abstract class AbstractFlowRepositoryTest {
void delete() {
Flow flow = builder().build();
FlowWithSource save = flowRepository.create(flow, flow.generateSource(), pluginDefaultService.injectDefaults(flow.withSource(flow.generateSource())));
FlowWithSource save = flowRepository.create(GenericFlow.of(flow));
try {
assertThat(flowRepository.findById(null, save.getNamespace(), save.getId()).isPresent(), is(true));
@@ -384,12 +295,12 @@ public abstract class AbstractFlowRepositoryTest {
Flow flow = Flow.builder()
.id(flowId)
.namespace("io.kestra.unittest")
.inputs(ImmutableList.of(StringInput.builder().type(Type.STRING).id("a").build()))
.tasks(Collections.singletonList(Return.builder().id("test").type(Return.class.getName()).format(Property.of("test")).build()))
.namespace(TEST_NAMESPACE)
.inputs(List.of(StringInput.builder().type(Type.STRING).id("a").build()))
.tasks(Collections.singletonList(Return.builder().id(TEST_FLOW_ID).type(Return.class.getName()).format(Property.of(TEST_FLOW_ID)).build()))
.build();
Flow save = flowRepository.create(flow, flow.generateSource(), pluginDefaultService.injectDefaults(flow.withSource(flow.generateSource())));
Flow save = flowRepository.create(GenericFlow.of(flow));
try {
assertThat(flowRepository.findById(null, flow.getNamespace(), flow.getId()).isPresent(), is(true));
@@ -397,14 +308,14 @@ public abstract class AbstractFlowRepositoryTest {
Flow update = Flow.builder()
.id(IdUtils.create())
.namespace("io.kestra.unittest2")
.inputs(ImmutableList.of(StringInput.builder().type(Type.STRING).id("b").build()))
.tasks(Collections.singletonList(Return.builder().id("test").type(Return.class.getName()).format(Property.of("test")).build()))
.inputs(List.of(StringInput.builder().type(Type.STRING).id("b").build()))
.tasks(Collections.singletonList(Return.builder().id(TEST_FLOW_ID).type(Return.class.getName()).format(Property.of(TEST_FLOW_ID)).build()))
.build();
;
ConstraintViolationException e = assertThrows(
ConstraintViolationException.class,
() -> flowRepository.update(update, flow, update.generateSource(), pluginDefaultService.injectDefaults(update.withSource(update.generateSource())))
() -> flowRepository.update(GenericFlow.of(update), flow)
);
assertThat(e.getConstraintViolations().size(), is(2));
@@ -419,26 +330,26 @@ public abstract class AbstractFlowRepositoryTest {
Flow flow = Flow.builder()
.id(flowId)
.namespace("io.kestra.unittest")
.namespace(TEST_NAMESPACE)
.triggers(Collections.singletonList(AbstractSchedulerTest.UnitTest.builder()
.id("sleep")
.type(AbstractSchedulerTest.UnitTest.class.getName())
.build()))
.tasks(Collections.singletonList(Return.builder().id("test").type(Return.class.getName()).format(Property.of("test")).build()))
.tasks(Collections.singletonList(Return.builder().id(TEST_FLOW_ID).type(Return.class.getName()).format(Property.of(TEST_FLOW_ID)).build()))
.build();
flow = flowRepository.create(flow, flow.generateSource(), pluginDefaultService.injectDefaults(flow.withSource(flow.generateSource())));
flow = flowRepository.create(GenericFlow.of(flow));
try {
assertThat(flowRepository.findById(null, flow.getNamespace(), flow.getId()).isPresent(), is(true));
Flow update = Flow.builder()
.id(flowId)
.namespace("io.kestra.unittest")
.tasks(Collections.singletonList(Return.builder().id("test").type(Return.class.getName()).format(Property.of("test")).build()))
.namespace(TEST_NAMESPACE)
.tasks(Collections.singletonList(Return.builder().id(TEST_FLOW_ID).type(Return.class.getName()).format(Property.of(TEST_FLOW_ID)).build()))
.build();
;
Flow updated = flowRepository.update(update, flow, update.generateSource(), pluginDefaultService.injectDefaults(update.withSource(update.generateSource())));
Flow updated = flowRepository.update(GenericFlow.of(update), flow);
assertThat(updated.getTriggers(), is(nullValue()));
} finally {
deleteFlow(flow);
@@ -457,15 +368,15 @@ public abstract class AbstractFlowRepositoryTest {
Flow flow = Flow.builder()
.id(flowId)
.namespace("io.kestra.unittest")
.namespace(TEST_NAMESPACE)
.triggers(Collections.singletonList(AbstractSchedulerTest.UnitTest.builder()
.id("sleep")
.type(AbstractSchedulerTest.UnitTest.class.getName())
.build()))
.tasks(Collections.singletonList(Return.builder().id("test").type(Return.class.getName()).format(Property.of("test")).build()))
.tasks(Collections.singletonList(Return.builder().id(TEST_FLOW_ID).type(Return.class.getName()).format(Property.of(TEST_FLOW_ID)).build()))
.build();
Flow save = flowRepository.create(flow, flow.generateSource(), pluginDefaultService.injectDefaults(flow.withSource(flow.generateSource())));
Flow save = flowRepository.create(GenericFlow.of(flow));
try {
assertThat(flowRepository.findById(null, flow.getNamespace(), flow.getId()).isPresent(), is(true));
} finally {
@@ -489,7 +400,7 @@ public abstract class AbstractFlowRepositoryTest {
Template template = Template.builder()
.id(IdUtils.create())
.type(Template.class.getName())
.namespace("test")
.namespace(TEST_FLOW_ID)
.templateId("testTemplate")
.build();
@@ -501,15 +412,11 @@ public abstract class AbstractFlowRepositoryTest {
Flow flow = Flow.builder()
.id(IdUtils.create())
.namespace("io.kestra.unittest")
.namespace(TEST_NAMESPACE)
.tasks(Collections.singletonList(templateSpy))
.build();
flow = flowRepository.create(
flow,
flow.generateSource(),
flow
);
flow = flowRepository.create(GenericFlow.of(flow));
try {
Optional<Flow> found = flowRepository.findById(null, flow.getNamespace(), flow.getId());
@@ -523,44 +430,173 @@ public abstract class AbstractFlowRepositoryTest {
}
@Test
protected void lastRevision() {
String namespace = "io.kestra.unittest";
String flowId = IdUtils.create();
String tenantId = "tenant";
assertThat(flowRepository.lastRevision(tenantId, namespace, flowId), nullValue());
// create with builder
Flow first = Flow.builder()
.tenantId(tenantId)
.id(flowId)
.namespace(namespace)
.tasks(Collections.singletonList(Return.builder().id("test").type(Return.class.getName()).format(Property.of("test")).build()))
.inputs(ImmutableList.of(StringInput.builder().type(Type.STRING).id("a").build()))
.build();
// create with repository
first = flowRepository.create(first, first.generateSource(), pluginDefaultService.injectDefaults(first.withSource(first.generateSource())));
try {
assertThat(flowRepository.lastRevision(tenantId, namespace, flowId), is(1));
// submit new one with change
Flow flowRev2 = first.toBuilder()
.tasks(Collections.singletonList(
Log.builder()
.id(IdUtils.create())
.type(Log.class.getName())
.message("Hello World")
.build()
))
.inputs(ImmutableList.of(StringInput.builder().type(Type.STRING).id("b").build()))
.build();
first = flowRepository.update(flowRev2, first, flowRev2.generateSource(), pluginDefaultService.injectDefaults(flowRev2.withSource(flowRev2.generateSource())));
assertThat(flowRepository.lastRevision(tenantId, namespace, flowId), is(2));
} finally {
deleteFlow(first);
protected void shouldReturnNullRevisionForNonExistingFlow() {
assertThat(flowRepository.lastRevision(TEST_TENANT_ID, TEST_NAMESPACE, IdUtils.create()), nullValue());
}
@Test
protected void shouldReturnLastRevisionOnCreate() {
// Given
final List<Flow> toDelete = new ArrayList<>();
final String flowId = IdUtils.create();
try {
// When
toDelete.add(flowRepository.create(createTestingLogFlow(flowId, "???")));
Integer result = flowRepository.lastRevision(TEST_TENANT_ID, TEST_NAMESPACE, flowId);
// Then
assertThat(result, is(1));
assertThat(flowRepository.lastRevision(TEST_TENANT_ID, TEST_NAMESPACE, flowId), is(1));
} finally {
toDelete.forEach(this::deleteFlow);
}
}
@Test
protected void shouldIncrementRevisionOnDelete() {
// Given
final String flowId = IdUtils.create();
FlowWithSource created = flowRepository.create(createTestingLogFlow(flowId, "first"));
assertThat(flowRepository.findRevisions(TEST_TENANT_ID, TEST_NAMESPACE, flowId).size(), is(1));
// When
flowRepository.delete(created);
// Then
assertThat(flowRepository.findRevisions(TEST_TENANT_ID, TEST_NAMESPACE, flowId).size(), is(2));
}
@Test
protected void shouldIncrementRevisionOnCreateAfterDelete() {
// Given
final List<Flow> toDelete = new ArrayList<>();
final String flowId = IdUtils.create();
try {
// Given
flowRepository.delete(
flowRepository.create(createTestingLogFlow(flowId, "first"))
);
// When
toDelete.add(flowRepository.create(createTestingLogFlow(flowId, "second")));
// Then
assertThat(flowRepository.findRevisions(TEST_TENANT_ID, TEST_NAMESPACE, flowId).size(), is(3));
assertThat(flowRepository.lastRevision(TEST_TENANT_ID, TEST_NAMESPACE, flowId), is(3));
} finally {
toDelete.forEach(this::deleteFlow);
}
}
@Test
protected void shouldReturnNullForLastRevisionAfterDelete() {
// Given
final List<Flow> toDelete = new ArrayList<>();
final String flowId = IdUtils.create();
try {
// Given
FlowWithSource created = flowRepository.create(createTestingLogFlow(flowId, "first"));
toDelete.add(created);
FlowWithSource updated = flowRepository.update(createTestingLogFlow(flowId, "second"), created);
toDelete.add(updated);
// When
flowRepository.delete(updated);
// Then
assertThat(flowRepository.findById(TEST_TENANT_ID, TEST_NAMESPACE, flowId, Optional.empty()), is(Optional.empty()));
assertThat(flowRepository.lastRevision(TEST_TENANT_ID, TEST_NAMESPACE, flowId), is(nullValue()));
} finally {
toDelete.forEach(this::deleteFlow);
}
}
@Test
protected void shouldFindAllRevisionsAfterDelete() {
// Given
final List<Flow> toDelete = new ArrayList<>();
final String flowId = IdUtils.create();
try {
// Given
FlowWithSource created = flowRepository.create(createTestingLogFlow(flowId, "first"));
toDelete.add(created);
FlowWithSource updated = flowRepository.update(createTestingLogFlow(flowId, "second"), created);
toDelete.add(updated);
// When
flowRepository.delete(updated);
// Then
assertThat(flowRepository.findById(TEST_TENANT_ID, TEST_NAMESPACE, flowId, Optional.empty()), is(Optional.empty()));
assertThat(flowRepository.findRevisions(TEST_TENANT_ID, TEST_NAMESPACE, flowId).size(), is(3));
} finally {
toDelete.forEach(this::deleteFlow);
}
}
@Test
protected void shouldIncrementRevisionOnUpdateGivenNotEqualSource() {
final List<Flow> toDelete = new ArrayList<>();
final String flowId = IdUtils.create();
try {
// Given
FlowWithSource created = flowRepository.create(createTestingLogFlow(flowId, "first"));
toDelete.add(created);
// When
FlowWithSource updated = flowRepository.update(createTestingLogFlow(flowId, "second"), created);
toDelete.add(updated);
// Then
assertThat(updated.getRevision(), is(2));
assertThat(flowRepository.lastRevision(TEST_TENANT_ID, TEST_NAMESPACE, flowId), is(2));
} finally {
toDelete.forEach(this::deleteFlow);
}
}
@Test
protected void shouldNotIncrementRevisionOnUpdateGivenEqualSource() {
final List<Flow> toDelete = new ArrayList<>();
final String flowId = IdUtils.create();
try {
// Given
FlowWithSource created = flowRepository.create(createTestingLogFlow(flowId, "first"));
toDelete.add(created);
// When
FlowWithSource updated = flowRepository.update(createTestingLogFlow(flowId, "first"), created);
toDelete.add(updated);
// Then
assertThat(updated.getRevision(), is(1));
assertThat(flowRepository.lastRevision(TEST_TENANT_ID, TEST_NAMESPACE, flowId), is(1));
} finally {
toDelete.forEach(this::deleteFlow);
}
}
@Test
void shouldReturnForFindGivenQueryWildcard() {
ArrayListTotal<Flow> flows = flowRepository.find(Pageable.from(1, 10), "*", null, null, null, Map.of());
assertThat(flows.size(), is(10));
assertThat(flows.getTotal(), is(Helpers.FLOWS_COUNT));
}
@Test
void shouldReturnForGivenQueryWildCardFilters() {
List<QueryFilter> filters = List.of(
QueryFilter.builder().field(QueryFilter.Field.QUERY).operation(QueryFilter.Op.EQUALS).value("*").build()
);
ArrayListTotal<Flow> flows = flowRepository.find(Pageable.from(1, 10), null, filters);
assertThat(flows.size(), is(10));
assertThat(flows.getTotal(), is(Helpers.FLOWS_COUNT));
}
@Test
@@ -568,7 +604,7 @@ public abstract class AbstractFlowRepositoryTest {
Flow flow = builder()
.revision(1)
.build();
flowRepository.create(flow, flow.generateSource(), pluginDefaultService.injectDefaults(flow));
flowRepository.create(GenericFlow.of(flow));
Execution execution = Execution.builder()
.id(IdUtils.create())
.namespace(flow.getNamespace())
@@ -599,7 +635,7 @@ public abstract class AbstractFlowRepositoryTest {
Flow flow = builder()
.revision(3)
.build();
flowRepository.create(flow, flow.generateSource(), pluginDefaultService.injectDefaults(flow));
flowRepository.create(GenericFlow.of(flow));
Execution execution = Execution.builder()
.id(IdUtils.create())
.namespace(flow.getNamespace())
@@ -629,8 +665,8 @@ public abstract class AbstractFlowRepositoryTest {
FlowWithSource toDelete = null;
try {
// Given
Flow flow = createTestFlowForNamespace("io.kestra.unittest");
toDelete = flowRepository.create(flow, "", flow);
Flow flow = createTestFlowForNamespace(TEST_NAMESPACE);
toDelete = flowRepository.create(GenericFlow.of(flow));
// When
int count = flowRepository.count(null);
@@ -647,14 +683,14 @@ public abstract class AbstractFlowRepositoryTest {
void shouldCountForNullTenantGivenNamespace() {
List<FlowWithSource> toDelete = new ArrayList<>();
try {
toDelete.add(flowRepository.create(createTestFlowForNamespace("io.kestra.unittest.sub"), "", createTestFlowForNamespace("io.kestra.unittest.sub")));
toDelete.add(flowRepository.create(createTestFlowForNamespace("io.kestra.unittest.shouldcountbynamespacefornulltenant"), "", createTestFlowForNamespace("io.kestra.unittest.shouldcountbynamespacefornulltenant")));
toDelete.add(flowRepository.create(createTestFlowForNamespace("com.kestra.unittest"), "", createTestFlowForNamespace("com.kestra.unittest")));
toDelete.add(flowRepository.create(GenericFlow.of(createTestFlowForNamespace("io.kestra.unittest.sub"))));
toDelete.add(flowRepository.create(GenericFlow.of(createTestFlowForNamespace("io.kestra.unittest.shouldcountbynamespacefornulltenant"))));
toDelete.add(flowRepository.create(GenericFlow.of(createTestFlowForNamespace("com.kestra.unittest"))));
int count = flowRepository.countForNamespace(null, "io.kestra.unittest.shouldcountbynamespacefornulltenant");
assertThat(count, is(1));
count = flowRepository.countForNamespace(null, "io.kestra.unittest");
count = flowRepository.countForNamespace(null, TEST_NAMESPACE);
assertThat(count, is(2));
} finally {
for (FlowWithSource flow : toDelete) {
@@ -676,8 +712,12 @@ public abstract class AbstractFlowRepositoryTest {
}
private void deleteFlow(Flow flow) {
Integer revision = flowRepository.lastRevision(flow.getTenantId(), flow.getNamespace(), flow.getId());
flowRepository.delete(flow.toBuilder().revision(revision).build().withSource(flow.generateSource()));
if (flow == null) {
return;
}
flowRepository
.findByIdWithSource(flow.getTenantId(), flow.getNamespace(), flow.getId())
.ifPresent(delete -> flowRepository.delete(flow.toBuilder().revision(null).build()));
}
@Singleton
@@ -694,4 +734,17 @@ public abstract class AbstractFlowRepositoryTest {
emits = new ArrayList<>();
}
}
private static GenericFlow createTestingLogFlow(String id, String logMessage) {
String source = """
id: %s
namespace: %s
tasks:
- id: log
type: io.kestra.plugin.core.log.Log
message: %s
""".formatted(id, TEST_NAMESPACE, logMessage);
return GenericFlow.fromYaml(TEST_TENANT_ID, source);
}
}

View File

@@ -1,10 +1,13 @@
package io.kestra.core.runners;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.models.flows.State;
import io.kestra.core.queues.QueueException;
import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface;
import io.kestra.core.serializers.JacksonMapper;
import io.kestra.core.services.FlowListenersInterface;
import io.kestra.core.utils.Await;
import io.kestra.core.utils.TestsUtils;
@@ -266,14 +269,17 @@ public class DeserializationIssuesCaseTest {
assertThat(workerTriggerResult.get().getSuccess(), is(Boolean.FALSE));
}
public void flowDeserializationIssue(Consumer<QueueMessage> sendToQueue) throws TimeoutException, QueueException{
public void flowDeserializationIssue(Consumer<QueueMessage> sendToQueue) throws Exception {
AtomicReference<List<FlowWithSource>> flows = new AtomicReference<>();
flowListeners.listen(newFlows -> flows.set(newFlows));
flowListeners.listen(flows::set);
sendToQueue.accept(new QueueMessage(FlowWithSource.class, INVALID_FLOW_KEY, INVALID_FLOW_VALUE));
sendToQueue.accept(new QueueMessage(FlowInterface.class, INVALID_FLOW_KEY, INVALID_FLOW_VALUE));
Await.until(
() -> flows.get() != null && flows.get().stream().anyMatch(newFlow -> newFlow.uid().equals("company.team_hello-world_2") && (newFlow.getTasks() == null || newFlow.getTasks().isEmpty())),
() -> flows.get() != null && flows.get()
.stream()
.anyMatch(newFlow -> newFlow.uid().equals("company.team_hello-world_2"))
,
Duration.ofMillis(100),
Duration.ofMinutes(1)
);

View File

@@ -9,13 +9,12 @@ import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.models.property.Property;
import io.kestra.core.repositories.ExecutionRepositoryInterface;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.repositories.LogRepositoryInterface;
import io.kestra.core.serializers.JacksonMapper;
import io.kestra.core.services.ExecutionService;
import io.kestra.core.services.PluginDefaultService;
import io.kestra.core.utils.Await;
import io.kestra.plugin.core.debug.Return;
import jakarta.inject.Inject;
@@ -43,9 +42,6 @@ class ExecutionServiceTest {
@Inject
FlowRepositoryInterface flowRepository;
@Inject
PluginDefaultService pluginDefaultService;
@Inject
ExecutionRepositoryInterface executionRepository;
@@ -83,7 +79,7 @@ class ExecutionServiceTest {
FlowWithSource flow = flowRepository.findByIdWithSource(null, "io.kestra.tests", "restart_last_failed").orElseThrow();
flowRepository.update(
flow,
GenericFlow.of(flow),
flow.updateTask(
"a",
Return.builder()
@@ -91,9 +87,7 @@ class ExecutionServiceTest {
.type(Return.class.getName())
.format(Property.of("replace"))
.build()
),
JacksonMapper.ofYaml().writeValueAsString(flow),
pluginDefaultService.injectDefaults(flow)
)
);

View File

@@ -1,11 +1,11 @@
package io.kestra.core.runners;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.models.property.Property;
import io.kestra.core.services.PluginDefaultService;
import io.kestra.core.junit.annotations.KestraTest;
import lombok.SneakyThrows;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.services.FlowListenersInterface;
import io.kestra.plugin.core.debug.Return;
@@ -25,11 +25,8 @@ abstract public class FlowListenersTest {
@Inject
protected FlowRepositoryInterface flowRepository;
@Inject
protected PluginDefaultService pluginDefaultService;
protected static FlowWithSource create(String flowId, String taskId) {
Flow flow = Flow.builder()
FlowWithSource flow = FlowWithSource.builder()
.id(flowId)
.namespace("io.kestra.unittest")
.revision(1)
@@ -39,7 +36,7 @@ abstract public class FlowListenersTest {
.format(Property.of("test"))
.build()))
.build();
return flow.withSource(flow.generateSource());
return flow.toBuilder().source(flow.sourceOrGenerateIfNull()).build();
}
public void suite(FlowListenersInterface flowListenersService) {
@@ -72,44 +69,44 @@ abstract public class FlowListenersTest {
FlowWithSource firstUpdated = create(first.getId(), "test2");
flowRepository.create(first, first.generateSource(), pluginDefaultService.injectDefaults(first.withSource(first.generateSource())));
flowRepository.create(GenericFlow.of(first));
wait(ref, () -> {
assertThat(count.get(), is(1));
assertThat(flowListenersService.flows().size(), is(1));
});
// create the same id as the first, no additional flows
first = flowRepository.update(firstUpdated, first, firstUpdated.generateSource(), pluginDefaultService.injectDefaults(firstUpdated.withSource(firstUpdated.generateSource())));
first = flowRepository.update(GenericFlow.of(firstUpdated), first);
wait(ref, () -> {
assertThat(count.get(), is(1));
assertThat(flowListenersService.flows().size(), is(1));
assertThat(flowListenersService.flows().getFirst().getTasks().getFirst().getId(), is("test2"));
//assertThat(flowListenersService.flows().getFirst().getFirst().getId(), is("test2"));
});
Flow second = create("second_" + IdUtils.create(), "test");
FlowWithSource second = create("second_" + IdUtils.create(), "test");
// create a new one
flowRepository.create(second, second.generateSource(), pluginDefaultService.injectDefaults(second.withSource(second.generateSource())));
flowRepository.create(GenericFlow.of(second));
wait(ref, () -> {
assertThat(count.get(), is(2));
assertThat(flowListenersService.flows().size(), is(2));
});
// delete first
Flow deleted = flowRepository.delete(first);
FlowWithSource deleted = flowRepository.delete(first);
wait(ref, () -> {
assertThat(count.get(), is(1));
assertThat(flowListenersService.flows().size(), is(1));
});
// restore must work
flowRepository.create(first, first.generateSource(), pluginDefaultService.injectDefaults(first.withSource(first.generateSource())));
flowRepository.create(GenericFlow.of(first));
wait(ref, () -> {
assertThat(count.get(), is(2));
assertThat(flowListenersService.flows().size(), is(2));
});
Flow withTenant = first.toBuilder().tenantId("some-tenant").build();
flowRepository.create(withTenant, withTenant.generateSource(), pluginDefaultService.injectDefaults(withTenant.withSource(withTenant.generateSource())));
FlowWithSource withTenant = first.toBuilder().tenantId("some-tenant").build();
flowRepository.create(GenericFlow.of(withTenant));
wait(ref, () -> {
assertThat(count.get(), is(3));
assertThat(flowListenersService.flows().size(), is(3));

View File

@@ -48,11 +48,11 @@ abstract public class AbstractSchedulerTest {
@Inject
protected ExecutionService executionService;
public static Flow createThreadFlow() {
public static FlowWithSource createThreadFlow() {
return createThreadFlow(null);
}
public static Flow createThreadFlow(String workerGroup) {
public static FlowWithSource createThreadFlow(String workerGroup) {
UnitTest schedule = UnitTest.builder()
.id("sleep")
.type(UnitTest.class.getName())
@@ -72,7 +72,7 @@ abstract public class AbstractSchedulerTest {
}
protected static FlowWithSource createFlow(List<AbstractTrigger> triggers, List<PluginDefault> list) {
Flow.FlowBuilder<?, ?> builder = Flow.builder()
FlowWithSource.FlowWithSourceBuilder<?, ?> builder = FlowWithSource.builder()
.id(IdUtils.create())
.namespace("io.kestra.unittest")
.inputs(List.of(
@@ -107,8 +107,8 @@ abstract public class AbstractSchedulerTest {
builder.pluginDefaults(list);
}
Flow flow = builder.build();
return FlowWithSource.of(flow, flow.generateSource());
FlowWithSource flow = builder.build();
return flow.toBuilder().source(flow.sourceOrGenerateIfNull()).build();
}
protected static FlowWithSource createLongRunningFlow(List<AbstractTrigger> triggers, List<PluginDefault> list) {

View File

@@ -2,8 +2,10 @@ package io.kestra.core.schedulers;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.property.Property;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.models.triggers.Trigger;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.runners.FlowListeners;
@@ -44,7 +46,7 @@ class SchedulerConditionTest extends AbstractSchedulerTest {
protected FlowRepositoryInterface flowRepository;
private static Flow createScheduleFlow() {
private static FlowWithSource createScheduleFlow() {
Schedule schedule = Schedule.builder()
.id("hourly")
.type(Schedule.class.getName())
@@ -72,8 +74,8 @@ class SchedulerConditionTest extends AbstractSchedulerTest {
SchedulerExecutionStateInterface executionRepositorySpy = spy(this.executionState);
CountDownLatch queueCount = new CountDownLatch(4);
Flow flow = createScheduleFlow();
flowRepository.create(flow, flow.generateSource(), flow);
FlowWithSource flow = createScheduleFlow();
flowRepository.create(GenericFlow.of(flow));
Trigger trigger = Trigger.builder()
.namespace(flow.getNamespace())
@@ -101,7 +103,7 @@ class SchedulerConditionTest extends AbstractSchedulerTest {
Flux<Execution> receive = TestsUtils.receive(executionQueue, throwConsumer(either -> {
Execution execution = either.getLeft();
if (execution.getState().getCurrent() == State.Type.CREATED) {
terminateExecution(execution, trigger, flow.withSource(flow.generateSource()));
terminateExecution(execution, trigger, flow);
queueCount.countDown();
if (queueCount.getCount() == 0) {

View File

@@ -1,7 +1,9 @@
package io.kestra.core.schedulers;
import io.kestra.core.models.Label;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.property.Property;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.utils.TestsUtils;
import io.kestra.jdbc.runner.JdbcScheduler;
@@ -92,11 +94,11 @@ public class SchedulerPollingTriggerTest extends AbstractSchedulerTest {
// mock flow listener
FlowListeners flowListenersServiceSpy = spy(this.flowListenersService);
PollingTrigger pollingTrigger = createPollingTrigger(List.of(State.Type.FAILED)).build();
Flow flow = createPollingTriggerFlow(pollingTrigger)
FlowWithSource flow = createPollingTriggerFlow(pollingTrigger)
.toBuilder()
.tasks(List.of(Fail.builder().id("fail").type(Fail.class.getName()).build()))
.build();
flowRepository.create(flow, flow.generateSource(), flow);
flowRepository.create(GenericFlow.of(flow));
doReturn(List.of(flow))
.when(flowListenersServiceSpy)
.flows();
@@ -115,7 +117,7 @@ public class SchedulerPollingTriggerTest extends AbstractSchedulerTest {
queueCount.countDown();
if (execution.getLeft().getState().getCurrent() == State.Type.CREATED) {
terminateExecution(execution.getLeft(), State.Type.FAILED, Trigger.of(flow, pollingTrigger), flow.withSource(flow.generateSource()));
terminateExecution(execution.getLeft(), State.Type.FAILED, Trigger.of(flow, pollingTrigger), flow);
}
}
}));
@@ -184,7 +186,7 @@ public class SchedulerPollingTriggerTest extends AbstractSchedulerTest {
}
}
private Flow createPollingTriggerFlow(PollingTrigger pollingTrigger) {
private FlowWithSource createPollingTriggerFlow(PollingTrigger pollingTrigger) {
return createFlow(Collections.singletonList(pollingTrigger));
}

View File

@@ -2,7 +2,9 @@ package io.kestra.core.schedulers;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.models.property.Property;
import io.kestra.core.models.triggers.RecoverMissedSchedules;
import io.kestra.core.models.triggers.Trigger;
@@ -51,7 +53,7 @@ public class SchedulerScheduleOnDatesTest extends AbstractSchedulerTest {
));
}
private Flow createScheduleFlow(String zone, String triggerId) {
private FlowWithSource createScheduleFlow(String zone, String triggerId) {
var now = ZonedDateTime.now();
var before = now.minusSeconds(3).truncatedTo(ChronoUnit.SECONDS);
var after = now.plusSeconds(3).truncatedTo(ChronoUnit.SECONDS);
@@ -78,8 +80,8 @@ public class SchedulerScheduleOnDatesTest extends AbstractSchedulerTest {
Set<String> executionId = new HashSet<>();
// then flow should be executed 4 times
Flow flow = createScheduleFlow("Europe/Paris", "schedule");
flowRepository.create(flow, flow.generateSource(), flow);
FlowWithSource flow = createScheduleFlow("Europe/Paris", "schedule");
flowRepository.create(GenericFlow.of(flow));
doReturn(List.of(flow))
.when(flowListenersServiceSpy)
@@ -107,7 +109,7 @@ public class SchedulerScheduleOnDatesTest extends AbstractSchedulerTest {
executionId.add(execution.getId());
if (execution.getState().getCurrent() == State.Type.CREATED) {
terminateExecution(execution, trigger, flow.withSource(flow.generateSource()));
terminateExecution(execution, trigger, flow);
}
assertThat(execution.getFlowId(), is(flow.getId()));
queueCount.countDown();

View File

@@ -5,6 +5,7 @@ import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.PluginDefault;
import io.kestra.core.models.property.Property;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.utils.TestsUtils;
import io.kestra.jdbc.runner.JdbcScheduler;
@@ -101,7 +102,7 @@ public class SchedulerScheduleTest extends AbstractSchedulerTest {
FlowWithSource invalid = createScheduleFlow("Asia/Delhi", "schedule", true);
FlowWithSource flow = createScheduleFlow("Europe/Paris", "schedule", false);
flowRepository.create(flow, flow.generateSource(), flow);
flowRepository.create(GenericFlow.of(flow));
doReturn(List.of(invalid, flow))
.when(flowListenersServiceSpy)
.flows();
@@ -137,7 +138,7 @@ public class SchedulerScheduleTest extends AbstractSchedulerTest {
executionId.add(execution.getId());
if (execution.getState().getCurrent() == State.Type.CREATED) {
terminateExecution(execution, trigger, flow.withSource(flow.generateSource()));
terminateExecution(execution, trigger, flow);
}
assertThat(execution.getFlowId(), is(flow.getId()));
queueCount.countDown();
@@ -430,7 +431,7 @@ public class SchedulerScheduleTest extends AbstractSchedulerTest {
.when(flowListenersServiceSpy)
.flows();
flowRepository.create(flow, flow.generateSource(), flow);
flowRepository.create(GenericFlow.of(flow));
// to avoid waiting too long before a trigger execution, we add a last trigger dated now - 1m.
Trigger lastTrigger = Trigger
.builder()
@@ -453,7 +454,7 @@ public class SchedulerScheduleTest extends AbstractSchedulerTest {
assertThat(execution.getFlowId(), is(flow.getId()));
if (execution.getState().getCurrent() == State.Type.CREATED) {
terminateExecution(execution, lastTrigger, flow.withSource(flow.generateSource()));
terminateExecution(execution, lastTrigger, flow);
}
queueCount.countDown();
}));
@@ -542,7 +543,7 @@ public class SchedulerScheduleTest extends AbstractSchedulerTest {
.build()
)
);
flowRepository.create(flow, flow.generateSource(), flow);
flowRepository.create(GenericFlow.of(flow));
doReturn(List.of(flow))
.when(flowListenersServiceSpy)
.flows();
@@ -614,7 +615,7 @@ public class SchedulerScheduleTest extends AbstractSchedulerTest {
.build()
)
);
flowRepository.create(flow, flow.generateSource(), flow);
flowRepository.create(GenericFlow.of(flow));
doReturn(List.of(flow))
.when(flowListenersServiceSpy)
.flows();
@@ -650,7 +651,7 @@ public class SchedulerScheduleTest extends AbstractSchedulerTest {
lastTrigger.getNextExecutionDate().plusMinutes(3).toInstant()
))))
.build()));
terminateExecution(terminated, lastTrigger, flow.withSource(flow.generateSource()));
terminateExecution(terminated, lastTrigger, flow);
}
queueCount.countDown();
}));

View File

@@ -3,8 +3,10 @@ package io.kestra.core.schedulers;
import io.kestra.core.models.Label;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.triggers.Trigger;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.runners.FlowListeners;
import io.kestra.core.runners.TestMethodScopedWorker;
@@ -39,8 +41,8 @@ public class SchedulerThreadTest extends AbstractSchedulerTest {
@Test
void thread() throws Exception {
Flow flow = createThreadFlow();
flowRepository.create(flow, flow.generateSource(), flow);
FlowWithSource flow = createThreadFlow();
flowRepository.create(GenericFlow.of(flow));
CountDownLatch queueCount = new CountDownLatch(2);
// wait for execution
@@ -50,7 +52,7 @@ public class SchedulerThreadTest extends AbstractSchedulerTest {
assertThat(execution.getFlowId(), is(flow.getId()));
if (execution.getState().getCurrent() != State.Type.SUCCESS) {
terminateExecution(execution, Trigger.of(flow, flow.getTriggers().getFirst()), flow.withSource(flow.generateSource()));
terminateExecution(execution, Trigger.of(flow, flow.getTriggers().getFirst()), flow);
queueCount.countDown();
}
}));

View File

@@ -6,7 +6,9 @@ import io.kestra.core.models.executions.ExecutionKilled;
import io.kestra.core.models.executions.ExecutionKilledTrigger;
import io.kestra.core.models.executions.LogEntry;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.models.property.Property;
import io.kestra.core.models.triggers.AbstractTrigger;
import io.kestra.core.models.triggers.PollingTriggerInterface;
@@ -49,7 +51,7 @@ public class SchedulerTriggerChangeTest extends AbstractSchedulerTest {
@Inject
@Named(QueueFactoryInterface.FLOW_NAMED)
protected QueueInterface<FlowWithSource> flowQueue;
protected QueueInterface<FlowInterface> flowQueue;
@Inject
@Named(QueueFactoryInterface.WORKERTASKLOG_NAMED)
@@ -83,7 +85,7 @@ public class SchedulerTriggerChangeTest extends AbstractSchedulerTest {
)
.build();
return FlowWithSource.of(flow, flow.generateSource());
return FlowWithSource.of(flow, flow.getSource());
}
@Test
@@ -119,7 +121,7 @@ public class SchedulerTriggerChangeTest extends AbstractSchedulerTest {
// emit a flow trigger to be started
FlowWithSource flow = createFlow(Duration.ofSeconds(10));
flowRepository.create(flow, flow.generateSource(), flow);
flowRepository.create(GenericFlow.of(flow));
flowQueue.emit(flow);
Await.until(() -> STARTED_COUNT == 1, Duration.ofMillis(100), Duration.ofSeconds(30));

View File

@@ -35,9 +35,6 @@ import static org.junit.jupiter.api.Assertions.assertThrows;
class YamlParserTest {
private static final ObjectMapper MAPPER = JacksonMapper.ofJson();
@Inject
private YamlParser yamlParser;
@Inject
private ModelValidator modelValidator;
@@ -213,7 +210,7 @@ class YamlParserTest {
TypeReference<Map<String, Object>> TYPE_REFERENCE = new TypeReference<>() {};
Map<String, Object> flow = JacksonMapper.ofYaml().readValue(flowSource, TYPE_REFERENCE);
Flow parse = yamlParser.parse(flow, Flow.class, false);
Flow parse = YamlParser.parse(flow, Flow.class, false);
assertThat(parse.getId(), is("duplicate"));
}
@@ -245,7 +242,7 @@ class YamlParserTest {
File file = new File(resource.getFile());
return yamlParser.parse(file, Flow.class);
return YamlParser.parse(file, Flow.class);
}
private Flow parseString(String path) throws IOException {
@@ -254,6 +251,6 @@ class YamlParserTest {
String input = Files.readString(Path.of(resource.getPath()), Charset.defaultCharset());
return yamlParser.parse(input, Flow.class);
return YamlParser.parse(input, Flow.class);
}
}

View File

@@ -1,8 +1,9 @@
package io.kestra.core.services;
import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.models.flows.Type;
import io.kestra.core.models.flows.input.StringInput;
import io.kestra.core.models.property.Property;
@@ -10,7 +11,6 @@ import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.plugin.core.debug.Echo;
import io.kestra.plugin.core.debug.Return;
import jakarta.inject.Inject;
import jakarta.validation.ConstraintViolationException;
import org.junit.jupiter.api.Test;
import java.util.Collections;
@@ -21,25 +21,25 @@ import java.util.stream.Stream;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThrows;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertTrue;
@KestraTest
class FlowServiceTest {
private static final String TEST_NAMESPACE = "io.kestra.unittest";
@Inject
private FlowService flowService;
@Inject
private FlowRepositoryInterface flowRepository;
private static Flow create(String flowId, String taskId, Integer revision) {
return create(null, flowId, taskId, revision);
private static FlowWithSource create(String flowId, String taskId, Integer revision) {
return create(null, TEST_NAMESPACE, flowId, taskId, revision);
}
private static Flow create(String tenantId, String flowId, String taskId, Integer revision) {
return Flow.builder()
private static FlowWithSource create(String tenantId, String namespace, String flowId, String taskId, Integer revision) {
FlowWithSource flow = FlowWithSource.builder()
.id(flowId)
.namespace("io.kestra.unittest")
.namespace(namespace)
.tenantId(tenantId)
.revision(revision)
.tasks(Collections.singletonList(Return.builder()
@@ -48,6 +48,8 @@ class FlowServiceTest {
.format(Property.of("test"))
.build()))
.build();
return flow.toBuilder().source(flow.sourceOrGenerateIfNull()).build();
}
@Test
@@ -59,7 +61,7 @@ class FlowServiceTest {
- id: task
type: io.kestra.plugin.core.log.Log
message: Hello""";
Flow importFlow = flowService.importFlow("my-tenant", source);
FlowWithSource importFlow = flowService.importFlow("my-tenant", source);
assertThat(importFlow.getId(), is("import"));
assertThat(importFlow.getNamespace(), is("some.namespace"));
@@ -93,7 +95,7 @@ class FlowServiceTest {
- id: task
type: io.kestra.plugin.core.log.Log
message: Hello""";
Flow importFlow = flowService.importFlow("my-tenant", oldSource);
FlowWithSource importFlow = flowService.importFlow("my-tenant", oldSource);
assertThat(importFlow.getId(), is("import_dry"));
assertThat(importFlow.getNamespace(), is("some.namespace"));
@@ -120,18 +122,14 @@ class FlowServiceTest {
@Test
void sameRevisionWithDeletedOrdered() {
var flow1 = create("test", "test", 1);
var flow2 = create("test", "test2", 2);
var flow3 = create("test", "test2", 2).toDeleted();
var flow4 = create("test", "test2", 4);
Stream<FlowWithSource> stream = Stream.of(
flow1.withSource(flow1.generateSource()),
flow2.withSource(flow2.generateSource()),
flow3.withSource(flow3.generateSource()),
flow4.withSource(flow4.generateSource())
Stream<FlowInterface> stream = Stream.of(
create("test", "test", 1),
create("test", "test2", 2),
create("test", "test2", 2).toDeleted(),
create("test", "test2", 4)
);
List<FlowWithSource> collect = flowService.keepLastVersion(stream).toList();
List<FlowInterface> collect = flowService.keepLastVersion(stream).toList();
assertThat(collect.size(), is(1));
assertThat(collect.getFirst().isDeleted(), is(false));
@@ -140,20 +138,16 @@ class FlowServiceTest {
@Test
void sameRevisionWithDeletedSameRevision() {
var flow1 = create("test2", "test2", 1);
var flow2 = create("test", "test", 1);
var flow3 = create("test", "test2", 2);
var flow4 = create("test", "test3", 3);
var flow5 = create("test", "test2", 2).toDeleted();
Stream<FlowWithSource> stream = Stream.of(
flow1.withSource(flow1.generateSource()),
flow2.withSource(flow2.generateSource()),
flow3.withSource(flow3.generateSource()),
flow4.withSource(flow4.generateSource()),
flow5.withSource(flow5.generateSource())
Stream<FlowInterface> stream = Stream.of(
create("test2", "test2", 1),
create("test", "test", 1),
create("test", "test2", 2),
create("test", "test3", 3),
create("test", "test2", 2).toDeleted()
);
List<FlowWithSource> collect = flowService.keepLastVersion(stream).toList();
List<FlowInterface> collect = flowService.keepLastVersion(stream).toList();
assertThat(collect.size(), is(1));
assertThat(collect.getFirst().isDeleted(), is(false));
@@ -162,18 +156,15 @@ class FlowServiceTest {
@Test
void sameRevisionWithDeletedUnordered() {
var flow1 = create("test", "test", 1);
var flow2 = create("test", "test2", 2);
var flow3 = create("test", "test2", 4);
var flow4 = create("test", "test2", 2).toDeleted();
Stream<FlowWithSource> stream = Stream.of(
flow1.withSource(flow1.generateSource()),
flow2.withSource(flow2.generateSource()),
flow3.withSource(flow3.generateSource()),
flow4.withSource(flow4.generateSource())
Stream<FlowInterface> stream = Stream.of(
create("test", "test", 1),
create("test", "test2", 2),
create("test", "test2", 4),
create("test", "test2", 2).toDeleted()
);
List<FlowWithSource> collect = flowService.keepLastVersion(stream).toList();
List<FlowInterface> collect = flowService.keepLastVersion(stream).toList();
assertThat(collect.size(), is(1));
assertThat(collect.getFirst().isDeleted(), is(false));
@@ -182,22 +173,17 @@ class FlowServiceTest {
@Test
void multipleFlow() {
var flow1 = create("test", "test", 2);
var flow2 = create("test", "test2", 1);
var flow3 = create("test2", "test2", 1);
var flow4 = create("test2", "test3", 3);
var flow5 = create("test3", "test1", 2);
var flow6 = create("test3", "test2", 3);
Stream<FlowWithSource> stream = Stream.of(
flow1.withSource(flow1.generateSource()),
flow2.withSource(flow2.generateSource()),
flow3.withSource(flow3.generateSource()),
flow4.withSource(flow4.generateSource()),
flow5.withSource(flow5.generateSource()),
flow6.withSource(flow6.generateSource())
Stream<FlowInterface> stream = Stream.of(
create("test", "test", 2),
create("test", "test2", 1),
create("test2", "test2", 1),
create("test2", "test3", 3),
create("test3", "test1", 2),
create("test3", "test2", 3)
);
List<FlowWithSource> collect = flowService.keepLastVersion(stream).toList();
List<FlowInterface> collect = flowService.keepLastVersion(stream).toList();
assertThat(collect.size(), is(3));
assertThat(collect.stream().filter(flow -> flow.getId().equals("test")).findFirst().orElseThrow().getRevision(), is(2));
@@ -207,7 +193,7 @@ class FlowServiceTest {
@Test
void warnings() {
Flow flow = create("test", "test", 1).toBuilder()
FlowWithSource flow = create("test", "test", 1).toBuilder()
.namespace("system")
.triggers(List.of(
io.kestra.plugin.core.trigger.Flow.builder()
@@ -257,9 +243,9 @@ class FlowServiceTest {
@SuppressWarnings("deprecation")
@Test
void propertyRenamingDeprecation() {
Flow flow = Flow.builder()
FlowWithSource flow = FlowWithSource.builder()
.id("flowId")
.namespace("io.kestra.unittest")
.namespace(TEST_NAMESPACE)
.inputs(List.of(
StringInput.builder()
.id("inputWithId")
@@ -302,8 +288,8 @@ class FlowServiceTest {
@Test
void delete() {
Flow flow = create("deleteTest", "test", 1);
FlowWithSource saved = flowRepository.create(flow, flow.generateSource(), flow);
FlowWithSource flow = create("deleteTest", "test", 1);
FlowWithSource saved = flowRepository.create(GenericFlow.of(flow));
assertThat(flowRepository.findById(flow.getTenantId(), flow.getNamespace(), flow.getId()).isPresent(), is(true));
flowService.delete(saved);
assertThat(flowRepository.findById(flow.getTenantId(), flow.getNamespace(), flow.getId()).isPresent(), is(false));
@@ -311,26 +297,26 @@ class FlowServiceTest {
@Test
void findByNamespacePrefix() {
Flow flow = create("findByTest", "test", 1).toBuilder().namespace("some.namespace").build();
flowRepository.create(flow, flow.generateSource(), flow);
FlowWithSource flow = create(null, "some.namespace","findByTest", "test", 1);
flowRepository.create(GenericFlow.of(flow));
assertThat(flowService.findByNamespacePrefix(null, "some.namespace").size(), is(1));
}
@Test
void findById() {
Flow flow = create("findByIdTest", "test", 1);
FlowWithSource saved = flowRepository.create(flow, flow.generateSource(), flow);
FlowWithSource flow = create("findByIdTest", "test", 1);
FlowWithSource saved = flowRepository.create(GenericFlow.of(flow));
assertThat(flowService.findById(null, saved.getNamespace(), saved.getId()).isPresent(), is(true));
}
@Test
void checkSubflowNotFound() {
Flow flow = create("mainFlow", "task", 1).toBuilder()
FlowWithSource flow = create("mainFlow", "task", 1).toBuilder()
.tasks(List.of(
io.kestra.plugin.core.flow.Subflow.builder()
.id("subflowTask")
.type(io.kestra.plugin.core.flow.Subflow.class.getName())
.namespace("io.kestra.unittest")
.namespace(TEST_NAMESPACE)
.flowId("nonExistentSubflow")
.build()
))
@@ -344,15 +330,15 @@ class FlowServiceTest {
@Test
void checkValidSubflow() {
Flow subflow = create("existingSubflow", "task", 1);
flowRepository.create(subflow, subflow.generateSource(), subflow);
FlowWithSource subflow = create("existingSubflow", "task", 1);
flowRepository.create(GenericFlow.of(subflow));
Flow flow = create("mainFlow", "task", 1).toBuilder()
FlowWithSource flow = create("mainFlow", "task", 1).toBuilder()
.tasks(List.of(
io.kestra.plugin.core.flow.Subflow.builder()
.id("subflowTask")
.type(io.kestra.plugin.core.flow.Subflow.class.getName())
.namespace("io.kestra.unittest")
.namespace(TEST_NAMESPACE)
.flowId("existingSubflow")
.build()
))

View File

@@ -6,7 +6,9 @@ import io.kestra.core.models.annotations.Plugin;
import io.kestra.core.models.conditions.ConditionContext;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.models.flows.PluginDefault;
import io.kestra.core.models.tasks.RunnableTask;
import io.kestra.core.models.tasks.Task;
@@ -16,13 +18,15 @@ import io.kestra.core.models.triggers.PollingTriggerInterface;
import io.kestra.core.models.triggers.TriggerContext;
import io.kestra.core.models.triggers.TriggerOutput;
import io.kestra.core.runners.RunContext;
import io.kestra.core.serializers.YamlParser;
import io.kestra.plugin.core.condition.Expression;
import io.kestra.plugin.core.log.Log;
import io.kestra.plugin.core.trigger.Schedule;
import jakarta.inject.Inject;
import lombok.Builder;
import lombok.EqualsAndHashCode;
import lombok.*;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.ToString;
import lombok.experimental.SuperBuilder;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
@@ -62,16 +66,13 @@ class PluginDefaultServiceTest {
@Inject
private PluginDefaultService pluginDefaultService;
@Inject
private YamlParser yamlParser;
@Test
void shouldInjectGivenFlowWithNullSource() {
// Given
FlowWithSource flow = yamlParser.parse(TEST_LOG_FLOW_SOURCE, FlowWithSource.class);
FlowInterface flow = GenericFlow.fromYaml(null, TEST_LOG_FLOW_SOURCE);
// When
FlowWithSource result = pluginDefaultService.injectDefaults(flow);
FlowWithSource result = pluginDefaultService.injectAllDefaults(flow, true);
// Then
Log task = (Log) result.getTasks().getFirst();
@@ -158,7 +159,7 @@ class PluginDefaultServiceTest {
var previousGlobalDefault = pluginDefaultService.pluginGlobalDefault;
pluginDefaultService.pluginGlobalDefault = pluginGlobalDefaultConfiguration;
final Flow injected = pluginDefaultService.injectDefaults(flowWithPluginDefault);
final Flow injected = pluginDefaultService.injectAllDefaults(flowWithPluginDefault, true);
pluginDefaultService.pluginGlobalDefault = previousGlobalDefault;
assertThat(((DefaultPrecedenceTester) injected.getTasks().getFirst()).getPropFoo(), is(fooValue));
@@ -176,8 +177,8 @@ class PluginDefaultServiceTest {
}
@Test
void injectFlowAndGlobals() {
String source = """
public void injectFlowAndGlobals() {
String source = String.format("""
id: default-test
namespace: io.kestra.tests
@@ -190,27 +191,30 @@ class PluginDefaultServiceTest {
tasks:
- id: test
type: io.kestra.core.services.PluginDefaultServiceTest$DefaultTester
set: 666""";
set: 666
FlowWithSource flow = yamlParser.parse(source, Flow.class)
.withSource(source)
.toBuilder()
.pluginDefaults(List.of(
new PluginDefault(DefaultTester.class.getName(), false, ImmutableMap.of(
"value", 1,
"set", 123,
"arrays", Collections.singletonList(1)
)),
new PluginDefault(DefaultTriggerTester.class.getName(), false, ImmutableMap.of(
"set", 123
)),
new PluginDefault(Expression.class.getName(), false, ImmutableMap.of(
"expression", "{{ test }}"
))
))
.build();
pluginDefaults:
- type: "%s"
forced: false
values:
set: 123
value: 1
arrays: [1]
- type: "%s"
forced: false
values:
set: 123
- type: "%s"
forced: false
values:
expression: "{{ test }}"
""",
DefaultTester.class.getName(),
DefaultTriggerTester.class.getName(),
Expression.class.getName()
);
FlowWithSource injected = pluginDefaultService.injectDefaults(flow);
FlowWithSource injected = pluginDefaultService.parseFlowWithAllDefaults(null, source, false);
assertThat(((DefaultTester) injected.getTasks().getFirst()).getValue(), is(1));
assertThat(((DefaultTester) injected.getTasks().getFirst()).getSet(), is(666));
@@ -226,7 +230,8 @@ class PluginDefaultServiceTest {
}
@Test
public void forced() {
public void shouldInjectForcedDefaultsGivenForcedTrue() {
// Given
String source = """
id: default-test
namespace: io.kestra.tests
@@ -234,33 +239,35 @@ class PluginDefaultServiceTest {
tasks:
- id: test
type: io.kestra.core.services.PluginDefaultServiceTest$DefaultTester
set: 666""";
set: 1
FlowWithSource flow = yamlParser.parse(source, Flow.class)
.withSource(source)
.toBuilder()
.pluginDefaults(List.of(
new PluginDefault(DefaultTester.class.getName(), true, ImmutableMap.of(
"set", 123
)),
new PluginDefault(DefaultTester.class.getName(), true, ImmutableMap.of(
"set", 789
)),
new PluginDefault(DefaultTester.class.getName(), false, ImmutableMap.of(
"value", 1,
"set", 456,
"arrays", Collections.singletonList(1)
))
))
.build();
pluginDefaults:
- type: io.kestra.core.services.PluginDefaultServiceTest$DefaultTester
forced: true
values:
set: 2
- type: io.kestra.core.services.PluginDefaultServiceTest$DefaultTester
forced: true
values:
set: 3
- type: io.kestra.core.services.PluginDefaultServiceTest$DefaultTester
forced: false
values:
set: 4
value: 1
arrays: [1]
""";
FlowWithSource injected = pluginDefaultService.injectDefaults(flow);
// When
FlowWithSource injected = pluginDefaultService.parseFlowWithAllDefaults(null, source, false);
assertThat(((DefaultTester) injected.getTasks().getFirst()).getSet(), is(123));
// Then
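// the forced default (set: 2) overrides the task's own value (1), the later forced default (3) and the non-forced default (4)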
assertThat(((DefaultTester) injected.getTasks().getFirst()).getSet(), is(2));
}
@Test
public void prefix() {
public void shouldInjectDefaultGivenPrefixType() {
// Given
String source = """
id: default-test
namespace: io.kestra.tests
@@ -274,87 +281,80 @@ class PluginDefaultServiceTest {
tasks:
- id: test
type: io.kestra.core.services.PluginDefaultServiceTest$DefaultTester
set: 666""";
set: 666
FlowWithSource flow = yamlParser.parse(source, Flow.class)
.withSource(source)
.toBuilder()
.pluginDefaults(List.of(
new PluginDefault(DefaultTester.class.getName(), false, ImmutableMap.of(
"set", 789
)),
new PluginDefault("io.kestra.core.services.", false, ImmutableMap.of(
"value", 2,
"set", 456,
"arrays", Collections.singletonList(1)
)),
new PluginDefault("io.kestra.core.services2.", false, ImmutableMap.of(
"value", 3
))
))
.build();
pluginDefaults:
- type: io.kestra.core.services.PluginDefaultServiceTest$DefaultTester
values:
set: 789
- type: io.kestra.core.services.
values:
set: 456
value: 2
- type: io.kestra.core.services2.
values:
value: 3
""";
FlowWithSource injected = pluginDefaultService.injectDefaults(flow);
// When
FlowWithSource injected = pluginDefaultService.parseFlowWithAllDefaults(null, source, false);
// Then
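// the task's explicit set value is kept over the non-forced defaults, while the package-prefix default supplies value: 2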
assertThat(((DefaultTester) injected.getTasks().getFirst()).getSet(), is(666));
assertThat(((DefaultTester) injected.getTasks().getFirst()).getValue(), is(2));
}
@Test
void alias() {
String source = """
void shouldInjectFlowDefaultsGivenAlias() {
// Given
GenericFlow flow = GenericFlow.fromYaml(null, """
id: default-test
namespace: io.kestra.tests
tasks:
- id: test
type: io.kestra.core.services.PluginDefaultServiceTest$DefaultTester
set: 666""";
set: 666
FlowWithSource flow = yamlParser.parse(source, Flow.class)
.withSource(source)
.toBuilder()
.pluginDefaults(List.of(
new PluginDefault("io.kestra.core.services.DefaultTesterAlias", false, ImmutableMap.of(
"value", 1
))
))
.build();
FlowWithSource injected = pluginDefaultService.injectDefaults(flow);
pluginDefaults:
- type: io.kestra.core.services.DefaultTesterAlias
values:
value: 1
"""
);
// When
FlowWithSource injected = pluginDefaultService.injectAllDefaults(flow, true);
// Then
assertThat(((DefaultTester) injected.getTasks().getFirst()).getValue(), is(1));
}
@Test
void defaultOverride() {
String source = """
void shouldInjectFlowDefaultsGivenType() {
GenericFlow flow = GenericFlow.fromYaml(null, """
id: default-test
namespace: io.kestra.tests
tasks:
- id: test
type: io.kestra.core.services.PluginDefaultServiceTest$DefaultTester
set: 666""";
set: 666
FlowWithSource flow = yamlParser.parse(source, Flow.class)
.withSource(source)
.toBuilder()
.pluginDefaults(List.of(
new PluginDefault(DefaultTester.class.getName(), false, ImmutableMap.of(
"defaultValue", "overridden"
))
))
.build();
FlowWithSource injected = pluginDefaultService.injectDefaults(flow);
pluginDefaults:
- type: io.kestra.core.services.PluginDefaultServiceTest$DefaultTester
values:
defaultValue: overridden
"""
);
FlowWithSource injected = pluginDefaultService.injectAllDefaults(flow, true);
assertThat(((DefaultTester) injected.getTasks().getFirst()).getDefaultValue(), is("overridden"));
}
@Test
public void taskValueOverTaskDefaults() {
String source = """
public void shouldNotInjectDefaultsGivenExistingTaskValue() {
// Given
GenericFlow flow = GenericFlow.fromYaml(null, """
id: default-test
namespace: io.kestra.tests
@@ -362,20 +362,19 @@ class PluginDefaultServiceTest {
- id: test
type: io.kestra.plugin.core.log.Log
message: testing
level: INFO""";
level: INFO
FlowWithSource flow = yamlParser.parse(source, Flow.class)
.withSource(source)
.toBuilder()
.pluginDefaults(List.of(
new PluginDefault(Log.class.getName(), false, ImmutableMap.of(
"level", Level.WARN
))
))
.build();
pluginDefaults:
- type: io.kestra.core.services.PluginDefaultServiceTest$DefaultTester
values:
defaultValue: WARN
"""
);
FlowWithSource injected = pluginDefaultService.injectDefaults(flow);
// When
FlowWithSource injected = pluginDefaultService.injectAllDefaults(flow, true);
// Then
assertThat(((Log) injected.getTasks().getFirst()).getLevel().toString(), is(Level.INFO.name()));
}

View File

@@ -5,6 +5,7 @@ import io.kestra.core.models.annotations.Plugin;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.runners.RunContext;
import io.kestra.core.runners.SubflowExecutionResult;
import io.kestra.plugin.core.flow.Subflow;
@@ -38,7 +39,7 @@ import java.util.Optional;
public class BadExecutable extends Subflow {
@Override
public Optional<SubflowExecutionResult> createSubflowExecutionResult(RunContext runContext, TaskRun taskRun, Flow flow, Execution execution) {
public Optional<SubflowExecutionResult> createSubflowExecutionResult(RunContext runContext, TaskRun taskRun, FlowInterface flow, Execution execution) {
throw new RuntimeException("An error!");
}
}

View File

@@ -1,7 +1,8 @@
package io.kestra.core.services;
package io.kestra.core.topologies;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.property.Property;
import io.kestra.core.serializers.YamlParser;
import io.kestra.plugin.core.condition.ExecutionFlow;
import io.kestra.plugin.core.condition.ExecutionStatus;
import io.kestra.plugin.core.condition.MultipleCondition;
@@ -9,18 +10,18 @@ import io.kestra.plugin.core.condition.Expression;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.topologies.FlowRelation;
import io.kestra.core.serializers.YamlParser;
import io.kestra.plugin.core.debug.Return;
import io.kestra.plugin.core.flow.Parallel;
import io.kestra.plugin.core.flow.Subflow;
import io.kestra.core.topologies.FlowTopologyService;
import io.kestra.core.utils.TestsUtils;
import io.kestra.core.junit.annotations.KestraTest;
import jakarta.inject.Inject;
import org.junit.jupiter.api.Test;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.nio.file.Files;
import java.util.List;
import java.util.Map;
@@ -30,15 +31,13 @@ import static org.hamcrest.Matchers.nullValue;
@KestraTest
class FlowTopologyServiceTest {
@Inject
private FlowTopologyService flowTopologyService;
@Inject
private YamlParser yamlParser = new YamlParser();
@Test
void flowTask() {
FlowWithSource parent = Flow.builder()
Flow parent = Flow.builder()
.namespace("io.kestra.ee")
.id("parent")
.revision(1)
@@ -55,58 +54,54 @@ class FlowTopologyServiceTest {
))
.build()
))
.build()
.withSource(null);
.build();
FlowWithSource child = Flow.builder()
FlowWithSource child = FlowWithSource.builder()
.namespace("io.kestra.ee")
.id("child")
.revision(1)
.tasks(List.of(returnTask()))
.build()
.withSource(null);
.build();
assertThat(flowTopologyService.isChild(parent, child), is(FlowRelation.FLOW_TASK));
}
@Test
void noRelation() {
FlowWithSource parent = Flow.builder()
Flow parent = Flow.builder()
.namespace("io.kestra.ee")
.id("parent")
.revision(1)
.tasks(List.of(returnTask()))
.build()
.withSource(null);
.build();
FlowWithSource child = Flow.builder()
Flow child = Flow.builder()
.namespace("io.kestra.ee")
.id("child")
.revision(1)
.tasks(List.of(returnTask()))
.build()
.withSource(null);
.build();
assertThat(flowTopologyService.isChild(parent, child), nullValue());
}
@Test
void trigger() {
FlowWithSource parent = Flow.builder()
Flow parent = Flow.builder()
.namespace("io.kestra.ee")
.id("parent")
.revision(1)
.tasks(List.of(returnTask()))
.build()
.withSource(null);
.build();
FlowWithSource child = Flow.builder()
Flow child = Flow.builder()
.namespace("io.kestra.ee")
.id("child")
.revision(1)
.tasks(List.of(returnTask()))
.triggers(List.of(
io.kestra.plugin.core.trigger.Flow.builder()
.type(io.kestra.plugin.core.trigger.Flow.class.getName())
.conditions(List.of(
ExecutionFlow.builder()
.namespace(Property.of("io.kestra.ee"))
@@ -118,42 +113,42 @@ class FlowTopologyServiceTest {
))
.build()
))
.build()
.withSource(null);
.build();
assertThat(flowTopologyService.isChild(parent, child), is(FlowRelation.FLOW_TRIGGER));
}
@Test
void multipleCondition() {
FlowWithSource parent = Flow.builder()
Flow parent = Flow.builder()
.namespace("io.kestra.ee")
.id("parent")
.revision(1)
.tasks(List.of(returnTask()))
.build()
.withSource(null);
.build();
FlowWithSource noTrigger = Flow.builder()
Flow noTrigger = Flow.builder()
.namespace("io.kestra.exclude")
.id("no")
.revision(1)
.tasks(List.of(returnTask()))
.build()
.withSource(null);
.build();
FlowWithSource child = Flow.builder()
Flow child = Flow.builder()
.namespace("io.kestra.ee")
.id("child")
.revision(1)
.tasks(List.of(returnTask()))
.triggers(List.of(
io.kestra.plugin.core.trigger.Flow.builder()
.type(io.kestra.plugin.core.trigger.Flow.class.getName())
.conditions(List.of(
ExecutionStatus.builder()
.in(Property.of(List.of(State.Type.SUCCESS)))
.type(ExecutionStatus.class.getName())
.build(),
MultipleCondition.builder()
.type(MultipleCondition.class.getName())
.conditions(Map.of(
"first", ExecutionFlow.builder()
.namespace(Property.of("io.kestra.ee"))
@@ -175,8 +170,7 @@ class FlowTopologyServiceTest {
))
.build()
))
.build()
.withSource(null);
.build();
assertThat(flowTopologyService.isChild(parent, child), is(FlowRelation.FLOW_TRIGGER));
@@ -185,29 +179,28 @@ class FlowTopologyServiceTest {
@Test
void preconditions() {
FlowWithSource parent = Flow.builder()
Flow parent = Flow.builder()
.namespace("io.kestra.ee")
.id("parent")
.revision(1)
.tasks(List.of(returnTask()))
.build()
.withSource(null);
.build();
FlowWithSource noTrigger = Flow.builder()
Flow noTrigger = Flow.builder()
.namespace("io.kestra.exclude")
.id("no")
.revision(1)
.tasks(List.of(returnTask()))
.build()
.withSource(null);
.build();
FlowWithSource child = Flow.builder()
Flow child = Flow.builder()
.namespace("io.kestra.ee")
.id("child")
.revision(1)
.tasks(List.of(returnTask()))
.triggers(List.of(
io.kestra.plugin.core.trigger.Flow.builder()
.type(io.kestra.plugin.core.trigger.Flow.class.getName())
.preconditions(io.kestra.plugin.core.trigger.Flow.Preconditions.builder()
.flows(List.of(
io.kestra.plugin.core.trigger.Flow.UpstreamFlow.builder().namespace("io.kestra.ee").flowId("parent").build(),
@@ -217,8 +210,7 @@ class FlowTopologyServiceTest {
)
.build()
))
.build()
.withSource(null);
.build();
assertThat(flowTopologyService.isChild(parent, child), is(FlowRelation.FLOW_TRIGGER));
@@ -226,16 +218,15 @@ class FlowTopologyServiceTest {
}
@Test
void self1() {
FlowWithSource flow = parse("flows/valids/trigger-multiplecondition-listener.yaml").toBuilder().revision(1).build().withSource(null);
void self1() throws IOException {
Flow flow = parse("flows/valids/trigger-multiplecondition-listener.yaml").toBuilder().revision(1).build();
assertThat(flowTopologyService.isChild(flow, flow), nullValue());
}
@Test
void self() {
FlowWithSource flow = parse("flows/valids/trigger-flow-listener.yaml").toBuilder().revision(1).build().withSource(null);
void self() throws IOException {
Flow flow = parse("flows/valids/trigger-flow-listener.yaml").toBuilder().revision(1).build();
assertThat(flowTopologyService.isChild(flow, flow), nullValue());
}
@@ -247,12 +238,12 @@ class FlowTopologyServiceTest {
.build();
}
private Flow parse(String path) {
private Flow parse(String path) throws IOException {
URL resource = TestsUtils.class.getClassLoader().getResource(path);
assert resource != null;
File file = new File(resource.getFile());
return yamlParser.parse(file, Flow.class);
return YamlParser.parse(Files.readString(file.toPath()), Flow.class);
}
}

View File

@@ -21,8 +21,6 @@ import static org.hamcrest.Matchers.is;
class FlowValidationTest {
@Inject
private ModelValidator modelValidator;
@Inject
private YamlParser yamlParser;
@Test
void invalidRecursiveFlow() {
@@ -57,6 +55,6 @@ class FlowValidationTest {
File file = new File(resource.getFile());
return yamlParser.parse(file, Flow.class);
return YamlParser.parse(file, Flow.class);
}
}

View File

@@ -28,8 +28,6 @@ import org.junit.jupiter.api.Test;
@KestraTest(startRunner = true)
public class DagTest {
@Inject
YamlParser yamlParser = new YamlParser();
@Inject
ModelValidator modelValidator;
@@ -96,6 +94,6 @@ public class DagTest {
File file = new File(resource.getFile());
return yamlParser.parse(file, Flow.class);
return YamlParser.parse(file, Flow.class);
}
}

View File

@@ -5,13 +5,13 @@ import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.LogEntry;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.models.property.Property;
import io.kestra.core.queues.QueueException;
import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.runners.RunnerUtils;
import io.kestra.core.services.PluginDefaultService;
import io.kestra.core.utils.IdUtils;
import io.kestra.core.utils.TestsUtils;
import jakarta.inject.Inject;
@@ -33,9 +33,6 @@ class TimeoutTest {
@Inject
FlowRepositoryInterface flowRepository;
@Inject
PluginDefaultService pluginDefaultService;
@Inject
@Named(QueueFactoryInterface.WORKERTASKLOG_NAMED)
private QueueInterface<LogEntry> workerTaskLogQueue;
@@ -60,7 +57,7 @@ class TimeoutTest {
.build()))
.build();
flowRepository.create(flow, flow.generateSource(), pluginDefaultService.injectDefaults(flow.withSource(flow.generateSource())));
flowRepository.create(GenericFlow.of(flow));
Execution execution = runnerUtils.runOne(flow.getTenantId(), flow.getNamespace(), flow.getId());

View File

@@ -2,6 +2,7 @@ package io.kestra.repository.h2;
import io.kestra.core.models.QueryFilter;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.jdbc.repository.AbstractJdbcFlowRepository;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject;
@@ -16,7 +17,7 @@ import java.util.Map;
@H2RepositoryEnabled
public class H2FlowRepository extends AbstractJdbcFlowRepository {
@Inject
public H2FlowRepository(@Named("flows") H2Repository<Flow> repository,
public H2FlowRepository(@Named("flows") H2Repository<FlowInterface> repository,
ApplicationContext applicationContext) {
super(repository, applicationContext);
}

View File

@@ -2,6 +2,7 @@ package io.kestra.repository.h2;
import io.kestra.core.models.QueryFilter;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.jdbc.AbstractJdbcRepository;
import org.jooq.*;
import org.jooq.impl.DSL;
@@ -14,7 +15,7 @@ import static io.kestra.core.models.QueryFilter.Op.EQUALS;
import static io.kestra.jdbc.repository.AbstractJdbcRepository.field;
public abstract class H2FlowRepositoryService {
public static Condition findCondition(AbstractJdbcRepository<Flow> jdbcRepository, String query, Map<String, String> labels) {
public static Condition findCondition(AbstractJdbcRepository<? extends FlowInterface> jdbcRepository, String query, Map<String, String> labels) {
List<Condition> conditions = new ArrayList<>();
if (query != null) {
@@ -35,7 +36,7 @@ public abstract class H2FlowRepositoryService {
return conditions.isEmpty() ? DSL.trueCondition() : DSL.and(conditions);
}
public static Condition findSourceCodeCondition(AbstractJdbcRepository<Flow> jdbcRepository, String query) {
public static Condition findSourceCodeCondition(AbstractJdbcRepository<? extends FlowInterface> jdbcRepository, String query) {
return jdbcRepository.fullTextCondition(List.of("source_code"), query);
}

View File

@@ -28,7 +28,7 @@ public class H2Queue<T> extends JdbcQueue<T> {
AbstractJdbcRepository.field("offset")
)
.from(this.table)
.where(AbstractJdbcRepository.field("type").eq(this.cls.getName()))
.where(AbstractJdbcRepository.field("type").eq(queueType()))
.and(DSL.or(List.of(
AbstractJdbcRepository.field("consumers").isNull(),
DSL.condition("NOT(ARRAY_CONTAINS(\"consumers\", ?))", queueType)

View File

@@ -4,6 +4,7 @@ import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.ExecutionKilled;
import io.kestra.core.models.executions.LogEntry;
import io.kestra.core.models.executions.MetricEntry;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.templates.Template;
import io.kestra.core.models.triggers.Trigger;
@@ -87,8 +88,8 @@ public class H2QueueFactory implements QueueFactoryInterface {
@Singleton
@Named(QueueFactoryInterface.FLOW_NAMED)
@Bean(preDestroy = "close")
public QueueInterface<FlowWithSource> flow() {
return new H2Queue<>(FlowWithSource.class, applicationContext);
public QueueInterface<FlowInterface> flow() {
return new H2Queue<>(FlowInterface.class, applicationContext);
}
@Override

View File

@@ -0,0 +1,40 @@
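-- widen the enum to accept both the legacy FlowWithSource value and the new FlowInterface value,
-- migrate existing rows, then redefine the enum without the legacy value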
ALTER TABLE queues ALTER COLUMN "type" ENUM(
'io.kestra.core.models.executions.Execution',
'io.kestra.core.models.templates.Template',
'io.kestra.core.models.executions.ExecutionKilled',
'io.kestra.core.runners.WorkerJob',
'io.kestra.core.runners.WorkerTaskResult',
'io.kestra.core.runners.WorkerInstance',
'io.kestra.core.runners.WorkerTaskRunning',
'io.kestra.core.models.executions.LogEntry',
'io.kestra.core.models.triggers.Trigger',
'io.kestra.ee.models.audits.AuditLog',
'io.kestra.core.models.executions.MetricEntry',
'io.kestra.core.runners.WorkerTriggerResult',
'io.kestra.core.runners.SubflowExecutionResult',
'io.kestra.core.models.flows.FlowWithSource',
'io.kestra.core.server.ClusterEvent',
'io.kestra.core.runners.SubflowExecutionEnd',
'io.kestra.core.models.flows.FlowInterface'
) NOT NULL;
UPDATE queues set "type" = 'io.kestra.core.models.flows.FlowInterface' WHERE "type" = 'io.kestra.core.models.flows.FlowWithSource';
ALTER TABLE queues ALTER COLUMN "type" ENUM(
'io.kestra.core.models.executions.Execution',
'io.kestra.core.models.templates.Template',
'io.kestra.core.models.executions.ExecutionKilled',
'io.kestra.core.runners.WorkerJob',
'io.kestra.core.runners.WorkerTaskResult',
'io.kestra.core.runners.WorkerInstance',
'io.kestra.core.runners.WorkerTaskRunning',
'io.kestra.core.models.executions.LogEntry',
'io.kestra.core.models.triggers.Trigger',
'io.kestra.ee.models.audits.AuditLog',
'io.kestra.core.models.executions.MetricEntry',
'io.kestra.core.runners.WorkerTriggerResult',
'io.kestra.core.runners.SubflowExecutionResult',
'io.kestra.core.server.ClusterEvent',
'io.kestra.core.runners.SubflowExecutionEnd',
'io.kestra.core.models.flows.FlowInterface'
) NOT NULL;

View File

@@ -1,12 +1,12 @@
package io.kestra.runner.h2;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.runners.FlowListeners;
import io.kestra.core.runners.FlowListenersTest;
import io.kestra.core.services.PluginDefaultService;
import io.kestra.jdbc.JdbcTestUtils;
import io.kestra.jdbc.JooqDSLContextWrapper;
import jakarta.inject.Inject;
@@ -26,12 +26,15 @@ class H2FlowListenersTest extends FlowListenersTest {
@Inject
@Named(QueueFactoryInterface.FLOW_NAMED)
QueueInterface<FlowWithSource> flowQueue;
QueueInterface<FlowInterface> flowQueue;
@Inject
PluginDefaultService pluginDefaultService;
@Test
public void all() {
// we don't inject FlowListeners, to avoid a flaky test
this.suite(new FlowListeners(flowRepository, flowQueue));
this.suite(new FlowListeners(flowRepository, flowQueue, pluginDefaultService));
}
@BeforeEach

View File

@@ -2,6 +2,7 @@ package io.kestra.repository.mysql;
import io.kestra.core.models.QueryFilter;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.jdbc.repository.AbstractJdbcFlowRepository;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject;
@@ -16,7 +17,7 @@ import java.util.Map;
@MysqlRepositoryEnabled
public class MysqlFlowRepository extends AbstractJdbcFlowRepository {
@Inject
public MysqlFlowRepository(@Named("flows") MysqlRepository<Flow> repository,
public MysqlFlowRepository(@Named("flows") MysqlRepository<FlowInterface> repository,
ApplicationContext applicationContext) {
super(repository, applicationContext);
}

View File

@@ -2,6 +2,7 @@ package io.kestra.repository.mysql;
import io.kestra.core.models.QueryFilter;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.jdbc.AbstractJdbcRepository;
import org.jooq.Condition;
import org.jooq.Field;
@@ -12,7 +13,7 @@ import java.util.*;
import static io.kestra.core.models.QueryFilter.Op.EQUALS;
public abstract class MysqlFlowRepositoryService {
public static Condition findCondition(AbstractJdbcRepository<Flow> jdbcRepository, String query, Map<String, String> labels) {
public static Condition findCondition(AbstractJdbcRepository<? extends FlowInterface> jdbcRepository, String query, Map<String, String> labels) {
List<Condition> conditions = new ArrayList<>();
if (query != null) {
@@ -29,7 +30,7 @@ public abstract class MysqlFlowRepositoryService {
return conditions.isEmpty() ? DSL.trueCondition() : DSL.and(conditions);
}
public static Condition findSourceCodeCondition(AbstractJdbcRepository<Flow> jdbcRepository, String query) {
public static Condition findSourceCodeCondition(AbstractJdbcRepository<? extends FlowInterface> jdbcRepository, String query) {
return jdbcRepository.fullTextCondition(Collections.singletonList("source_code"), query);
}

View File

@@ -16,6 +16,7 @@ import org.jooq.DSLContext;
import org.jooq.Field;
import org.jooq.Record;
import org.jooq.RecordMapper;
import org.jooq.Result;
import org.jooq.Select;
import org.jooq.SelectConditionStep;
import org.jooq.impl.DSL;
@@ -58,15 +59,14 @@ public class MysqlRepository<T> extends AbstractJdbcRepository<T> {
return DSL.condition("MATCH (" + String.join(", ", fields) + ") AGAINST (? IN BOOLEAN MODE)", match);
}
@Override
public <R extends Record, E> ArrayListTotal<E> fetchPage(DSLContext context, SelectConditionStep<R> select, Pageable pageable, RecordMapper<R, E> mapper) {
List<E> map = this.pageable(select, pageable)
.fetch()
.map(mapper);
Result<R> records = this.pageable(select, pageable).fetch();
return dslContextWrapper.transactionResult(configuration -> new ArrayListTotal<>(
map,
DSL.using(configuration).fetchOne("SELECT FOUND_ROWS()").into(Integer.class)
));
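// FOUND_ROWS() is session-scoped in MySQL; the total is now read from the caller's DSLContext instead of a fresh configuration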
return dslContextWrapper.transactionResult(configuration -> {
Integer rows = context.fetchOne("SELECT FOUND_ROWS()").into(Integer.class);
return new ArrayListTotal<>(records.map(mapper), rows);
});
}
@Override

View File

@@ -37,7 +37,7 @@ public class MysqlQueue<T> extends JdbcQueue<T> {
)
// force using the dedicated index, otherwise it would scan the PK index
.from(this.table.useIndex("ix_type__consumers"))
.where(AbstractJdbcRepository.field("type").eq(this.cls.getName()))
.where(AbstractJdbcRepository.field("type").eq(queueType()))
.and(DSL.or(List.of(
AbstractJdbcRepository.field("consumers").isNull(),
AbstractJdbcRepository.field("consumers").in(QUEUE_CONSUMERS.allForConsumerNotIn(queueType))

View File

@@ -4,6 +4,7 @@ import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.ExecutionKilled;
import io.kestra.core.models.executions.LogEntry;
import io.kestra.core.models.executions.MetricEntry;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.templates.Template;
import io.kestra.core.models.triggers.Trigger;
@@ -87,8 +88,8 @@ public class MysqlQueueFactory implements QueueFactoryInterface {
@Singleton
@Named(QueueFactoryInterface.FLOW_NAMED)
@Bean(preDestroy = "close")
public QueueInterface<FlowWithSource> flow() {
return new MysqlQueue<>(FlowWithSource.class, applicationContext);
public QueueInterface<FlowInterface> flow() {
return new MysqlQueue<>(FlowInterface.class, applicationContext);
}
@Override

View File

@@ -0,0 +1,40 @@
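-- same three-step migration as H2: extend the enum with FlowInterface, rewrite existing rows, then drop the legacy FlowWithSource value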
ALTER TABLE queues MODIFY COLUMN `type` ENUM(
'io.kestra.core.models.executions.Execution',
'io.kestra.core.models.templates.Template',
'io.kestra.core.models.executions.ExecutionKilled',
'io.kestra.core.runners.WorkerJob',
'io.kestra.core.runners.WorkerTaskResult',
'io.kestra.core.runners.WorkerInstance',
'io.kestra.core.runners.WorkerTaskRunning',
'io.kestra.core.models.executions.LogEntry',
'io.kestra.core.models.triggers.Trigger',
'io.kestra.ee.models.audits.AuditLog',
'io.kestra.core.models.executions.MetricEntry',
'io.kestra.core.runners.WorkerTriggerResult',
'io.kestra.core.runners.SubflowExecutionResult',
'io.kestra.core.models.flows.FlowWithSource',
'io.kestra.core.server.ClusterEvent',
'io.kestra.core.runners.SubflowExecutionEnd',
'io.kestra.core.models.flows.FlowInterface'
) NOT NULL;
UPDATE queues set `type` = 'io.kestra.core.models.flows.FlowInterface' WHERE `type` = 'io.kestra.core.models.flows.FlowWithSource';
ALTER TABLE queues MODIFY COLUMN `type` ENUM(
'io.kestra.core.models.executions.Execution',
'io.kestra.core.models.templates.Template',
'io.kestra.core.models.executions.ExecutionKilled',
'io.kestra.core.runners.WorkerJob',
'io.kestra.core.runners.WorkerTaskResult',
'io.kestra.core.runners.WorkerInstance',
'io.kestra.core.runners.WorkerTaskRunning',
'io.kestra.core.models.executions.LogEntry',
'io.kestra.core.models.triggers.Trigger',
'io.kestra.ee.models.audits.AuditLog',
'io.kestra.core.models.executions.MetricEntry',
'io.kestra.core.runners.WorkerTriggerResult',
'io.kestra.core.runners.SubflowExecutionResult',
'io.kestra.core.server.ClusterEvent',
'io.kestra.core.runners.SubflowExecutionEnd',
'io.kestra.core.models.flows.FlowInterface'
) NOT NULL;

View File

@@ -2,6 +2,7 @@ package io.kestra.repository.postgres;
import io.kestra.core.models.QueryFilter;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.jdbc.repository.AbstractJdbcFlowRepository;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject;
@@ -16,7 +17,7 @@ import java.util.Map;
@PostgresRepositoryEnabled
public class PostgresFlowRepository extends AbstractJdbcFlowRepository {
@Inject
public PostgresFlowRepository(@Named("flows") PostgresRepository<Flow> repository,
public PostgresFlowRepository(@Named("flows") PostgresRepository<FlowInterface> repository,
ApplicationContext applicationContext) {
super(repository, applicationContext);
}

View File

@@ -2,6 +2,7 @@ package io.kestra.repository.postgres;
import io.kestra.core.models.QueryFilter;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowScope;
import io.kestra.jdbc.AbstractJdbcRepository;
import org.jooq.Condition;
@@ -14,7 +15,7 @@ import static io.kestra.jdbc.repository.AbstractJdbcRepository.field;
import static io.kestra.jdbc.repository.AbstractJdbcTriggerRepository.NAMESPACE_FIELD;
public abstract class PostgresFlowRepositoryService {
public static Condition findCondition(AbstractJdbcRepository<Flow> jdbcRepository, String query, Map<String, String> labels) {
public static Condition findCondition(AbstractJdbcRepository<? extends FlowInterface> jdbcRepository, String query, Map<String, String> labels) {
List<Condition> conditions = new ArrayList<>();
if (query != null) {
@@ -31,7 +32,7 @@ public abstract class PostgresFlowRepositoryService {
return conditions.isEmpty() ? DSL.trueCondition() : DSL.and(conditions);
}
public static Condition findSourceCodeCondition(AbstractJdbcRepository<Flow> jdbcRepository, String query) {
public static Condition findSourceCodeCondition(AbstractJdbcRepository<? extends FlowInterface> jdbcRepository, String query) {
return jdbcRepository.fullTextCondition(Collections.singletonList("FULLTEXT_INDEX(source_code)"), query);
}

View File

@@ -37,7 +37,7 @@ public class PostgresQueue<T> extends JdbcQueue<T> {
map.put(
AbstractJdbcRepository.field("type"),
DSL.field("CAST(? AS queue_type)", this.cls.getName())
DSL.field("CAST(? AS queue_type)", queueType())
);
return map;
@@ -59,7 +59,7 @@ public class PostgresQueue<T> extends JdbcQueue<T> {
AbstractJdbcRepository.field("offset")
)
.from(this.table)
.where(DSL.condition("type = CAST(? AS queue_type)", this.cls.getName()))
.where(DSL.condition("type = CAST(? AS queue_type)", queueType()))
.and(AbstractJdbcRepository.field("consumer_" + queueType, Boolean.class).isFalse());
if (consumerGroup != null) {

View File

@@ -4,6 +4,7 @@ import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.ExecutionKilled;
import io.kestra.core.models.executions.LogEntry;
import io.kestra.core.models.executions.MetricEntry;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.templates.Template;
import io.kestra.core.models.triggers.Trigger;
@@ -87,8 +88,8 @@ public class PostgresQueueFactory implements QueueFactoryInterface {
@Singleton
@Named(QueueFactoryInterface.FLOW_NAMED)
@Bean(preDestroy = "close")
public QueueInterface<FlowWithSource> flow() {
return new PostgresQueue<>(FlowWithSource.class, applicationContext);
public QueueInterface<FlowInterface> flow() {
return new PostgresQueue<>(FlowInterface.class, applicationContext);
}
@Override

View File

@@ -0,0 +1,9 @@
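-- rename the queue_type enum value in place; invalid_parameter_value is swallowed so the migration can be re-applied when the old value is already gone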
DO $$
BEGIN
BEGIN
ALTER TYPE queue_type RENAME VALUE 'io.kestra.core.models.flows.FlowWithSource' TO 'io.kestra.core.models.flows.FlowInterface';
EXCEPTION
WHEN invalid_parameter_value THEN null;
END;
END;
$$;

Some files were not shown because too many files have changed in this diff