Compare commits


1 Commit

Author:  Roman Acevedo
SHA1:    9575cc1c87
Message: fake commit to run flaky tests
Date:    2025-12-05 18:07:27 +01:00
206 changed files with 2352 additions and 3566 deletions

View File

@@ -51,7 +51,7 @@ updates:
storybook:
applies-to: version-updates
patterns: ["storybook*", "@storybook/*", "eslint-plugin-storybook"]
patterns: ["storybook*", "@storybook/*"]
vitest:
applies-to: version-updates
@@ -67,10 +67,10 @@ updates:
"@types/*",
"storybook*",
"@storybook/*",
"eslint-plugin-storybook",
"vitest",
"@vitest/*",
# Temporary exclusion of these packages from major updates
"eslint-plugin-storybook",
"eslint-plugin-vue",
]
@@ -84,7 +84,6 @@ updates:
"@types/*",
"storybook*",
"@storybook/*",
"eslint-plugin-storybook",
"vitest",
"@vitest/*",
# Temporary exclusion of these packages from minor updates
@@ -103,7 +102,6 @@ updates:
"@types/*",
"storybook*",
"@storybook/*",
"eslint-plugin-storybook",
"vitest",
"@vitest/*",
]

View File

@@ -64,7 +64,6 @@ jobs:
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
GH_PERSONAL_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
publish-develop-maven:

View File

@@ -32,4 +32,3 @@ jobs:
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
GH_PERSONAL_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}

View File

@@ -29,8 +29,8 @@ start_time2=$(date +%s)
echo "cd ./ui"
cd ./ui
echo "npm ci"
npm ci
echo "npm i"
npm i
echo 'sh ./run-e2e-tests.sh --kestra-docker-image-to-test "kestra/kestra:$LOCAL_IMAGE_VERSION"'
./run-e2e-tests.sh --kestra-docker-image-to-test "kestra/kestra:$LOCAL_IMAGE_VERSION"

View File

@@ -21,7 +21,7 @@ plugins {
// test
id "com.adarshr.test-logger" version "4.0.0"
id "org.sonarqube" version "7.2.1.6560"
id "org.sonarqube" version "7.1.0.6387"
id 'jacoco-report-aggregation'
// helper
@@ -331,7 +331,7 @@ subprojects {
}
dependencies {
agent "org.aspectj:aspectjweaver:1.9.25.1"
agent "org.aspectj:aspectjweaver:1.9.25"
}
test {

View File

@@ -82,8 +82,8 @@ dependencies {
testImplementation "io.micronaut:micronaut-http-server-netty"
testImplementation "io.micronaut:micronaut-management"
testImplementation "org.testcontainers:testcontainers:1.21.4"
testImplementation "org.testcontainers:junit-jupiter:1.21.4"
testImplementation "org.testcontainers:testcontainers:1.21.3"
testImplementation "org.testcontainers:junit-jupiter:1.21.3"
testImplementation "org.bouncycastle:bcpkix-jdk18on"
testImplementation "org.wiremock:wiremock-jetty12"

View File

@@ -3,7 +3,6 @@ package io.kestra.core.docs;
import io.kestra.core.models.annotations.PluginSubGroup;
import io.kestra.core.plugins.RegisteredPlugin;
import io.micronaut.core.annotation.Nullable;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import lombok.NoArgsConstructor;
@@ -118,17 +117,10 @@ public class Plugin {
.filter(not(io.kestra.core.models.Plugin::isInternal))
.filter(clazzFilter)
.filter(c -> !c.getName().startsWith("org.kestra."))
.map(c -> {
Schema schema = c.getAnnotation(Schema.class);
var title = Optional.ofNullable(schema).map(Schema::title).filter(t -> !t.isEmpty()).orElse(null);
var description = Optional.ofNullable(schema).map(Schema::description).filter(d -> !d.isEmpty()).orElse(null);
var deprecated = io.kestra.core.models.Plugin.isDeprecated(c) ? true : null;
return new PluginElementMetadata(c.getName(), deprecated, title, description);
})
.map(c -> new PluginElementMetadata(c.getName(), io.kestra.core.models.Plugin.isDeprecated(c) ? true : null))
.toList();
}
public record PluginElementMetadata(String cls, Boolean deprecated, String title, String description) {}
public record PluginElementMetadata(String cls, Boolean deprecated) {
}
}

View File

@@ -4,16 +4,13 @@ import io.kestra.core.utils.MapUtils;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.annotation.Nullable;
import jakarta.validation.constraints.NotEmpty;
import jakarta.validation.constraints.Pattern;
import java.util.*;
import java.util.function.Predicate;
import java.util.stream.Collectors;
@Schema(description = "A key/value pair that can be attached to a Flow or Execution. Labels are often used to organize and categorize objects.")
public record Label(
@NotEmpty @Pattern(regexp = "^[\\p{Ll}][\\p{L}0-9._-]*$", message = "Invalid label key. A valid key contains only lowercase letters, numbers, hyphens (-), underscores (_), or periods (.), and must begin with a lowercase letter.") String key,
@NotEmpty String value) {
public record Label(@NotEmpty String key, @NotEmpty String value) {
public static final String SYSTEM_PREFIX = "system.";
// system labels
@@ -26,7 +23,6 @@ public record Label(
public static final String REPLAYED = SYSTEM_PREFIX + "replayed";
public static final String SIMULATED_EXECUTION = SYSTEM_PREFIX + "simulatedExecution";
public static final String TEST = SYSTEM_PREFIX + "test";
public static final String FROM = SYSTEM_PREFIX + "from";
/**
* Static helper method for converting a list of labels to a nested map.

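Editor's note, not part of the changeset: the @Pattern constraint removed above encodes the label-key rules spelled out in its message. A minimal sketch of that regex's behavior using java.util.regex.Pattern; the example keys are hypothetical:
// Editor's sketch of the removed key pattern; example keys are hypothetical.
Pattern labelKey = Pattern.compile("^[\\p{Ll}][\\p{L}0-9._-]*$");
labelKey.matcher("env").matches(); // true: begins with a lowercase letter
labelKey.matcher("team.sub_team-1").matches(); // true: letters, digits, '.', '_' and '-' allowed after the first character
labelKey.matcher("1env").matches(); // false: must not begin with a digit
labelKey.matcher("Env").matches(); // false: uppercase first letter rejected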
View File

@@ -94,7 +94,7 @@ public record QueryFilter(
KIND("kind") {
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS,Op.NOT_EQUALS, Op.IN, Op.NOT_IN);
return List.of(Op.EQUALS,Op.NOT_EQUALS);
}
},
LABELS("labels") {
@@ -106,7 +106,7 @@ public record QueryFilter(
FLOW_ID("flowId") {
@Override
public List<Op> supportedOp() {
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.REGEX, Op.IN, Op.NOT_IN, Op.PREFIX);
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.REGEX);
}
},
UPDATED("updated") {
@@ -226,7 +226,7 @@ public record QueryFilter(
FLOW {
@Override
public List<Field> supportedField() {
return List.of(Field.LABELS, Field.NAMESPACE, Field.QUERY, Field.SCOPE, Field.FLOW_ID);
return List.of(Field.LABELS, Field.NAMESPACE, Field.QUERY, Field.SCOPE);
}
},
NAMESPACE {
@@ -241,7 +241,7 @@ public record QueryFilter(
return List.of(
Field.QUERY, Field.SCOPE, Field.FLOW_ID, Field.START_DATE, Field.END_DATE,
Field.STATE, Field.LABELS, Field.TRIGGER_EXECUTION_ID, Field.CHILD_FILTER,
Field.NAMESPACE, Field.KIND
Field.NAMESPACE,Field.KIND
);
}
},

View File

@@ -16,7 +16,6 @@ import jakarta.validation.constraints.NotNull;
public class Setting {
public static final String INSTANCE_UUID = "instance.uuid";
public static final String INSTANCE_VERSION = "instance.version";
public static final String INSTANCE_EDITION = "instance.edition";
@NotNull
private String key;

View File

@@ -1,5 +1,6 @@
package io.kestra.core.models.flows;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
@@ -129,7 +130,7 @@ public class Flow extends AbstractFlow implements HasUID {
@Valid
@PluginProperty
List<SLA> sla;
@Schema(
title = "Conditions evaluated before the flow is executed.",
description = "A list of conditions that are evaluated before the flow is executed. If no checks are defined, the flow executes normally."
@@ -354,7 +355,7 @@ public class Flow extends AbstractFlow implements HasUID {
* To be conservative a flow MUST not return any source.
*/
@Override
@Schema(hidden = true)
@JsonIgnore
public String getSource() {
return null;
}

View File

@@ -1,12 +1,14 @@
package io.kestra.core.models.flows;
import com.fasterxml.jackson.annotation.JsonIgnore;
import io.micronaut.core.annotation.Introspected;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.ToString;
import lombok.experimental.SuperBuilder;
import io.swagger.v3.oas.annotations.media.Schema;
import java.util.Objects;
import java.util.regex.Pattern;
@SuperBuilder(toBuilder = true)
@Getter
@@ -46,7 +48,7 @@ public class FlowWithSource extends Flow {
}
@Override
@Schema(hidden = false)
@JsonIgnore(value = false)
public String getSource() {
return this.source;
}

View File

@@ -267,10 +267,6 @@ public class State {
return this == Type.RUNNING || this == Type.KILLING;
}
public boolean onlyRunning() {
return this == Type.RUNNING;
}
public boolean isFailed() {
return this == Type.FAILED;
}

View File

@@ -93,7 +93,7 @@ public class Property<T> {
* @return a new {@link Property} without a pre-rendered value
*/
public Property<T> skipCache() {
return new Property<>(expression, true);
return Property.ofExpression(expression);
}
/**

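Editor's note, not part of the changeset: per the Javadoc above, skipCache() returns a Property that is re-rendered from its expression instead of reusing a pre-rendered value; this hunk reverts the implementation to delegate to Property.ofExpression. A minimal sketch mirroring the call sites changed in the LoopUntil and Switch hunks below; runContext and this.value are assumed from the surrounding task:
// Editor's sketch; identifiers are assumed from the surrounding task.
String fresh = runContext.render(this.value).skipCache().as(String.class).orElseThrow(); // forces re-rendering
String cached = runContext.render(this.value).as(String.class).orElseThrow(); // may reuse a pre-rendered value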
View File

@@ -82,12 +82,6 @@ abstract public class AbstractTrigger implements TriggerInterface {
@PluginProperty(hidden = true, group = PluginProperty.CORE_GROUP)
private boolean failOnTriggerError = false;
@PluginProperty(group = PluginProperty.CORE_GROUP)
@Schema(
title = "Specifies whether a trigger is allowed to start a new execution even if a previous run is still in progress."
)
private boolean allowConcurrent = false;
/**
* For backward compatibility: we rename minLogLevel to logLevel.
* @deprecated use {@link #logLevel} instead

View File

@@ -1,37 +1,22 @@
package io.kestra.core.models.triggers;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.models.annotations.PluginProperty;
import io.kestra.core.models.conditions.ConditionContext;
import io.kestra.core.runners.RunContext;
import io.swagger.v3.oas.annotations.media.Schema;
import java.time.ZonedDateTime;
import java.util.Map;
public interface Schedulable extends PollingTriggerInterface {
String PLUGIN_PROPERTY_RECOVER_MISSED_SCHEDULES = "recoverMissedSchedules";
@Schema(
title = "The inputs to pass to the scheduled flow"
)
@PluginProperty(dynamic = true)
Map<String, Object> getInputs();
@Schema(
title = "Action to take in the case of missed schedules",
description = "`ALL` will recover all missed schedules, `LAST` will only recover the last missing one, `NONE` will not recover any missing schedule.\n" +
"The default is `ALL` unless a different value is configured using the global plugin configuration."
)
@PluginProperty
RecoverMissedSchedules getRecoverMissedSchedules();
/**
* Compute the previous evaluation of a trigger.
* This is used when a trigger misses some schedule to compute the next date to evaluate in the past.
*/
ZonedDateTime previousEvaluationDate(ConditionContext conditionContext) throws IllegalVariableEvaluationException;
RecoverMissedSchedules getRecoverMissedSchedules();
/**
* Load the default RecoverMissedSchedules from plugin property, or else ALL.
*/

View File

@@ -172,7 +172,7 @@ public class Trigger extends TriggerContext implements HasUID {
if (abstractTrigger instanceof PollingTriggerInterface pollingTriggerInterface) {
try {
nextDate = pollingTriggerInterface.nextEvaluationDate(conditionContext, lastTrigger);
nextDate = pollingTriggerInterface.nextEvaluationDate(conditionContext, Optional.empty());
} catch (InvalidTriggerConfigurationException e) {
disabled = true;
}

View File

@@ -6,9 +6,12 @@ import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.ExecutionTrigger;
import io.kestra.core.models.tasks.Output;
import io.kestra.core.models.flows.State;
import io.kestra.core.runners.DefaultRunContext;
import io.kestra.core.runners.FlowInputOutput;
import io.kestra.core.runners.RunContext;
import io.kestra.core.utils.IdUtils;
import io.kestra.core.utils.ListUtils;
import java.time.ZonedDateTime;
import java.util.*;
public abstract class TriggerService {
@@ -48,6 +51,58 @@ public abstract class TriggerService {
return generateExecution(IdUtils.create(), trigger, context, executionTrigger, conditionContext);
}
public static Execution generateScheduledExecution(
AbstractTrigger trigger,
ConditionContext conditionContext,
TriggerContext context,
List<Label> labels,
Map<String, Object> inputs,
Map<String, Object> variables,
Optional<ZonedDateTime> scheduleDate
) {
RunContext runContext = conditionContext.getRunContext();
ExecutionTrigger executionTrigger = ExecutionTrigger.of(trigger, variables);
List<Label> executionLabels = new ArrayList<>(ListUtils.emptyOnNull(labels));
if (executionLabels.stream().noneMatch(label -> Label.CORRELATION_ID.equals(label.key()))) {
// add a correlation ID if none exist
executionLabels.add(new Label(Label.CORRELATION_ID, runContext.getTriggerExecutionId()));
}
Execution execution = Execution.builder()
.id(runContext.getTriggerExecutionId())
.tenantId(context.getTenantId())
.namespace(context.getNamespace())
.flowId(context.getFlowId())
.flowRevision(conditionContext.getFlow().getRevision())
.variables(conditionContext.getFlow().getVariables())
.labels(executionLabels)
.state(new State())
.trigger(executionTrigger)
.scheduleDate(scheduleDate.map(date -> date.toInstant()).orElse(null))
.build();
Map<String, Object> allInputs = new HashMap<>();
// add flow inputs with default value
var flow = conditionContext.getFlow();
if (flow.getInputs() != null) {
flow.getInputs().stream()
.filter(input -> input.getDefaults() != null)
.forEach(input -> allInputs.put(input.getId(), input.getDefaults()));
}
if (inputs != null) {
allInputs.putAll(inputs);
}
// add inputs and inject defaults
if (!allInputs.isEmpty()) {
FlowInputOutput flowInputOutput = ((DefaultRunContext)runContext).getApplicationContext().getBean(FlowInputOutput.class);
execution = execution.withInputs(flowInputOutput.readExecutionInputs(conditionContext.getFlow(), execution, allInputs));
}
return execution;
}
private static Execution generateExecution(
String id,
AbstractTrigger trigger,
@@ -56,7 +111,6 @@ public abstract class TriggerService {
ConditionContext conditionContext
) {
List<Label> executionLabels = new ArrayList<>(ListUtils.emptyOnNull(trigger.getLabels()));
executionLabels.add(new Label(Label.FROM, "trigger"));
if (executionLabels.stream().noneMatch(label -> Label.CORRELATION_ID.equals(label.key()))) {
// add a correlation ID if none exist
executionLabels.add(new Label(Label.CORRELATION_ID, id));

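Editor's note, not part of the changeset: the new generateScheduledExecution helper merges flow input defaults with the supplied inputs and stamps a correlation ID when none is present. A minimal sketch of a call site, mirroring the Schedule hunk further down; the argument values are illustrative:
// Editor's sketch; `this` is the Schedulable trigger, other values are illustrative.
Execution execution = TriggerService.generateScheduledExecution(
    this,
    conditionContext,
    triggerContext,
    labels,            // e.g. rendered trigger/backfill labels
    inputs,            // e.g. rendered trigger/backfill inputs
    variables,
    Optional.empty()   // or Optional.of(date) to pin a schedule date
);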
View File

@@ -1,10 +1,10 @@
package io.kestra.core.repositories;
import io.kestra.core.models.Setting;
import jakarta.validation.ConstraintViolationException;
import java.util.List;
import java.util.Optional;
import jakarta.validation.ConstraintViolationException;
public interface SettingRepositoryInterface {
Optional<Setting> findByKey(String key);
@@ -13,7 +13,5 @@ public interface SettingRepositoryInterface {
Setting save(Setting setting) throws ConstraintViolationException;
Setting internalSave(Setting setting) throws ConstraintViolationException;
Setting delete(Setting setting);
}

View File

@@ -16,8 +16,8 @@ import java.util.function.Function;
public interface TriggerRepositoryInterface extends QueryBuilderInterface<Triggers.Fields> {
Optional<Trigger> findLast(TriggerContext trigger);
Optional<Trigger> findByUid(String uid);
Optional<Trigger> findByExecution(Execution execution);
List<Trigger> findAll(String tenantId);
List<Trigger> findAllForAllTenants();

View File

@@ -6,12 +6,10 @@ import com.google.common.base.CaseFormat;
import com.google.common.collect.ImmutableMap;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.metrics.MetricRegistry;
import io.kestra.core.models.Plugin;
import io.kestra.core.models.executions.AbstractMetricEntry;
import io.kestra.core.models.property.Property;
import io.kestra.core.models.tasks.Task;
import io.kestra.core.models.triggers.AbstractTrigger;
import io.kestra.core.plugins.PluginConfigurations;
import io.kestra.core.services.KVStoreService;
import io.kestra.core.storages.Storage;
import io.kestra.core.storages.StorageInterface;
@@ -237,14 +235,6 @@ public class DefaultRunContext extends RunContext {
return runContext;
}
@Override
public RunContext cloneForPlugin(Plugin plugin) {
PluginConfigurations pluginConfigurations = applicationContext.getBean(PluginConfigurations.class);
DefaultRunContext runContext = clone();
runContext.pluginConfiguration = pluginConfigurations.getConfigurationByPluginTypeOrAliases(plugin.getType(), plugin.getClass());
return runContext;
}
/**
* {@inheritDoc}
*/
@@ -599,11 +589,6 @@ public class DefaultRunContext extends RunContext {
return localPath;
}
@Override
public InputAndOutput inputAndOutput() {
return new InputAndOutputImpl(this.applicationContext, this);
}
/**
* Builder class for constructing new {@link DefaultRunContext} objects.
*/

View File

@@ -189,11 +189,12 @@ public final class ExecutableUtils {
variables.put("taskRunIteration", currentTaskRun.getIteration());
}
FlowInputOutput flowInputOutput = ((DefaultRunContext)runContext).getApplicationContext().getBean(FlowInputOutput.class);
Instant scheduleOnDate = runContext.render(scheduleDate).as(ZonedDateTime.class).map(date -> date.toInstant()).orElse(null);
Execution execution = Execution
.newExecution(
flow,
(f, e) -> runContext.inputAndOutput().readInputs(f, e, inputs),
(f, e) -> flowInputOutput.readExecutionInputs(f, e, inputs),
newLabels,
Optional.empty())
.withTrigger(ExecutionTrigger.builder()

View File

@@ -3,11 +3,13 @@ package io.kestra.core.runners;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.kestra.core.encryption.EncryptionService;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.exceptions.KestraRuntimeException;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Data;
import io.kestra.core.models.flows.DependsOn;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.Input;
import io.kestra.core.models.flows.Output;
import io.kestra.core.models.flows.RenderableInput;
import io.kestra.core.models.flows.Type;
import io.kestra.core.models.flows.input.FileInput;
@@ -156,7 +158,11 @@ public class FlowInputOutput {
File tempFile = File.createTempFile(prefix, fileExtension);
try (var inputStream = fileUpload.getInputStream();
var outputStream = new FileOutputStream(tempFile)) {
inputStream.transferTo(outputStream);
long transferredBytes = inputStream.transferTo(outputStream);
if (transferredBytes == 0) {
sink.error(new KestraRuntimeException("Can't upload file: " + fileUpload.getFilename()));
return;
}
URI from = storageInterface.from(execution, inputId, fileName, tempFile);
sink.next(Map.entry(inputId, from.toString()));
} finally {
@@ -376,11 +382,11 @@ public class FlowInputOutput {
@SuppressWarnings("unchecked")
private static <T> Object resolveDefaultPropertyAs(Input<?> input, PropertyContext renderer, Class<T> clazz) throws IllegalVariableEvaluationException {
return Property.as((Property<T>) input.getDefaults().skipCache(), renderer, clazz);
return Property.as((Property<T>) input.getDefaults(), renderer, clazz);
}
@SuppressWarnings("unchecked")
private static <T> Object resolveDefaultPropertyAsList(Input<?> input, PropertyContext renderer, Class<T> clazz) throws IllegalVariableEvaluationException {
return Property.asList((Property<List<T>>) input.getDefaults().skipCache(), renderer, clazz);
return Property.asList((Property<List<T>>) input.getDefaults(), renderer, clazz);
}
private RunContext buildRunContextForExecutionAndInputs(final FlowInterface flow, final Execution execution, Map<String, InputAndValue> dependencies, final boolean decryptSecrets) {
@@ -496,8 +502,8 @@ public class FlowInputOutput {
yield storageInterface.from(execution, id, current.toString().substring(current.toString().lastIndexOf("/") + 1), new File(current.toString()));
}
}
case JSON -> (current instanceof Map || current instanceof Collection<?>) ? current : JacksonMapper.toObject(current.toString());
case YAML -> (current instanceof Map || current instanceof Collection<?>) ? current : YAML_MAPPER.readValue(current.toString(), JacksonMapper.OBJECT_TYPE_REFERENCE);
case JSON -> JacksonMapper.toObject(current.toString());
case YAML -> YAML_MAPPER.readValue(current.toString(), JacksonMapper.OBJECT_TYPE_REFERENCE);
case URI -> {
Matcher matcher = URI_PATTERN.matcher(current.toString());
if (matcher.matches()) {
@@ -537,6 +543,30 @@ public class FlowInputOutput {
}
}
public static Map<String, Object> renderFlowOutputs(List<Output> outputs, RunContext runContext) throws IllegalVariableEvaluationException {
if (outputs == null) return Map.of();
// render required outputs
Map<String, Object> outputsById = outputs
.stream()
.filter(output -> output.getRequired() == null || output.getRequired())
.collect(HashMap::new, (map, entry) -> map.put(entry.getId(), entry.getValue()), Map::putAll);
outputsById = runContext.render(outputsById);
// render optional outputs one by one to catch, log, and skip any error.
for (io.kestra.core.models.flows.Output output : outputs) {
if (Boolean.FALSE.equals(output.getRequired())) {
try {
outputsById.putAll(runContext.render(Map.of(output.getId(), output.getValue())));
} catch (Exception e) {
runContext.logger().warn("Failed to render optional flow output '{}'. Output is ignored.", output.getId(), e);
outputsById.put(output.getId(), null);
}
}
}
return outputsById;
}
/**
* Mutable wrapper to hold a flow's input and its resolved value.
*/

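Editor's note, not part of the changeset: the new static renderFlowOutputs renders required outputs strictly (any failure propagates) and optional outputs leniently (failures are logged and the value set to null). A minimal sketch mirroring the ForEachItem and Subflow call sites later in this changeset; flow and runContext are assumed from the caller:
// Editor's sketch; `flow` and `runContext` are assumed from the caller.
Map<String, Object> rOutputs = FlowInputOutput.renderFlowOutputs(flow.getOutputs(), runContext);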
View File

@@ -1,29 +0,0 @@
package io.kestra.core.runners;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.Output;
import java.util.List;
import java.util.Map;
/**
* InputAndOutput can be used to work with flow execution inputs and outputs.
*/
public interface InputAndOutput {
/**
* Reads the inputs of a flow execution.
*/
Map<String, Object> readInputs(FlowInterface flow, Execution execution, Map<String, Object> inputs);
/**
* Processes the outputs of a flow execution (parse them based on their types).
*/
Map<String, Object> typedOutputs(FlowInterface flow, Execution execution, Map<String, Object> rOutputs);
/**
* Render flow execution outputs.
*/
Map<String, Object> renderOutputs(List<Output> outputs) throws IllegalVariableEvaluationException;
}

View File

@@ -1,56 +0,0 @@
package io.kestra.core.runners;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.Output;
import io.micronaut.context.ApplicationContext;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
class InputAndOutputImpl implements InputAndOutput {
private final FlowInputOutput flowInputOutput;
private final RunContext runContext;
InputAndOutputImpl(ApplicationContext applicationContext, RunContext runContext) {
this.flowInputOutput = applicationContext.getBean(FlowInputOutput.class);
this.runContext = runContext;
}
@Override
public Map<String, Object> readInputs(FlowInterface flow, Execution execution, Map<String, Object> inputs) {
return flowInputOutput.readExecutionInputs(flow, execution, inputs);
}
@Override
public Map<String, Object> typedOutputs(FlowInterface flow, Execution execution, Map<String, Object> rOutputs) {
return flowInputOutput.typedOutputs(flow, execution, rOutputs);
}
@Override
public Map<String, Object> renderOutputs(List<Output> outputs) throws IllegalVariableEvaluationException {
if (outputs == null) return Map.of();
// render required outputs
Map<String, Object> outputsById = outputs
.stream()
.filter(output -> output.getRequired() == null || output.getRequired())
.collect(HashMap::new, (map, entry) -> map.put(entry.getId(), entry.getValue()), Map::putAll);
outputsById = runContext.render(outputsById);
// render optional outputs one by one to catch, log, and skip any error.
for (io.kestra.core.models.flows.Output output : outputs) {
if (Boolean.FALSE.equals(output.getRequired())) {
try {
outputsById.putAll(runContext.render(Map.of(output.getId(), output.getValue())));
} catch (Exception e) {
runContext.logger().warn("Failed to render optional flow output '{}'. Output is ignored.", output.getId(), e);
outputsById.put(output.getId(), null);
}
}
}
return outputsById;
}
}

View File

@@ -4,7 +4,6 @@ import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import io.kestra.core.encryption.EncryptionService;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.models.Plugin;
import io.kestra.core.models.executions.AbstractMetricEntry;
import io.kestra.core.models.property.Property;
import io.kestra.core.models.property.PropertyContext;
@@ -205,15 +204,4 @@ public abstract class RunContext implements PropertyContext {
* when Namespace ACLs are used (EE).
*/
public abstract AclChecker acl();
/**
* Clone this run context for a specific plugin.
* @return a new run context with the plugin configuration of the given plugin.
*/
public abstract RunContext cloneForPlugin(Plugin plugin);
/**
* @return an InputAndOutput that can be used to work with inputs and outputs.
*/
public abstract InputAndOutput inputAndOutput();
}

View File

@@ -1,8 +1,10 @@
package io.kestra.core.runners;
import com.google.common.collect.Lists;
import io.kestra.core.models.Plugin;
import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.tasks.Task;
import io.kestra.core.models.tasks.runners.TaskRunner;
import io.kestra.core.models.triggers.AbstractTrigger;
import io.kestra.core.models.triggers.TriggerContext;
import io.kestra.core.plugins.PluginConfigurations;
@@ -51,6 +53,20 @@ public class RunContextInitializer {
@Value("${kestra.encryption.secret-key}")
protected Optional<String> secretKey;
/**
* Initializes the given {@link RunContext} for the given {@link Plugin}.
*
* @param runContext The {@link RunContext} to initialize.
* @param plugin The {@link Plugin} used for initialization.
* @return The initialized {@link RunContext}.
*/
public DefaultRunContext forPlugin(final DefaultRunContext runContext,
final Plugin plugin) {
runContext.init(applicationContext);
runContext.setPluginConfiguration(pluginConfigurations.getConfigurationByPluginTypeOrAliases(plugin.getType(), plugin.getClass()));
return runContext;
}
/**
* Initializes the given {@link RunContext} for the given {@link WorkerTask} for executor.
*

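Editor's note, not part of the changeset: forPlugin replaces the RunContext#cloneForPlugin method removed earlier in this changeset; the plugin configuration is now applied during initialization rather than by cloning the context. A minimal sketch, where `initializer` is a hypothetical injected RunContextInitializer:
// Editor's sketch; `initializer` is an injected RunContextInitializer (hypothetical name).
DefaultRunContext pluginRunContext = initializer.forPlugin((DefaultRunContext) runContext, plugin);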
View File

@@ -55,11 +55,11 @@ public class RunContextLogger implements Supplier<org.slf4j.Logger> {
public RunContextLogger(QueueInterface<LogEntry> logQueue, LogEntry logEntry, org.slf4j.event.Level loglevel, boolean logToFile) {
if (logEntry.getTaskId() != null) {
this.loggerName = baseLoggerName(logEntry) + "." + logEntry.getTaskId();
this.loggerName = "flow." + logEntry.getFlowId() + "." + logEntry.getTaskId();
} else if (logEntry.getTriggerId() != null) {
this.loggerName = baseLoggerName(logEntry) + "." + logEntry.getTriggerId();
this.loggerName = "flow." + logEntry.getFlowId() + "." + logEntry.getTriggerId();
} else {
this.loggerName = baseLoggerName(logEntry);
this.loggerName = "flow." + logEntry.getFlowId();
}
this.logQueue = logQueue;
@@ -68,10 +68,6 @@ public class RunContextLogger implements Supplier<org.slf4j.Logger> {
this.logToFile = logToFile;
}
private String baseLoggerName(LogEntry logEntry) {
return "flow." + logEntry.getTenantId() + "." + logEntry.getNamespace() + "." + logEntry.getFlowId();
}
private static List<LogEntry> logEntry(ILoggingEvent event, String message, org.slf4j.event.Level level, LogEntry logEntry) {
Iterable<String> split;

View File

@@ -81,24 +81,7 @@ public final class YamlParser {
throw toConstraintViolationException(input, resource, e);
}
}
private static String formatYamlErrorMessage(String originalMessage, JsonProcessingException e) {
StringBuilder friendlyMessage = new StringBuilder();
if (originalMessage.contains("Expected a field name")) {
friendlyMessage.append("YAML syntax error: Invalid structure. Check indentation and ensure all fields are properly formatted.");
} else if (originalMessage.contains("MappingStartEvent")) {
friendlyMessage.append("YAML syntax error: Unexpected mapping start. Verify that scalar values are properly quoted if needed.");
} else if (originalMessage.contains("Scalar value")) {
friendlyMessage.append("YAML syntax error: Expected a simple value but found complex structure. Check for unquoted special characters.");
} else {
friendlyMessage.append("YAML parsing error: ").append(originalMessage.replaceAll("org\\.yaml\\.snakeyaml.*", "").trim());
}
if (e.getLocation() != null) {
int line = e.getLocation().getLineNr();
friendlyMessage.append(String.format(" (at line %d)", line));
}
// Return a generic but cleaner message for other YAML errors
return friendlyMessage.toString();
}
@SuppressWarnings("unchecked")
public static <T> ConstraintViolationException toConstraintViolationException(T target, String resource, JsonProcessingException e) {
if (e.getCause() instanceof ConstraintViolationException constraintViolationException) {
@@ -138,12 +121,11 @@ public final class YamlParser {
)
));
} else {
String userFriendlyMessage = formatYamlErrorMessage(e.getMessage(), e);
return new ConstraintViolationException(
"Illegal " + resource + " source: " + userFriendlyMessage,
"Illegal " + resource + " source: " + e.getMessage(),
Collections.singleton(
ManualConstraintViolation.of(
userFriendlyMessage,
e.getCause() == null ? e.getMessage() : e.getMessage() + "\nCaused by: " + e.getCause().getMessage(),
target,
(Class<T>) target.getClass(),
"yaml",
@@ -154,3 +136,4 @@ public final class YamlParser {
}
}
}

View File

@@ -4,6 +4,7 @@ import com.cronutils.utils.VisibleForTesting;
import io.kestra.core.exceptions.InternalException;
import io.kestra.core.models.conditions.Condition;
import io.kestra.core.models.conditions.ConditionContext;
import io.kestra.core.models.conditions.ScheduleCondition;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
@@ -64,6 +65,16 @@ public class ConditionService {
return this.valid(flow, conditions, conditionContext);
}
/**
* Check that all conditions are valid.
* Warning, this method throws if a condition cannot be evaluated.
*/
public boolean isValid(List<ScheduleCondition> conditions, ConditionContext conditionContext) throws InternalException {
return conditions
.stream()
.allMatch(throwPredicate(condition -> condition.test(conditionContext)));
}
/**
* Check that all conditions are valid.
* Warning, this method throws if a condition cannot be evaluated.

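Editor's note, not part of the changeset: a minimal sketch of the new isValid overload, mirroring the Schedule.validateScheduleCondition hunk near the end of this changeset:
// Editor's sketch; the bean lookup mirrors the Schedule hunk below.
ConditionService conditionService = ((DefaultRunContext) conditionContext.getRunContext())
    .getApplicationContext().getBean(ConditionService.class);
boolean ok = conditionService.isValid(scheduleConditions, conditionContext); // may throw InternalException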
View File

@@ -92,14 +92,7 @@ public class FlowService {
return flowRepository
.orElseThrow(() -> new IllegalStateException("Cannot perform operation on flow. Cause: No FlowRepository"));
}
private static String formatValidationError(String message) {
if (message.startsWith("Illegal flow source:")) {
// Already formatted by YamlParser, return as-is
return message;
}
// For other validation errors, provide context
return "Validation error: " + message;
}
/**
* Evaluates all checks defined in the given flow using the provided inputs.
* <p>
@@ -181,12 +174,10 @@ public class FlowService {
modelValidator.validate(pluginDefaultService.injectAllDefaults(flow, false));
} catch (ConstraintViolationException e) {
String friendlyMessage = formatValidationError(e.getMessage());
validateConstraintViolationBuilder.constraints(friendlyMessage);
validateConstraintViolationBuilder.constraints(e.getMessage());
} catch (FlowProcessingException e) {
if (e.getCause() instanceof ConstraintViolationException cve) {
String friendlyMessage = formatValidationError(cve.getMessage());
validateConstraintViolationBuilder.constraints(friendlyMessage);
if (e.getCause() instanceof ConstraintViolationException) {
validateConstraintViolationBuilder.constraints(e.getMessage());
} else {
Throwable cause = e.getCause() != null ? e.getCause() : e;
validateConstraintViolationBuilder.constraints("Unable to validate the flow: " + cause.getMessage());
@@ -588,4 +579,4 @@ public class FlowService {
private IllegalStateException noRepositoryException() {
return new IllegalStateException("No repository found. Make sure the `kestra.repository.type` property is set.");
}
}
}

View File

@@ -1,5 +1,6 @@
package io.kestra.core.storages;
import io.kestra.core.repositories.NamespaceFileMetadataRepositoryInterface;
import io.kestra.core.services.NamespaceService;
import jakarta.annotation.Nullable;
import org.slf4j.Logger;
@@ -271,13 +272,7 @@ public class InternalStorage implements Storage {
return this.storage.put(context.getTenantId(), context.getNamespace(), resolve, new BufferedInputStream(inputStream));
}
@Override
public Optional<StorageContext.Task> getTaskStorageContext() {
return Optional.ofNullable((context instanceof StorageContext.Task task) ? task : null);
}
@Override
public List<FileAttributes> list(URI uri) throws IOException {
return this.storage.list(context.getTenantId(), context.getNamespace(), uri);
}
}

View File

@@ -173,6 +173,4 @@ public interface Storage {
* @return the task storage context
*/
Optional<StorageContext.Task> getTaskStorageContext();
List<FileAttributes> list(URI uri) throws IOException;
}

View File

@@ -1,39 +1,13 @@
package io.kestra.core.utils;
import io.kestra.core.models.Setting;
import io.kestra.core.repositories.SettingRepositoryInterface;
import jakarta.annotation.PostConstruct;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import java.util.Optional;
@Singleton
public class EditionProvider {
public Edition get() {
return Edition.OSS;
}
@Inject
private Optional<SettingRepositoryInterface> settingRepository; // repositories are not always there on unit tests
@PostConstruct
void start() {
// check the edition in the settings and update it if needed; we don't use it yet, but it would allow us to detect incompatible updates later if needed
settingRepository.ifPresent(settingRepositoryInterface -> persistEdition(settingRepositoryInterface, get()));
}
private void persistEdition(SettingRepositoryInterface settingRepositoryInterface, Edition edition) {
Optional<Setting> versionSetting = settingRepositoryInterface.findByKey(Setting.INSTANCE_EDITION);
if (versionSetting.isEmpty() || !versionSetting.get().getValue().equals(edition)) {
settingRepositoryInterface.save(Setting.builder()
.key(Setting.INSTANCE_EDITION)
.value(edition)
.build()
);
}
}
public enum Edition {
OSS,
EE

View File

@@ -11,11 +11,6 @@ import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import lombok.extern.slf4j.Slf4j;
/**
* Utility class to create {@link java.util.concurrent.ExecutorService} instances.
* WARNING: those instances will use the {@link ThreadUncaughtExceptionHandler} which terminates Kestra if an error occurs in any thread,
* so it should not be used inside plugins.
*/
@Singleton
@Slf4j
public class ExecutorsUtils {

View File

@@ -65,9 +65,10 @@ public class ListUtils {
}
public static List<String> convertToListString(Object object){
return convertToList(object)
.stream()
.map(Object::toString)
.toList();
if (object instanceof List<?> list && (list.isEmpty() || list.getFirst() instanceof String)) {
return (List<String>) list;
} else {
throw new IllegalArgumentException("%s is not an instance of List of String".formatted(object));
}
}
}

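Editor's note, not part of the changeset: the rewritten convertToListString no longer stringifies arbitrary elements; it only accepts a list whose first element is already a String (or an empty list) and throws otherwise. A worked example with illustrative values:
// Editor's sketch; values are illustrative.
ListUtils.convertToListString(List.of("a", "b")); // returns ["a", "b"]
ListUtils.convertToListString(List.of(1, 2));     // now throws IllegalArgumentException;
// the removed implementation returned ["1", "2"] via Object::toString.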
View File

@@ -10,7 +10,7 @@ import org.slf4j.LoggerFactory;
import org.slf4j.event.Level;
/**
* Utility class for server logging
* Utility class for logging
*/
public final class Logs {
@@ -18,7 +18,7 @@ public final class Logs {
private static final String EXECUTION_PREFIX_WITH_TENANT = FLOW_PREFIX_WITH_TENANT + "[execution: {}] ";
private static final String TRIGGER_PREFIX_WITH_TENANT = FLOW_PREFIX_WITH_TENANT + "[trigger: {}] ";
private static final String TASKRUN_PREFIX_WITH_TENANT = FLOW_PREFIX_WITH_TENANT + "[task: {}] [execution: {}] [taskrun: {}] ";
private Logs() {}
public static void logExecution(FlowId flow, Logger logger, Level level, String message, Object... args) {
@@ -29,7 +29,7 @@ public final class Logs {
}
/**
* Log an {@link Execution} via the executor logger named: 'executor.{tenantId}.{namespace}.{flowId}'.
* Log an {@link Execution} via the execution logger named: 'execution.{flowId}'.
*/
public static void logExecution(Execution execution, Level level, String message, Object... args) {
Logger logger = logger(execution);
@@ -43,7 +43,7 @@ public final class Logs {
}
/**
* Log a {@link TriggerContext} via the scheduler logger named: 'trigger.{tenantId}.{namespace}.{flowId}.{triggerId}'.
* Log a {@link TriggerContext} via the trigger logger named: 'trigger.{flowId}.{triggerId}'.
*/
public static void logTrigger(TriggerContext triggerContext, Level level, String message, Object... args) {
Logger logger = logger(triggerContext);
@@ -57,7 +57,7 @@ public final class Logs {
}
/**
* Log a {@link TaskRun} via the worker logger named: 'worker.{tenantId}.{namespace}.{flowId}.{taskId}'.
* Log a {@link TaskRun} via the taskRun logger named: 'task.{flowId}.{taskId}'.
*/
public static void logTaskRun(TaskRun taskRun, Level level, String message, Object... args) {
String prefix = TASKRUN_PREFIX_WITH_TENANT;
@@ -73,19 +73,19 @@ public final class Logs {
private static Logger logger(TaskRun taskRun) {
return LoggerFactory.getLogger(
"worker." + taskRun.getTenantId() + "." + taskRun.getNamespace() + "." + taskRun.getFlowId() + "." + taskRun.getTaskId()
"task." + taskRun.getFlowId() + "." + taskRun.getTaskId()
);
}
private static Logger logger(TriggerContext triggerContext) {
return LoggerFactory.getLogger(
"scheduler." + triggerContext.getTenantId() + "." + triggerContext.getNamespace() + "." + triggerContext.getFlowId() + "." + triggerContext.getTriggerId()
"trigger." + triggerContext.getFlowId() + "." + triggerContext.getTriggerId()
);
}
private static Logger logger(Execution execution) {
return LoggerFactory.getLogger(
"executor." + execution.getTenantId() + "." + execution.getNamespace() + "." + execution.getFlowId()
"execution." + execution.getFlowId()
);
}
}

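Editor's note, not part of the changeset: after this hunk, logger names are keyed by flow ID only; the removed names also embedded tenant and namespace. A minimal sketch with hypothetical identifiers:
// Editor's sketch; "myFlow" and "myTask" are hypothetical.
// Removed name: "worker.myTenant.myNamespace.myFlow.myTask"
// Added name:   "task.myFlow.myTask"
Logger logger = LoggerFactory.getLogger("task." + taskRun.getFlowId() + "." + taskRun.getTaskId());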
View File

@@ -120,10 +120,7 @@ public class MapUtils {
private static Collection<?> mergeCollections(Collection<?> colA, Collection<?> colB) {
List<Object> merged = new ArrayList<>(colA.size() + colB.size());
merged.addAll(colA);
if (!colB.isEmpty()) {
List<?> filtered = colB.stream().filter(it -> !colA.contains(it)).toList();
merged.addAll(filtered);
}
merged.addAll(colB);
return merged;
}

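Editor's note, not part of the changeset: this hunk drops the de-duplication filter, so merged collections now keep duplicates. A worked example with illustrative values:
// Editor's sketch; mergeCollections is private, so this is illustrative only.
// mergeCollections([1, 2], [2, 3])
//   removed behavior: [1, 2, 3]    (elements already in colA were filtered out of colB)
//   added behavior:   [1, 2, 2, 3] (plain concatenation, duplicates preserved)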
View File

@@ -1,12 +1,14 @@
package io.kestra.core.utils;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import io.kestra.core.models.executions.metrics.Counter;
import io.kestra.core.models.executions.metrics.Timer;
import io.kestra.core.models.tasks.FileExistComportment;
import io.kestra.core.models.tasks.NamespaceFiles;
import io.kestra.core.runners.RunContext;
import io.kestra.core.storages.NamespaceFile;
import jakarta.annotation.PostConstruct;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DurationFormatUtils;
import org.apache.commons.lang3.time.StopWatch;
@@ -17,27 +19,26 @@ import java.io.InputStream;
import java.nio.file.Path;
import java.time.Duration;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.concurrent.*;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import static io.kestra.core.utils.Rethrow.throwConsumer;
public final class NamespaceFilesUtils {
private static final int maxThreads = Math.max(Runtime.getRuntime().availableProcessors() * 4, 32);
private static final ExecutorService EXECUTOR_SERVICE = new ThreadPoolExecutor(
0,
maxThreads,
60L,
TimeUnit.SECONDS,
new SynchronousQueue<>(),
new ThreadFactoryBuilder().setNameFormat("namespace-files").build()
);
@Singleton
public class NamespaceFilesUtils {
@Inject
private ExecutorsUtils executorsUtils;
private NamespaceFilesUtils() {
// utility class pattern
private ExecutorService executorService;
@PostConstruct
public void postConstruct() {
this.executorService = executorsUtils.maxCachedThreadPool(Math.max(Runtime.getRuntime().availableProcessors() * 4, 32), "namespace-file");
}
public static void loadNamespaceFiles(
public void loadNamespaceFiles(
RunContext runContext,
NamespaceFiles namespaceFiles
)
@@ -62,11 +63,7 @@ public final class NamespaceFilesUtils {
matchedNamespaceFiles.addAll(files);
}
// Use half of the available threads to avoid impacting concurrent tasks
int parallelism = maxThreads / 2;
Flux.fromIterable(matchedNamespaceFiles)
.parallel(parallelism)
.runOn(Schedulers.fromExecutorService(EXECUTOR_SERVICE))
.doOnNext(throwConsumer(nsFile -> {
InputStream content = runContext.storage().getFile(nsFile.uri());
Path path = folderPerNamespace ?
@@ -74,7 +71,7 @@ public final class NamespaceFilesUtils {
Path.of(nsFile.path());
runContext.workingDir().putFile(path, content, fileExistComportment);
}))
.sequential()
.publishOn(Schedulers.fromExecutorService(executorService))
.blockLast();
Duration duration = stopWatch.getDuration();

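Editor's note, not part of the changeset: NamespaceFilesUtils becomes a @Singleton with an injected ExecutorsUtils instead of a static utility holding its own thread pool. A minimal sketch of the new call pattern, mirroring the WorkingDirectory hunk below:
// Editor's sketch; mirrors the WorkingDirectory hunk later in this changeset.
NamespaceFilesUtils namespaceFilesUtils = ((DefaultRunContext) runContext)
    .getApplicationContext().getBean(NamespaceFilesUtils.class);
namespaceFilesUtils.loadNamespaceFiles(runContext, namespaceFiles);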
View File

@@ -23,6 +23,7 @@ import io.kestra.core.serializers.ListOrMapOfLabelSerializer;
import io.kestra.core.services.StorageService;
import io.kestra.core.storages.FileAttributes;
import io.kestra.core.storages.StorageContext;
import io.kestra.core.storages.StorageInterface;
import io.kestra.core.storages.StorageSplitInterface;
import io.kestra.core.utils.GraphUtils;
import io.kestra.core.validations.NoSystemLabelValidation;
@@ -539,7 +540,7 @@ public class ForEachItem extends Task implements FlowableTask<VoidOutput>, Child
.numberOfBatches((Integer) taskRun.getOutputs().get(ExecutableUtils.TASK_VARIABLE_NUMBER_OF_BATCHES));
try (ByteArrayOutputStream bos = new ByteArrayOutputStream()) {
FileSerde.write(bos, runContext.inputAndOutput().renderOutputs(flow.getOutputs()));
FileSerde.write(bos, FlowInputOutput.renderFlowOutputs(flow.getOutputs(), runContext));
URI uri = runContext.storage().putFile(
new ByteArrayInputStream(bos.toByteArray()),
URI.create((String) taskRun.getOutputs().get("uri"))
@@ -601,8 +602,9 @@ public class ForEachItem extends Task implements FlowableTask<VoidOutput>, Child
String subflowOutputsBase = (String) taskOutput.get(ExecutableUtils.TASK_VARIABLE_SUBFLOW_OUTPUTS_BASE_URI);
URI subflowOutputsBaseUri = URI.create(StorageContext.KESTRA_PROTOCOL + subflowOutputsBase + "/");
if (runContext.storage().isFileExist(subflowOutputsBaseUri)) {
List<FileAttributes> list = runContext.storage().list(subflowOutputsBaseUri);
StorageInterface storage = ((DefaultRunContext) runContext).getApplicationContext().getBean(StorageInterface.class);
if (storage.exists(runContext.flowInfo().tenantId(), runContext.flowInfo().namespace(), subflowOutputsBaseUri)) {
List<FileAttributes> list = storage.list(runContext.flowInfo().tenantId(), runContext.flowInfo().namespace(), subflowOutputsBaseUri);
if (!list.isEmpty()) {
// Merge outputs from each sub-flow into a single stored in the internal storage.

View File

@@ -157,7 +157,7 @@ public class LoopUntil extends Task implements FlowableTask<LoopUntil.Output> {
public Instant nextExecutionDate(RunContext runContext, Execution execution, TaskRun parentTaskRun) throws IllegalVariableEvaluationException {
if (!this.reachedMaximums(runContext, execution, parentTaskRun, false)) {
String continueLoop = runContext.render(this.condition).skipCache().as(String.class).orElse(null);
String continueLoop = runContext.render(this.condition).as(String.class).orElse(null);
if (!TruthUtils.isTruthy(continueLoop)) {
return Instant.now().plus(runContext.render(this.getCheckFrequency().getInterval()).as(Duration.class).orElseThrow());
}

View File

@@ -63,8 +63,7 @@ import java.util.*;
- id: run_post_approval
type: io.kestra.plugin.scripts.shell.Commands
taskRunner:
type: io.kestra.plugin.core.runner.Process
runner: PROCESS
commands:
- echo "Manual approval received! Continuing the execution..."

View File

@@ -18,6 +18,7 @@ import io.kestra.core.models.tasks.ExecutableTask;
import io.kestra.core.models.tasks.Task;
import io.kestra.core.runners.DefaultRunContext;
import io.kestra.core.runners.ExecutableUtils;
import io.kestra.core.runners.FlowInputOutput;
import io.kestra.core.runners.FlowMetaStoreInterface;
import io.kestra.core.runners.RunContext;
import io.kestra.core.runners.SubflowExecution;
@@ -37,6 +38,7 @@ import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.ToString;
import lombok.experimental.SuperBuilder;
import org.slf4j.event.Level;
import java.time.ZonedDateTime;
import java.util.Collections;
@@ -244,11 +246,11 @@ public class Subflow extends Task implements ExecutableTask<Subflow.Output>, Chi
if (subflowOutputs != null && !subflowOutputs.isEmpty()) {
try {
var inputAndOutput = runContext.inputAndOutput();
Map<String, Object> rOutputs = inputAndOutput.renderOutputs(subflowOutputs);
Map<String, Object> rOutputs = FlowInputOutput.renderFlowOutputs(subflowOutputs, runContext);
if (flow.getOutputs() != null) {
rOutputs = inputAndOutput.typedOutputs(flow, execution, rOutputs);
FlowInputOutput flowInputOutput = ((DefaultRunContext)runContext).getApplicationContext().getBean(FlowInputOutput.class); // this is hacky
if (flow.getOutputs() != null && flowInputOutput != null) {
rOutputs = flowInputOutput.typedOutputs(flow, execution, rOutputs);
}
builder.outputs(rOutputs);
} catch (Exception e) {

View File

@@ -123,7 +123,7 @@ public class Switch extends Task implements FlowableTask<Switch.Output> {
}
private String rendererValue(RunContext runContext) throws IllegalVariableEvaluationException {
return runContext.render(this.value).skipCache().as(String.class).orElseThrow();
return runContext.render(this.value).as(String.class).orElseThrow();
}
@Override

View File

@@ -260,7 +260,8 @@ public class WorkingDirectory extends Sequential implements NamespaceFilesInterf
}
if (this.namespaceFiles != null && !Boolean.FALSE.equals(runContext.render(this.namespaceFiles.getEnabled()).as(Boolean.class).orElse(true))) {
NamespaceFilesUtils.loadNamespaceFiles(runContext, this.namespaceFiles);
NamespaceFilesUtils namespaceFilesUtils = ((DefaultRunContext) runContext).getApplicationContext().getBean(NamespaceFilesUtils.class);
namespaceFilesUtils.loadNamespaceFiles(runContext, this.namespaceFiles);
}
if (this.inputFiles != null) {

View File

@@ -26,28 +26,25 @@ import java.util.concurrent.atomic.AtomicLong;
@Getter
@NoArgsConstructor
@Schema(
title = "Purge namespace files for one or multiple namespaces.",
description = "This task purges namespace files (and their versions) stored in Kestra. You can restrict the purge to specific namespaces (or a namespace glob pattern), optionally include child namespaces, and filter files by a glob pattern. The purge strategy is controlled via `behavior` (e.g. keep the last N versions and/or delete versions older than a given date)."
title = "Delete expired keys globally for a specific namespace.",
description = "This task will delete expired keys from the Kestra KV store. By default, it will only delete expired keys, but you can choose to delete all keys by setting `expiredOnly` to false. You can also filter keys by a specific pattern and choose to include child namespaces."
)
@Plugin(
examples = {
@Example(
title = "Purge old versions of namespace files for a namespace tree.",
title = "Delete expired keys globally for a specific namespace, with or without including child namespaces.",
full = true,
code = """
id: purge_namespace_files
id: purge_kv_store
namespace: system
tasks:
- id: purge_files
type: io.kestra.plugin.core.namespace.PurgeFiles
- id: purge_kv
type: io.kestra.plugin.core.kv.PurgeKV
expiredOnly: true
namespaces:
- company
includeChildNamespaces: true
filePattern: "**/*.sql"
behavior:
type: version
before: "2025-01-01T00:00:00Z"
"""
)
}
@@ -119,7 +116,7 @@ public class PurgeFiles extends Task implements PurgeTask<NamespaceFile>, Runnab
@Getter
public static class Output implements io.kestra.core.models.tasks.Output {
@Schema(
title = "The number of purged namespace file versions"
title = "The number of purged KV pairs"
)
private Long size;
}

View File

@@ -1,107 +0,0 @@
package io.kestra.plugin.core.trigger;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.models.Label;
import io.kestra.core.models.conditions.ConditionContext;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.ExecutionTrigger;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.triggers.AbstractTrigger;
import io.kestra.core.models.triggers.Backfill;
import io.kestra.core.models.triggers.Schedulable;
import io.kestra.core.models.triggers.TriggerContext;
import io.kestra.core.runners.RunContext;
import io.kestra.core.services.LabelService;
import io.kestra.core.utils.ListUtils;
import java.time.ZonedDateTime;
import java.time.chrono.ChronoZonedDateTime;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
/**
* Factory class for constructing a new {@link Execution} from a {@link Schedulable} trigger.
*
* @see io.kestra.plugin.core.trigger.Schedule
* @see io.kestra.plugin.core.trigger.ScheduleOnDates
*/
final class SchedulableExecutionFactory {
static Execution createFailedExecution(Schedulable trigger, ConditionContext conditionContext, TriggerContext triggerContext) throws IllegalVariableEvaluationException {
return Execution.builder()
.id(conditionContext.getRunContext().getTriggerExecutionId())
.tenantId(triggerContext.getTenantId())
.namespace(triggerContext.getNamespace())
.flowId(triggerContext.getFlowId())
.flowRevision(conditionContext.getFlow().getRevision())
.labels(SchedulableExecutionFactory.getLabels(trigger, conditionContext.getRunContext(), triggerContext.getBackfill(), conditionContext.getFlow()))
.state(new State().withState(State.Type.FAILED))
.build();
}
static Execution createExecution(Schedulable trigger, ConditionContext conditionContext, TriggerContext triggerContext, Map<String, Object> variables, ZonedDateTime scheduleDate) throws IllegalVariableEvaluationException {
RunContext runContext = conditionContext.getRunContext();
ExecutionTrigger executionTrigger = ExecutionTrigger.of((AbstractTrigger) trigger, variables);
List<Label> labels = getLabels(trigger, runContext, triggerContext.getBackfill(), conditionContext.getFlow());
List<Label> executionLabels = new ArrayList<>(ListUtils.emptyOnNull(labels));
executionLabels.add(new Label(Label.FROM, "trigger"));
if (executionLabels.stream().noneMatch(label -> Label.CORRELATION_ID.equals(label.key()))) {
// add a correlation ID if none exist
executionLabels.add(new Label(Label.CORRELATION_ID, runContext.getTriggerExecutionId()));
}
Execution execution = Execution.builder()
.id(runContext.getTriggerExecutionId())
.tenantId(triggerContext.getTenantId())
.namespace(triggerContext.getNamespace())
.flowId(triggerContext.getFlowId())
.flowRevision(conditionContext.getFlow().getRevision())
.variables(conditionContext.getFlow().getVariables())
.labels(executionLabels)
.state(new State())
.trigger(executionTrigger)
.scheduleDate(Optional.ofNullable(scheduleDate).map(ChronoZonedDateTime::toInstant).orElse(null))
.build();
Map<String, Object> allInputs = getInputs(trigger, runContext, triggerContext.getBackfill());
// add inputs and inject defaults (FlowInputOutput handles defaults internally)
execution = execution.withInputs(runContext.inputAndOutput().readInputs(conditionContext.getFlow(), execution, allInputs));
return execution;
}
private static Map<String, Object> getInputs(Schedulable trigger, RunContext runContext, Backfill backfill) throws IllegalVariableEvaluationException {
Map<String, Object> inputs = new HashMap<>();
if (trigger.getInputs() != null) {
inputs.putAll(runContext.render(trigger.getInputs()));
}
if (backfill != null && backfill.getInputs() != null) {
inputs.putAll(runContext.render(backfill.getInputs()));
}
return inputs;
}
private static List<Label> getLabels(Schedulable trigger, RunContext runContext, Backfill backfill, FlowInterface flow) throws IllegalVariableEvaluationException {
List<Label> labels = LabelService.fromTrigger(runContext, flow, (AbstractTrigger) trigger);
if (backfill != null && backfill.getLabels() != null) {
for (Label label : backfill.getLabels()) {
final var value = runContext.render(label.value());
if (value != null) {
labels.add(new Label(label.key(), value));
}
}
}
return labels;
}
}

View File

@@ -6,7 +6,9 @@ import com.cronutils.model.time.ExecutionTime;
import com.cronutils.parser.CronParser;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableMap;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.exceptions.InternalException;
import io.kestra.core.models.Label;
import io.kestra.core.models.annotations.Example;
import io.kestra.core.models.annotations.Plugin;
import io.kestra.core.models.annotations.PluginProperty;
@@ -14,8 +16,12 @@ import io.kestra.core.models.conditions.Condition;
import io.kestra.core.models.conditions.ConditionContext;
import io.kestra.core.models.conditions.ScheduleCondition;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.triggers.*;
import io.kestra.core.runners.DefaultRunContext;
import io.kestra.core.runners.RunContext;
import io.kestra.core.services.ConditionService;
import io.kestra.core.services.LabelService;
import io.kestra.core.utils.ListUtils;
import io.kestra.core.validations.ScheduleValidation;
import io.kestra.core.validations.TimezoneId;
@@ -23,7 +29,6 @@ import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.Valid;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Null;
import lombok.AccessLevel;
import lombok.*;
import lombok.experimental.SuperBuilder;
import lombok.extern.slf4j.Slf4j;
@@ -35,8 +40,6 @@ import java.time.temporal.ChronoUnit;
import java.util.*;
import java.util.stream.Stream;
import static io.kestra.core.utils.Rethrow.throwPredicate;
@Slf4j
@SuperBuilder
@ToString
@@ -221,7 +224,11 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
@PluginProperty
@Deprecated
private List<ScheduleCondition> scheduleConditions;
@Schema(
title = "The inputs to pass to the scheduled flow"
)
@PluginProperty(dynamic = true)
private Map<String, Object> inputs;
@Schema(
@@ -241,7 +248,13 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
@PluginProperty
@Deprecated
private Map<String, Object> backfill;
@Schema(
title = "Action to take in the case of missed schedules",
description = "`ALL` will recover all missed schedules, `LAST` will only recover the last missing one, `NONE` will not recover any missing schedule.\n" +
"The default is `ALL` unless a different value is configured using the global plugin configuration."
)
@PluginProperty
private RecoverMissedSchedules recoverMissedSchedules;
@Override
@@ -390,11 +403,20 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
if (!conditionResults) {
return Optional.empty();
}
} catch (InternalException ie) {
} catch(InternalException ie) {
// validate schedule condition can fail to render variables
// in this case, we return a failed execution so the trigger is not evaluated each second
runContext.logger().error("Unable to evaluate the Schedule trigger '{}'", this.getId(), ie);
return Optional.of(SchedulableExecutionFactory.createFailedExecution(this, conditionContext, triggerContext));
Execution execution = Execution.builder()
.id(runContext.getTriggerExecutionId())
.tenantId(triggerContext.getTenantId())
.namespace(triggerContext.getNamespace())
.flowId(triggerContext.getFlowId())
.flowRevision(conditionContext.getFlow().getRevision())
.labels(generateLabels(runContext, conditionContext, backfill))
.state(new State().withState(State.Type.FAILED))
.build();
return Optional.of(execution);
}
// recalculate true output for previous and next based on conditions
@@ -408,12 +430,14 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
variables = scheduleDates.toMap();
}
Execution execution = SchedulableExecutionFactory.createExecution(
Execution execution = TriggerService.generateScheduledExecution(
this,
conditionContext,
triggerContext,
generateLabels(runContext, conditionContext, backfill),
generateInputs(runContext, backfill),
variables,
null
Optional.empty()
);
return Optional.of(execution);
@@ -424,6 +448,34 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
return parser.parse(this.cron);
}
private List<Label> generateLabels(RunContext runContext, ConditionContext conditionContext, Backfill backfill) throws IllegalVariableEvaluationException {
List<Label> labels = LabelService.fromTrigger(runContext, conditionContext.getFlow(), this);
if (backfill != null && backfill.getLabels() != null) {
for (Label label : backfill.getLabels()) {
final var value = runContext.render(label.value());
if (value != null) {
labels.add(new Label(label.key(), value));
}
}
}
return labels;
}
private Map<String, Object> generateInputs(RunContext runContext, Backfill backfill) throws IllegalVariableEvaluationException {
Map<String, Object> inputs = new HashMap<>();
if (this.inputs != null) {
inputs.putAll(runContext.render(this.inputs));
}
if (backfill != null && backfill.getInputs() != null) {
inputs.putAll(runContext.render(backfill.getInputs()));
}
return inputs;
}
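Note the merge order in generateInputs: backfill inputs are applied last, so they override trigger-level inputs on key conflicts. A minimal, self-contained sketch of that map semantics (the keys and values are hypothetical):

import java.util.HashMap;
import java.util.Map;

class InputMergeSketch {
    public static void main(String[] args) {
        Map<String, Object> inputs = new HashMap<>();
        inputs.putAll(Map.of("env", "prod", "limit", 10)); // trigger-level inputs
        inputs.putAll(Map.of("env", "backfill"));          // backfill inputs win on conflict
        System.out.println(inputs);                        // contains env=backfill and limit=10
    }
}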
private Optional<Output> scheduleDates(ExecutionTime executionTime, ZonedDateTime date) {
Optional<ZonedDateTime> next = executionTime.nextExecution(date.minus(Duration.ofSeconds(1)));
@@ -497,9 +549,9 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
Optional<ZonedDateTime> truePreviousNextDateWithCondition(ExecutionTime executionTime, ConditionContext conditionContext, ZonedDateTime toTestDate, boolean next) throws InternalException {
int upperYearBound = ZonedDateTime.now().getYear() + 10;
int lowerYearBound = ZonedDateTime.now().getYear() - 10;
while ((next && toTestDate.getYear() < upperYearBound) || (!next && toTestDate.getYear() > lowerYearBound)) {
Optional<ZonedDateTime> currentDate = next ?
executionTime.nextExecution(toTestDate) :
executionTime.lastExecution(toTestDate);
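As a side note, this bounded search simply walks cron occurrences until a candidate matches or the cursor leaves the ±10-year window. A standalone sketch using cron-utils, which this class already relies on (the cron expression and the print are illustrative only):

import com.cronutils.model.CronType;
import com.cronutils.model.definition.CronDefinitionBuilder;
import com.cronutils.model.time.ExecutionTime;
import com.cronutils.parser.CronParser;
import java.time.ZonedDateTime;
import java.util.Optional;

class BoundedCronScanSketch {
    public static void main(String[] args) {
        CronParser parser = new CronParser(CronDefinitionBuilder.instanceDefinitionFor(CronType.UNIX));
        ExecutionTime executionTime = ExecutionTime.forCron(parser.parse("0 9 * * 1")); // Mondays 09:00
        ZonedDateTime cursor = ZonedDateTime.now();
        int upperYearBound = cursor.getYear() + 10;
        while (cursor.getYear() < upperYearBound) {
            Optional<ZonedDateTime> next = executionTime.nextExecution(cursor);
            if (next.isEmpty()) {
                break; // no further occurrence
            }
            cursor = next.get();
            // A real implementation would test schedule conditions here and
            // return the first matching date; the sketch just shows one candidate.
            System.out.println("candidate: " + cursor);
            break;
        }
    }
}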
@@ -555,10 +607,11 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
private boolean validateScheduleCondition(ConditionContext conditionContext) throws InternalException {
if (conditions != null) {
return conditions.stream()
.filter(c -> c instanceof ScheduleCondition)
.map(c -> (ScheduleCondition) c)
.allMatch(throwPredicate(condition -> condition.test(conditionContext)));
ConditionService conditionService = ((DefaultRunContext)conditionContext.getRunContext()).getApplicationContext().getBean(ConditionService.class);
return conditionService.isValid(
conditions.stream().filter(c -> c instanceof ScheduleCondition).map(c -> (ScheduleCondition) c).toList(),
conditionContext
);
}
return true;

View File

@@ -10,6 +10,7 @@ import io.kestra.core.models.property.Property;
import io.kestra.core.models.tasks.VoidOutput;
import io.kestra.core.models.triggers.*;
import io.kestra.core.runners.RunContext;
import io.kestra.core.services.LabelService;
import io.kestra.core.validations.TimezoneId;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.constraints.NotNull;
@@ -22,10 +23,7 @@ import java.time.Duration;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.temporal.ChronoUnit;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.*;
import java.util.function.Predicate;
import static io.kestra.core.utils.Rethrow.throwFunction;
@@ -47,7 +45,11 @@ public class ScheduleOnDates extends AbstractTrigger implements Schedulable, Tri
@Builder.Default
@Null
private final Duration interval = null;
@Schema(
title = "The inputs to pass to the scheduled flow"
)
@PluginProperty(dynamic = true)
private Map<String, Object> inputs;
@TimezoneId
@@ -61,24 +63,31 @@ public class ScheduleOnDates extends AbstractTrigger implements Schedulable, Tri
@NotNull
private Property<List<ZonedDateTime>> dates;
@Schema(
title = "Action to take in the case of missed schedules",
description = "`ALL` will recover all missed schedules, `LAST` will only recovered the last missing one, `NONE` will not recover any missing schedule.\n" +
"The default is `ALL` unless a different value is configured using the global plugin configuration."
)
@PluginProperty
private RecoverMissedSchedules recoverMissedSchedules;
@Override
public Optional<Execution> evaluate(ConditionContext conditionContext, TriggerContext triggerContext) throws Exception {
RunContext runContext = conditionContext.getRunContext();
ZonedDateTime lastEvaluation = triggerContext.getDate();
Optional<ZonedDateTime> nextDate = nextDate(runContext, date -> date.isEqual(lastEvaluation) || date.isAfter(lastEvaluation));
if (nextDate.isPresent()) {
log.info("Schedule execution on {}", nextDate.get());
Execution execution = SchedulableExecutionFactory.createExecution(
Execution execution = TriggerService.generateScheduledExecution(
this,
conditionContext,
triggerContext,
LabelService.fromTrigger(runContext, conditionContext.getFlow(), this),
this.inputs != null ? runContext.render(this.inputs) : Collections.emptyMap(),
Collections.emptyMap(),
nextDate.orElse(null)
nextDate
);
return Optional.of(execution);
@@ -88,21 +97,29 @@ public class ScheduleOnDates extends AbstractTrigger implements Schedulable, Tri
}
@Override
public ZonedDateTime nextEvaluationDate(ConditionContext conditionContext, Optional<? extends TriggerContext> triggerContext) {
return triggerContext
.map(ctx -> ctx.getBackfill() != null ? ctx.getBackfill().getCurrentDate() : ctx.getDate())
.map(this::withTimeZone)
.or(() -> Optional.of(ZonedDateTime.now()))
.flatMap(dt -> {
try {
return nextDate(conditionContext.getRunContext(), date -> date.isAfter(dt));
} catch (IllegalVariableEvaluationException e) {
log.warn("Failed to evaluate schedule dates for trigger '{}': {}", this.getId(), e.getMessage());
throw new InvalidTriggerConfigurationException("Failed to evaluate schedule 'dates'. Cause: " + e.getMessage());
}
}).orElseGet(() -> ZonedDateTime.now().plusYears(1));
public ZonedDateTime nextEvaluationDate(ConditionContext conditionContext, Optional<? extends TriggerContext> last) {
try {
return last
.map(throwFunction(context ->
nextDate(conditionContext.getRunContext(), date -> date.isAfter(context.getDate()))
.orElse(ZonedDateTime.now().plusYears(1))
))
.orElse(conditionContext.getRunContext()
.render(dates)
.asList(ZonedDateTime.class)
.stream()
.sorted()
.findFirst()
.orElse(ZonedDateTime.now()))
.truncatedTo(ChronoUnit.SECONDS);
} catch (IllegalVariableEvaluationException e) {
log.warn("Failed to evaluate schedule dates for trigger '{}': {}", this.getId(), e.getMessage());
return ZonedDateTime.now().plusYears(1);
}
}
@Override
public ZonedDateTime nextEvaluationDate() {
// TODO this may be the next date from now?
@@ -122,17 +139,9 @@ public class ScheduleOnDates extends AbstractTrigger implements Schedulable, Tri
return previousDates.isEmpty() ? ZonedDateTime.now() : previousDates.getFirst();
}
private ZonedDateTime withTimeZone(ZonedDateTime date) {
if (this.timezone == null) {
return date;
}
return date.withZoneSameInstant(ZoneId.of(this.timezone));
}
private Optional<ZonedDateTime> nextDate(RunContext runContext, Predicate<ZonedDateTime> predicate) throws IllegalVariableEvaluationException {
return runContext.render(dates)
.asList(ZonedDateTime.class).stream().sorted()
.filter(predicate)
private Optional<ZonedDateTime> nextDate(RunContext runContext, Predicate<ZonedDateTime> filter) throws IllegalVariableEvaluationException {
return runContext.render(dates).asList(ZonedDateTime.class).stream().sorted()
.filter(date -> filter.test(date))
.map(throwFunction(date -> timezone == null ? date : date.withZoneSameInstant(ZoneId.of(runContext.render(timezone)))))
.findFirst()
.map(date -> date.truncatedTo(ChronoUnit.SECONDS));
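The refactored nextDate boils down to: sort the configured dates, keep the first one passing the filter, and truncate to second precision. A self-contained sketch of that selection, without the timezone handling (names are illustrative):

import java.time.ZonedDateTime;
import java.time.temporal.ChronoUnit;
import java.util.List;
import java.util.Optional;
import java.util.function.Predicate;

class NextDateSketch {
    static Optional<ZonedDateTime> nextDate(List<ZonedDateTime> dates, Predicate<ZonedDateTime> filter) {
        return dates.stream()
            .sorted()                                     // earliest date first
            .filter(filter)                               // e.g. date -> date.isAfter(lastEvaluation)
            .findFirst()
            .map(d -> d.truncatedTo(ChronoUnit.SECONDS)); // stable second-level precision
    }
}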

View File

@@ -9,14 +9,10 @@
<property name="pattern" value="%date{HH:mm:ss}.%ms %highlight(%-5.5level) %magenta(%-12.36thread) %cyan(%-12.36logger{36}) %msg%n" />
<logger name="io.kestra" level="INFO" />
<!-- Flow execution logs - disabled by default -->
<logger name="flow" level="OFF" />
<!-- Server loggers -->
<logger name="worker" level="INFO" />
<logger name="executor" level="INFO" />
<logger name="scheduler" level="INFO" />
<logger name="flow" level="INFO" />
<logger name="task" level="INFO" />
<logger name="execution" level="INFO" />
<logger name="trigger" level="INFO" />
<logger name="io.kestra.ee.runner.kafka.services.KafkaConsumerService" level="WARN" />
<logger name="io.kestra.ee.runner.kafka.services.KafkaProducerService" level="WARN" />

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.chart
name: "chart"
title: "Chart"
description: "Tasks that render dashboard charts from Kestra data sources."
body: "Use these chart widgets to visualize metrics, executions, or flow trends in dashboards; pair them with dashboard data queries and configure aggregations, groupings, and chart options for Bar, Pie, Time Series, KPI, or Table outputs."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.condition
name: "condition"
title: "Condition"
description: "Tasks that evaluate conditions to control flow execution or triggers."
body: "Use these predicates to gate tasks or triggers based on time windows, calendars, execution metadata, labels, namespaces, retries, or custom expressions; configure required parameters such as allowed states, namespaces, date ranges, or JEXL expressions to return a true/false result."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.data
name: "data"
title: "Data"
description: "Tasks that fetch Kestra executions, flows, logs, metrics, and triggers as datasets for dashboards."
body: "These data providers query Kestra repositories with filters and aggregations to feed dashboard charts; configure columns and fields (such as namespace, state, timestamp, or labels) plus any filters to shape the returned dataset for visualization."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.debug
name: "debug"
title: "Debug"
description: "Tasks that emit debug output while you develop a flow."
body: "Echo and Return help inspect variables and payloads or short-circuit execution during testing; provide the message or value to output so downstream tasks can see exactly what is being passed around."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.execution
name: "execution"
title: "Execution"
description: "Tasks that manage the lifecycle and context of a running execution."
body: "Use these tasks to assert expectations, set or unset variables, add labels, fail, exit, resume, or purge executions; supply required properties such as variable maps, label key/values, or retention rules before altering execution state."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.flow
name: "flow"
title: "Flow"
description: "Tasks that orchestrate control flow within a Kestra pipeline."
body: "Sequence, branch, loop, parallelize, or nest subflows/templates using these primitives; define embedded task lists, values for switches, iteration collections, working directories, and loop exit criteria to structure complex workflows cleanly."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.http
name: "http"
title: "HTTP"
description: "Tasks that interact with HTTP endpoints."
body: "Perform requests, downloads, or webhook triggers with configurable methods, headers, authentication, and payloads; provide the target URI plus any body or query parameters, and use response handling options to store results for downstream tasks."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core
name: "core"
title: "Core Plugins and tasks"
description: "Tasks that provide Kestra's built-in orchestration, I/O, and observability capabilities."
body: "Core plugins cover control-flow, execution management, triggers, storage, HTTP, metrics, logging, templating, and dashboard widgets; combine these foundational tasks to build reliable workflows without adding external dependencies."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.kv
name: "kv"
title: "KV"
description: "Tasks that manage key-value pairs in Kestra's KV store."
body: "Set, get, list, version, and delete namespaced keys to share state across flows; specify the key path, value for writes, and optional namespace or TTL to control how data is stored, retrieved, and purged."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.log
name: "log"
title: "Log"
description: "Tasks that write, fetch, or purge Kestra logs."
body: "Emit structured log messages, retrieve stored logs, or clean up log storage; provide message content or log query filters and consider namespace or execution scoping when purging."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.metric
name: "metric"
title: "Metric"
description: "Tasks that publish custom metrics from flows."
body: "Send counters, gauges, and timing metrics to Kestra's metric store for dashboards and alerts; define the metric name, type, value, labels, and optional timestamp to record meaningful telemetry."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.namespace
name: "namespace"
title: "Namespace"
description: "Tasks that manage namespace files and versions."
body: "Upload, download, delete, purge, or version files stored in a namespace—useful for shipping assets or configs with flows; set the target namespace, paths or glob patterns, and purge behavior to control stored artifacts."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.output
name: "output"
title: "Output"
description: "Tasks that expose outputs from a flow."
body: "Use OutputValues to publish key-value outputs for downstream tasks or subflows; declare the output map and data types that consuming tasks should read."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.runner
name: "runner"
title: "Runner"
description: "Tasks that execute commands on the Kestra worker."
body: "Run shell processes with configurable command, environment, working directory, and input/output handling; ensure commands are idempotent and set expected exit codes or resource needs when invoking external binaries."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.storage
name: "storage"
title: "Storage"
description: "Tasks that manipulate files in Kestra's internal storage."
body: "Write, delete, concatenate, split, deduplicate, filter, reverse, size, or list files used by executions; provide source and target storage URIs and any encoding or line-handling options to transform stored data safely."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.templating
name: "templating"
title: "Templating"
description: "Tasks that render dynamic task specifications from templates."
body: "TemplatedTask lets you supply a Pebble-rendered YAML spec that is parsed and executed at runtime; provide the `spec` property with a valid runnable task definition and avoid recursive templating when composing dynamic tasks."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.trigger
name: "trigger"
title: "Trigger"
description: "Tasks that start flows from schedules or events."
body: "Define cron-based schedules, specific date triggers, webhooks, namespace flow triggers, or toggles; set required properties like cron expressions, webhook secrets, and target flow references to control when executions fire."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -170,11 +170,10 @@ class JsonSchemaGeneratorTest {
Map<String, Object> jsonSchema = jsonSchemaGenerator.generate(AbstractTrigger.class, AbstractTrigger.class);
assertThat((Map<String, Object>) jsonSchema.get("properties"), allOf(
Matchers.aMapWithSize(4),
Matchers.aMapWithSize(3),
hasKey("conditions"),
hasKey("stopAfter"),
hasKey("type"),
hasKey("allowConcurrent")
hasKey("type")
));
});
}

View File

@@ -134,47 +134,4 @@ class LabelTest {
Optional<ConstraintViolationException> emptyKeyLabelResult = modelValidator.isValid(new Label("", "bar"));
assertThat(emptyKeyLabelResult.isPresent()).isTrue();
}
@Test
void shouldValidateValidLabelKeys() {
// Valid keys: start with lowercase; may contain letters, numbers, hyphens, underscores, periods
assertThat(modelValidator.isValid(new Label("foo", "bar")).isPresent()).isFalse();
assertThat(modelValidator.isValid(new Label("foo-bar", "value")).isPresent()).isFalse();
assertThat(modelValidator.isValid(new Label("foo_bar", "value")).isPresent()).isFalse();
assertThat(modelValidator.isValid(new Label("foo123", "value")).isPresent()).isFalse();
assertThat(modelValidator.isValid(new Label("foo-bar_baz123", "value")).isPresent()).isFalse();
assertThat(modelValidator.isValid(new Label("a", "value")).isPresent()).isFalse();
assertThat(modelValidator.isValid(new Label("foo.bar", "value")).isPresent()).isFalse(); // dot is allowed
}
@Test
void shouldRejectInvalidLabelKeys() {
Optional<ConstraintViolationException> spaceResult = modelValidator.isValid(new Label("foo bar", "value"));
assertThat(spaceResult.isPresent()).isTrue();
Optional<ConstraintViolationException> uppercaseResult = modelValidator.isValid(new Label("Foo", "value"));
assertThat(uppercaseResult.isPresent()).isTrue();
Optional<ConstraintViolationException> emojiResult = modelValidator.isValid(new Label("💩", "value"));
assertThat(emojiResult.isPresent()).isTrue();
Optional<ConstraintViolationException> atSignResult = modelValidator.isValid(new Label("foo@bar", "value"));
assertThat(atSignResult.isPresent()).isTrue();
Optional<ConstraintViolationException> colonResult = modelValidator.isValid(new Label("foo:bar", "value"));
assertThat(colonResult.isPresent()).isTrue();
Optional<ConstraintViolationException> hyphenStartResult = modelValidator.isValid(new Label("-foo", "value"));
assertThat(hyphenStartResult.isPresent()).isTrue();
Optional<ConstraintViolationException> underscoreStartResult = modelValidator.isValid(new Label("_foo", "value"));
assertThat(underscoreStartResult.isPresent()).isTrue();
Optional<ConstraintViolationException> zeroResult = modelValidator.isValid(new Label("0", "value"));
assertThat(zeroResult.isPresent()).isTrue();
Optional<ConstraintViolationException> digitStartResult = modelValidator.isValid(new Label("9test", "value"));
assertThat(digitStartResult.isPresent()).isTrue();
}
}
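Taken together, the accepted and rejected keys above are consistent with a pattern like `^[a-z][a-z0-9._-]*$`. A quick sketch of that inference (an assumption drawn from the test cases, not necessarily the exact constraint Kestra enforces):

import java.util.regex.Pattern;

class LabelKeySketch {
    // Starts with a lowercase letter; then lowercase letters, digits, '.', '_' or '-'.
    static final Pattern KEY = Pattern.compile("^[a-z][a-z0-9._-]*$");

    public static void main(String[] args) {
        System.out.println(KEY.matcher("foo-bar_baz123").matches()); // true
        System.out.println(KEY.matcher("9test").matches());          // false
        System.out.println(KEY.matcher("Foo").matches());            // false
    }
}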

View File

@@ -61,9 +61,6 @@ public class QueryFilterTest {
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.ENDS_WITH).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.CONTAINS).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.REGEX).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.IN).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.NOT_IN).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.PREFIX).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.START_DATE).operation(Op.EQUALS).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.START_DATE).operation(Op.NOT_EQUALS).build(), Resource.EXECUTION),
@@ -171,6 +168,9 @@ public class QueryFilterTest {
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.LESS_THAN).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.GREATER_THAN_OR_EQUAL_TO).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.LESS_THAN_OR_EQUAL_TO).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.IN).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.NOT_IN).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.PREFIX).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.START_DATE).operation(Op.IN).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.START_DATE).operation(Op.NOT_IN).build(), Resource.EXECUTION),

View File

@@ -185,21 +185,4 @@ class FlowTest {
return YamlParser.parse(file, Flow.class);
}
@Test
void illegalNamespaceUpdate() {
Flow original = Flow.builder()
.id("my-flow")
.namespace("io.kestra.prod")
.tasks(List.of(Log.builder().id("log").type(Log.class.getName()).message("hello").build()))
.build();
Flow updated = original.toBuilder()
.namespace("io.kestra.dev")
.build();
Optional<ConstraintViolationException> validate = original.validateUpdate(updated);
assertThat(validate.isPresent()).isTrue();
assertThat(validate.get().getMessage()).contains("Illegal namespace update");
}
}

View File

@@ -60,15 +60,6 @@ class SystemInformationReportTest {
return setting;
}
@Override
public Setting internalSave(Setting setting) throws ConstraintViolationException {
if (setting.getKey().equals(Setting.INSTANCE_UUID)) {
UUID = setting.getValue();
}
return setting;
}
@Override
public Setting delete(Setting setting) {
return setting;

View File

@@ -1,9 +1,9 @@
package io.kestra.core.repositories;
import com.devskiller.friendly_id.FriendlyId;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.google.common.collect.ImmutableMap;
import io.kestra.core.exceptions.InvalidQueryFiltersException;
import io.kestra.core.junit.annotations.FlakyTest;
import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.models.Label;
import io.kestra.core.models.QueryFilter;
@@ -24,6 +24,7 @@ import io.kestra.core.models.flows.State.Type;
import io.kestra.core.models.property.Property;
import io.kestra.core.models.tasks.ResolvedTask;
import io.kestra.core.repositories.ExecutionRepositoryInterface.ChildFilter;
import io.kestra.core.serializers.JacksonMapper;
import io.kestra.core.utils.IdUtils;
import io.kestra.core.utils.NamespaceUtils;
import io.kestra.core.utils.TestsUtils;
@@ -41,17 +42,18 @@ import org.junit.jupiter.params.provider.MethodSource;
import org.slf4j.event.Level;
import java.io.IOException;
import java.time.Duration;
import java.time.Instant;
import java.time.ZonedDateTime;
import java.sql.Timestamp;
import java.time.*;
import java.time.format.DateTimeFormatter;
import java.time.temporal.ChronoUnit;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static io.kestra.core.models.flows.FlowScope.SYSTEM;
import static io.kestra.core.models.flows.FlowScope.USER;
import static java.time.temporal.ChronoUnit.MINUTES;
import static java.time.temporal.ChronoUnit.SECONDS;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertThrows;
@@ -79,7 +81,6 @@ public abstract class AbstractExecutionRepositoryTest {
.tenantId(tenantId)
.flowId(flowId == null ? FLOW : flowId)
.flowRevision(1)
.kind(ExecutionKind.NORMAL)
.state(finalState);
@@ -183,7 +184,6 @@ public abstract class AbstractExecutionRepositoryTest {
@ParameterizedTest
@MethodSource("filterCombinations")
@FlakyTest(description = "Filtering tests are sometimes returning 0")
void should_find_all(QueryFilter filter, int expectedSize){
var tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
inject(tenant, "executionTriggerId");
@@ -196,49 +196,15 @@ public abstract class AbstractExecutionRepositoryTest {
static Stream<Arguments> filterCombinations() {
return Stream.of(
Arguments.of(QueryFilter.builder().field(Field.QUERY).value("unittest").operation(Op.EQUALS).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.QUERY).value("unused").operation(Op.NOT_EQUALS).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.SCOPE).value(List.of(USER)).operation(Op.EQUALS).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.SCOPE).value(List.of(SYSTEM)).operation(Op.NOT_EQUALS).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value("io.kestra.unittest").operation(Op.EQUALS).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value("not.this.one").operation(Op.NOT_EQUALS).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value("o.kestra.unittes").operation(Op.CONTAINS).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value("io.kestra.uni").operation(Op.STARTS_WITH).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value("o.kestra.unittest").operation(Op.ENDS_WITH).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value("io\\.kestra\\.unittest").operation(Op.REGEX).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value(List.of("io.kestra.unittest", "unused")).operation(Op.IN).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value(List.of("unused.first", "unused.second")).operation(Op.NOT_IN).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value("io.kestra").operation(Op.PREFIX).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.KIND).value(ExecutionKind.NORMAL).operation(Op.EQUALS).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.KIND).value(ExecutionKind.TEST).operation(Op.NOT_EQUALS).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.KIND).value(List.of(ExecutionKind.NORMAL, ExecutionKind.PLAYGROUND)).operation(Op.IN).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.KIND).value(List.of(ExecutionKind.PLAYGROUND, ExecutionKind.TEST)).operation(Op.NOT_IN).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.LABELS).value(Map.of("key", "value")).operation(Op.EQUALS).build(), 1),
Arguments.of(QueryFilter.builder().field(Field.LABELS).value(Map.of("key", "unknown")).operation(Op.NOT_EQUALS).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.LABELS).value(Map.of("key", "value", "key2", "value2")).operation(Op.IN).build(), 1),
Arguments.of(QueryFilter.builder().field(Field.LABELS).value(Map.of("key1", "value1")).operation(Op.NOT_IN).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.LABELS).value("value").operation(Op.CONTAINS).build(), 1),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value(FLOW).operation(Op.EQUALS).build(), 16),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value(FLOW).operation(Op.NOT_EQUALS).build(), 13),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value("ul").operation(Op.CONTAINS).build(), 16),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value("ful").operation(Op.STARTS_WITH).build(), 16),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value("ull").operation(Op.ENDS_WITH).build(), 16),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value("[ful]{4}").operation(Op.REGEX).build(), 16),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value(List.of(FLOW, "other")).operation(Op.IN).build(), 16),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value(List.of(FLOW, "other2")).operation(Op.NOT_IN).build(), 13),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value("ful").operation(Op.PREFIX).build(), 16),
Arguments.of(QueryFilter.builder().field(Field.START_DATE).value(ZonedDateTime.now().minusMinutes(1)).operation(Op.GREATER_THAN).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.END_DATE).value(ZonedDateTime.now().plusMinutes(1)).operation(Op.LESS_THAN).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.STATE).value(Type.RUNNING).operation(Op.EQUALS).build(), 5),
Arguments.of(QueryFilter.builder().field(Field.TRIGGER_EXECUTION_ID).value("executionTriggerId").operation(Op.EQUALS).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.CHILD_FILTER).value(ChildFilter.CHILD).operation(Op.EQUALS).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.CHILD_FILTER).value(ChildFilter.CHILD).operation(Op.NOT_EQUALS).build(), 0)
Arguments.of(QueryFilter.builder().field(Field.CHILD_FILTER).value(ChildFilter.CHILD).operation(Op.EQUALS).build(), 29)
);
}
@@ -690,65 +656,6 @@ public abstract class AbstractExecutionRepositoryTest {
assertThat(data).first().hasFieldOrPropertyWithValue("id", execution.getId());
}
@Test
void dashboard_fetchData_365Days_verifiesDateGrouping() throws IOException {
var tenantId = TestsUtils.randomTenant(this.getClass().getSimpleName());
var executionDuration = Duration.ofMinutes(220);
var executionCreateDate = Instant.now();
// Create an execution within the 365-day range
Execution execution = Execution.builder()
.tenantId(tenantId)
.id(IdUtils.create())
.namespace("io.kestra.unittest")
.flowId("some-execution")
.flowRevision(1)
.labels(Label.from(Map.of("country", "FR")))
.state(new State(Type.SUCCESS,
List.of(new State.History(State.Type.CREATED, executionCreateDate), new State.History(Type.SUCCESS, executionCreateDate.plus(executionDuration)))))
.taskRunList(List.of())
.build();
execution = executionRepository.save(execution);
// Create an execution BEYOND 365 days (400 days ago) - should be filtered out
var executionCreateDateOld = Instant.now().minus(Duration.ofDays(400));
Execution executionOld = Execution.builder()
.tenantId(tenantId)
.id(IdUtils.create())
.namespace("io.kestra.unittest")
.flowId("some-execution-old")
.flowRevision(1)
.labels(Label.from(Map.of("country", "US")))
.state(new State(Type.SUCCESS,
List.of(new State.History(State.Type.CREATED, executionCreateDateOld), new State.History(Type.SUCCESS, executionCreateDateOld.plus(executionDuration)))))
.taskRunList(List.of())
.build();
executionRepository.save(executionOld);
var now = ZonedDateTime.now();
ArrayListTotal<Map<String, Object>> data = executionRepository.fetchData(tenantId, Executions.builder()
.type(Executions.class.getName())
.columns(Map.of(
"count", ColumnDescriptor.<Executions.Fields>builder().field(Executions.Fields.ID).agg(AggregationType.COUNT).build(),
"id", ColumnDescriptor.<Executions.Fields>builder().field(Executions.Fields.ID).build(),
"date", ColumnDescriptor.<Executions.Fields>builder().field(Executions.Fields.START_DATE).build(),
"duration", ColumnDescriptor.<Executions.Fields>builder().field(Executions.Fields.DURATION).build()
)).build(),
now.minusDays(365),
now,
null
);
// Should only return 1 execution (the recent one), not the 400-day-old execution
assertThat(data.getTotal()).isGreaterThanOrEqualTo(1L);
assertThat(data).isNotEmpty();
assertThat(data).first().hasFieldOrProperty("count");
}
private static Execution buildWithCreatedDate(String tenant, Instant instant) {
return Execution.builder()
.id(IdUtils.create())

View File

@@ -121,8 +121,7 @@ public abstract class AbstractFlowRepositoryTest {
QueryFilter.builder().field(Field.QUERY).value("filterFlowId").operation(Op.EQUALS).build(),
QueryFilter.builder().field(Field.SCOPE).value(List.of(SYSTEM)).operation(Op.EQUALS).build(),
QueryFilter.builder().field(Field.NAMESPACE).value(SYSTEM_FLOWS_DEFAULT_NAMESPACE).operation(Op.EQUALS).build(),
QueryFilter.builder().field(Field.LABELS).value(Map.of("key", "value")).operation(Op.EQUALS).build(),
QueryFilter.builder().field(Field.FLOW_ID).value("filterFlowId").operation(Op.EQUALS).build()
QueryFilter.builder().field(Field.LABELS).value(Map.of("key", "value")).operation(Op.EQUALS).build()
);
}
@@ -146,6 +145,7 @@ public abstract class AbstractFlowRepositoryTest {
static Stream<QueryFilter> errorFilterCombinations() {
return Stream.of(
QueryFilter.builder().field(Field.FLOW_ID).value("sleep").operation(Op.EQUALS).build(),
QueryFilter.builder().field(Field.START_DATE).value(ZonedDateTime.now().minusMinutes(1)).operation(Op.GREATER_THAN).build(),
QueryFilter.builder().field(Field.END_DATE).value(ZonedDateTime.now().plusMinutes(1)).operation(Op.LESS_THAN).build(),
QueryFilter.builder().field(Field.STATE).value(State.Type.RUNNING).operation(Op.EQUALS).build(),

View File

@@ -1,92 +0,0 @@
package io.kestra.core.runners;
import io.kestra.core.junit.annotations.FlakyTest;
import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.junit.annotations.LoadFlows;
import jakarta.inject.Inject;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
@KestraTest(startRunner = true)
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
public abstract class AbstractRunnerConcurrencyTest {
@Inject
protected FlowConcurrencyCaseTest flowConcurrencyCaseTest;
@Test
@LoadFlows(value = {"flows/valids/flow-concurrency-cancel.yml"}, tenantId = "concurrency-cancel")
void concurrencyCancel() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyCancel("concurrency-cancel");
}
@Test
@LoadFlows(value = {"flows/valids/flow-concurrency-fail.yml"}, tenantId = "concurrency-fail")
void concurrencyFail() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyFail("concurrency-fail");
}
@Test
@LoadFlows(value = {"flows/valids/flow-concurrency-queue.yml"}, tenantId = "concurrency-queue")
void concurrencyQueue() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyQueue("concurrency-queue");
}
@Test
@LoadFlows(value = {"flows/valids/flow-concurrency-queue-pause.yml"}, tenantId = "concurrency-queue-pause")
protected void concurrencyQueuePause() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyQueuePause("concurrency-queue-pause");
}
@Test
@LoadFlows(value = {"flows/valids/flow-concurrency-cancel-pause.yml"}, tenantId = "concurrency-cancel-pause")
protected void concurrencyCancelPause() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyCancelPause("concurrency-cancel-pause");
}
@Test
@LoadFlows(value = {"flows/valids/flow-concurrency-for-each-item.yaml", "flows/valids/flow-concurrency-queue.yml"}, tenantId = "flow-concurrency-with-for-each-item")
protected void flowConcurrencyWithForEachItem() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyWithForEachItem("flow-concurrency-with-for-each-item");
}
@Test
@LoadFlows(value = {"flows/valids/flow-concurrency-queue-fail.yml"}, tenantId = "concurrency-queue-restarted")
protected void concurrencyQueueRestarted() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyQueueRestarted("concurrency-queue-restarted");
}
@Test
@LoadFlows(value = {"flows/valids/flow-concurrency-queue-after-execution.yml"}, tenantId = "concurrency-queue-after-execution")
void concurrencyQueueAfterExecution() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyQueueAfterExecution("concurrency-queue-after-execution");
}
@Test
@LoadFlows(value = {"flows/valids/flow-concurrency-subflow.yml", "flows/valids/flow-concurrency-cancel.yml"}, tenantId = "flow-concurrency-subflow")
void flowConcurrencySubflow() throws Exception {
flowConcurrencyCaseTest.flowConcurrencySubflow("flow-concurrency-subflow");
}
@Test
@FlakyTest(description = "Only flaky in CI")
@LoadFlows(
value = {"flows/valids/flow-concurrency-parallel-subflow-kill.yaml", "flows/valids/flow-concurrency-parallel-subflow-kill-child.yaml", "flows/valids/flow-concurrency-parallel-subflow-kill-grandchild.yaml"},
tenantId = "flow-concurrency-parallel-subflow-kill"
)
protected void flowConcurrencyParallelSubflowKill() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyParallelSubflowKill("flow-concurrency-parallel-subflow-kill");
}
@Test
@LoadFlows(value = {"flows/valids/flow-concurrency-queue-killed.yml"}, tenantId = "flow-concurrency-killed")
void flowConcurrencyKilled() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyKilled("flow-concurrency-killed");
}
@Test
@FlakyTest(description = "Only flaky in CI")
@LoadFlows(value = {"flows/valids/flow-concurrency-queue-killed.yml"}, tenantId = "flow-concurrency-queue-killed")
void flowConcurrencyQueueKilled() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyQueueKilled("flow-concurrency-queue-killed");
}
}

View File

@@ -66,6 +66,9 @@ public abstract class AbstractRunnerTest {
@Inject
protected LoopUntilCaseTest loopUntilTestCaseTest;
@Inject
protected FlowConcurrencyCaseTest flowConcurrencyCaseTest;
@Inject
protected ScheduleDateCaseTest scheduleDateCaseTest;
@@ -419,6 +422,66 @@ public abstract class AbstractRunnerTest {
forEachItemCaseTest.forEachItemWithAfterExecution();
}
@Test
@LoadFlows({"flows/valids/flow-concurrency-cancel.yml"})
void concurrencyCancel() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyCancel();
}
@Test
@LoadFlows({"flows/valids/flow-concurrency-fail.yml"})
void concurrencyFail() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyFail();
}
@Test
@LoadFlows({"flows/valids/flow-concurrency-queue.yml"})
void concurrencyQueue() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyQueue();
}
@Test
@LoadFlows({"flows/valids/flow-concurrency-queue-pause.yml"})
protected void concurrencyQueuePause() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyQueuePause();
}
@Test
@LoadFlows({"flows/valids/flow-concurrency-cancel-pause.yml"})
protected void concurrencyCancelPause() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyCancelPause();
}
@Test
@LoadFlows(value = {"flows/valids/flow-concurrency-for-each-item.yaml", "flows/valids/flow-concurrency-queue.yml"}, tenantId = TENANT_1)
protected void flowConcurrencyWithForEachItem() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyWithForEachItem(TENANT_1);
}
@Test
@LoadFlows({"flows/valids/flow-concurrency-queue-fail.yml"})
protected void concurrencyQueueRestarted() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyQueueRestarted();
}
@Test
@LoadFlows({"flows/valids/flow-concurrency-queue-after-execution.yml"})
void concurrencyQueueAfterExecution() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyQueueAfterExecution();
}
@Test
@LoadFlows(value = {"flows/valids/flow-concurrency-subflow.yml", "flows/valids/flow-concurrency-cancel.yml"}, tenantId = TENANT_1)
void flowConcurrencySubflow() throws Exception {
flowConcurrencyCaseTest.flowConcurrencySubflow(TENANT_1);
}
@Test
@LoadFlows({"flows/valids/flow-concurrency-parallel-subflow-kill.yaml", "flows/valids/flow-concurrency-parallel-subflow-kill-child.yaml", "flows/valids/flow-concurrency-parallel-subflow-kill-grandchild.yaml"})
void flowConcurrencyParallelSubflowKill() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyParallelSubflowKill();
}
@Test
@ExecuteFlow("flows/valids/executable-fail.yml")
void badExecutable(Execution execution) {

View File

@@ -31,6 +31,7 @@ import java.util.Optional;
import java.util.concurrent.TimeoutException;
import java.util.stream.IntStream;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import static org.assertj.core.api.Assertions.assertThat;
@Singleton
@@ -56,42 +57,40 @@ public class FlowConcurrencyCaseTest {
@Named(QueueFactoryInterface.KILL_NAMED)
protected QueueInterface<ExecutionKilled> killQueue;
public void flowConcurrencyCancel(String tenantId) throws TimeoutException, QueueException {
Execution execution1 = runnerUtils.runOneUntilRunning(tenantId, NAMESPACE, "flow-concurrency-cancel", null, null, Duration.ofSeconds(30));
public void flowConcurrencyCancel() throws TimeoutException, QueueException {
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, NAMESPACE, "flow-concurrency-cancel", null, null, Duration.ofSeconds(30));
try {
List<Execution> shouldFailExecutions = List.of(
runnerUtils.runOne(tenantId, NAMESPACE, "flow-concurrency-cancel"),
runnerUtils.runOne(tenantId, NAMESPACE, "flow-concurrency-cancel")
runnerUtils.runOne(MAIN_TENANT, NAMESPACE, "flow-concurrency-cancel"),
runnerUtils.runOne(MAIN_TENANT, NAMESPACE, "flow-concurrency-cancel")
);
assertThat(execution1.getState().isRunning()).isTrue();
assertThat(shouldFailExecutions.stream().map(Execution::getState).map(State::getCurrent)).allMatch(Type.CANCELLED::equals);
} finally {
runnerUtils.killExecution(execution1);
runnerUtils.awaitExecution(e -> e.getState().isTerminated(), execution1);
}
}
public void flowConcurrencyFail(String tenantId) throws TimeoutException, QueueException {
Execution execution1 = runnerUtils.runOneUntilRunning(tenantId, NAMESPACE, "flow-concurrency-fail", null, null, Duration.ofSeconds(30));
public void flowConcurrencyFail() throws TimeoutException, QueueException {
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, NAMESPACE, "flow-concurrency-fail", null, null, Duration.ofSeconds(30));
try {
List<Execution> shouldFailExecutions = List.of(
runnerUtils.runOne(tenantId, NAMESPACE, "flow-concurrency-fail"),
runnerUtils.runOne(tenantId, NAMESPACE, "flow-concurrency-fail")
runnerUtils.runOne(MAIN_TENANT, NAMESPACE, "flow-concurrency-fail"),
runnerUtils.runOne(MAIN_TENANT, NAMESPACE, "flow-concurrency-fail")
);
assertThat(execution1.getState().isRunning()).isTrue();
assertThat(shouldFailExecutions.stream().map(Execution::getState).map(State::getCurrent)).allMatch(State.Type.FAILED::equals);
} finally {
runnerUtils.killExecution(execution1);
runnerUtils.awaitExecution(e -> e.getState().isTerminated(), execution1);
}
}
public void flowConcurrencyQueue(String tenantId) throws QueueException {
Execution execution1 = runnerUtils.runOneUntilRunning(tenantId, NAMESPACE, "flow-concurrency-queue", null, null, Duration.ofSeconds(30));
public void flowConcurrencyQueue() throws QueueException {
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue", null, null, Duration.ofSeconds(30));
Flow flow = flowRepository
.findById(tenantId, NAMESPACE, "flow-concurrency-queue", Optional.empty())
.findById(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue", Optional.empty())
.orElseThrow();
Execution execution2 = Execution.newExecution(flow, null, null, Optional.empty());
Execution executionResult2 = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), execution2);
@@ -107,10 +106,10 @@ public class FlowConcurrencyCaseTest {
assertThat(executionResult2.getState().getHistories().get(2).getState()).isEqualTo(State.Type.RUNNING);
}
public void flowConcurrencyQueuePause(String tenantId) throws QueueException {
Execution execution1 = runnerUtils.runOneUntilPaused(tenantId, NAMESPACE, "flow-concurrency-queue-pause");
public void flowConcurrencyQueuePause() throws QueueException {
Execution execution1 = runnerUtils.runOneUntilPaused(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-pause");
Flow flow = flowRepository
.findById(tenantId, NAMESPACE, "flow-concurrency-queue-pause", Optional.empty())
.findById(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-pause", Optional.empty())
.orElseThrow();
Execution execution2 = Execution.newExecution(flow, null, null, Optional.empty());
Execution secondExecutionResult = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), execution2);
@@ -126,10 +125,10 @@ public class FlowConcurrencyCaseTest {
assertThat(secondExecutionResult.getState().getHistories().get(2).getState()).isEqualTo(State.Type.RUNNING);
}
public void flowConcurrencyCancelPause(String tenantId) throws QueueException {
Execution execution1 = runnerUtils.runOneUntilPaused(tenantId, NAMESPACE, "flow-concurrency-cancel-pause");
public void flowConcurrencyCancelPause() throws QueueException {
Execution execution1 = runnerUtils.runOneUntilPaused(MAIN_TENANT, NAMESPACE, "flow-concurrency-cancel-pause");
Flow flow = flowRepository
.findById(tenantId, NAMESPACE, "flow-concurrency-cancel-pause", Optional.empty())
.findById(MAIN_TENANT, NAMESPACE, "flow-concurrency-cancel-pause", Optional.empty())
.orElseThrow();
Execution execution2 = Execution.newExecution(flow, null, null, Optional.empty());
Execution secondExecutionResult = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.CANCELLED), execution2);
@@ -165,11 +164,11 @@ public class FlowConcurrencyCaseTest {
.toList()).contains(Type.QUEUED);
}
public void flowConcurrencyQueueRestarted(String tenantId) throws Exception {
Execution execution1 = runnerUtils.runOneUntilRunning(tenantId, NAMESPACE,
public void flowConcurrencyQueueRestarted() throws Exception {
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, NAMESPACE,
"flow-concurrency-queue-fail", null, null, Duration.ofSeconds(30));
Flow flow = flowRepository
.findById(tenantId, NAMESPACE, "flow-concurrency-queue-fail", Optional.empty())
.findById(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-fail", Optional.empty())
.orElseThrow();
Execution execution2 = Execution.newExecution(flow, null, null, Optional.empty());
runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.RUNNING), execution2);
@@ -178,10 +177,7 @@ public class FlowConcurrencyCaseTest {
// we restart the first one, it should be queued then fail again.
Execution failedExecution = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.FAILED), execution1);
Execution restarted = executionService.restart(failedExecution, null);
Execution executionResult1 = runnerUtils.restartExecution(
e -> e.getState().getHistories().stream().anyMatch(history -> history.getState() == Type.RESTARTED) && e.getState().getCurrent().equals(Type.FAILED),
restarted
);
Execution executionResult1 = runnerUtils.restartExecution(e -> e.getState().getCurrent().equals(Type.FAILED), restarted);
Execution executionResult2 = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.FAILED), execution2);
assertThat(executionResult1.getState().getCurrent()).isEqualTo(Type.FAILED);
@@ -195,10 +191,10 @@ public class FlowConcurrencyCaseTest {
assertThat(executionResult2.getState().getHistories().get(2).getState()).isEqualTo(State.Type.RUNNING);
}
public void flowConcurrencyQueueAfterExecution(String tenantId) throws QueueException {
Execution execution1 = runnerUtils.runOneUntilRunning(tenantId, NAMESPACE, "flow-concurrency-queue-after-execution", null, null, Duration.ofSeconds(30));
public void flowConcurrencyQueueAfterExecution() throws QueueException {
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-after-execution", null, null, Duration.ofSeconds(30));
Flow flow = flowRepository
.findById(tenantId, NAMESPACE, "flow-concurrency-queue-after-execution", Optional.empty())
.findById(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-after-execution", Optional.empty())
.orElseThrow();
Execution execution2 = Execution.newExecution(flow, null, null, Optional.empty());
Execution executionResult2 = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), execution2);
@@ -218,15 +214,15 @@ public class FlowConcurrencyCaseTest {
List<Execution> subFlowExecs = runnerUtils.awaitFlowExecutionNumber(2, tenantId, NAMESPACE, "flow-concurrency-cancel");
assertThat(subFlowExecs).extracting(e -> e.getState().getCurrent()).containsExactlyInAnyOrder(Type.SUCCESS, Type.CANCELLED);
// run another execution to be sure that everything works (purge is correctly done)
// run another execution to be sure that everything works (purge is correctly done)
Execution execution3 = runnerUtils.runOne(tenantId, NAMESPACE, "flow-concurrency-subflow");
assertThat(execution3.getState().getCurrent()).isEqualTo(Type.SUCCESS);
runnerUtils.awaitFlowExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), tenantId, NAMESPACE, "flow-concurrency-cancel");
}
public void flowConcurrencyParallelSubflowKill(String tenantId) throws QueueException {
Execution parent = runnerUtils.runOneUntilRunning(tenantId, NAMESPACE, "flow-concurrency-parallel-subflow-kill", null, null, Duration.ofSeconds(30));
Execution queued = runnerUtils.awaitFlowExecution(e -> e.getState().isQueued(), tenantId, NAMESPACE, "flow-concurrency-parallel-subflow-kill-child");
public void flowConcurrencyParallelSubflowKill() throws QueueException {
Execution parent = runnerUtils.runOneUntilRunning(MAIN_TENANT, NAMESPACE, "flow-concurrency-parallel-subflow-kill", null, null, Duration.ofSeconds(30));
Execution queued = runnerUtils.awaitFlowExecution(e -> e.getState().isQueued(), MAIN_TENANT, NAMESPACE, "flow-concurrency-parallel-subflow-kill-child");
// Kill the parent
killQueue.emit(ExecutionKilledExecution
@@ -234,7 +230,7 @@ public class FlowConcurrencyCaseTest {
.state(ExecutionKilled.State.REQUESTED)
.executionId(parent.getId())
.isOnKillCascade(true)
.tenantId(tenantId)
.tenantId(MAIN_TENANT)
.build()
);
@@ -244,92 +240,6 @@ public class FlowConcurrencyCaseTest {
assertThat(terminated.getTaskRunList()).isNull();
}
public void flowConcurrencyKilled(String tenantId) throws QueueException, InterruptedException {
Flow flow = flowRepository
.findById(tenantId, NAMESPACE, "flow-concurrency-queue-killed", Optional.empty())
.orElseThrow();
Execution execution1 = runnerUtils.runOneUntilRunning(tenantId, NAMESPACE, "flow-concurrency-queue-killed", null, null, Duration.ofSeconds(30));
Execution execution2 = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.QUEUED), Execution.newExecution(flow, null, null, Optional.empty()));
Execution execution3 = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.QUEUED), Execution.newExecution(flow, null, null, Optional.empty()));
try {
assertThat(execution1.getState().isRunning()).isTrue();
assertThat(execution2.getState().getCurrent()).isEqualTo(Type.QUEUED);
assertThat(execution3.getState().getCurrent()).isEqualTo(Type.QUEUED);
// we kill execution 1, execution 2 should run but not execution 3
killQueue.emit(ExecutionKilledExecution
.builder()
.state(ExecutionKilled.State.REQUESTED)
.executionId(execution1.getId())
.isOnKillCascade(true)
.tenantId(tenantId)
.build()
);
Execution killed = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.KILLED), execution1);
assertThat(killed.getState().getCurrent()).isEqualTo(Type.KILLED);
assertThat(killed.getState().getHistories().stream().anyMatch(h -> h.getState() == Type.RUNNING)).isTrue();
// we now check that execution 2 is running
Execution running = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.RUNNING), execution2);
assertThat(running.getState().getCurrent()).isEqualTo(Type.RUNNING);
// we check that execution 3 is still queued
Thread.sleep(100); // wait a little to be 100% sure
Execution queued = runnerUtils.awaitExecution(e -> e.getState().isQueued(), execution3);
assertThat(queued.getState().getCurrent()).isEqualTo(Type.QUEUED);
} finally {
// kill everything to avoid dangling executions
runnerUtils.killExecution(execution2);
runnerUtils.killExecution(execution3);
// await that they are all terminated, note that as KILLED is received twice, some messages would still be pending, but this is the best we can do
runnerUtils.awaitFlowExecutionNumber(3, tenantId, NAMESPACE, "flow-concurrency-queue-killed");
}
}
public void flowConcurrencyQueueKilled(String tenantId) throws QueueException, InterruptedException {
Flow flow = flowRepository
.findById(tenantId, NAMESPACE, "flow-concurrency-queue-killed", Optional.empty())
.orElseThrow();
Execution execution1 = runnerUtils.runOneUntilRunning(tenantId, NAMESPACE, "flow-concurrency-queue-killed", null, null, Duration.ofSeconds(30));
Execution execution2 = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.QUEUED), Execution.newExecution(flow, null, null, Optional.empty()));
Execution execution3 = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.QUEUED), Execution.newExecution(flow, null, null, Optional.empty()));
try {
assertThat(execution1.getState().isRunning()).isTrue();
assertThat(execution2.getState().getCurrent()).isEqualTo(Type.QUEUED);
assertThat(execution3.getState().getCurrent()).isEqualTo(Type.QUEUED);
// we kill execution 2, execution 3 should not run
killQueue.emit(ExecutionKilledExecution
.builder()
.state(ExecutionKilled.State.REQUESTED)
.executionId(execution2.getId())
.isOnKillCascade(true)
.tenantId(tenantId)
.build()
);
Execution killed = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.KILLED), execution2);
assertThat(killed.getState().getCurrent()).isEqualTo(Type.KILLED);
assertThat(killed.getState().getHistories().stream().noneMatch(h -> h.getState() == Type.RUNNING)).isTrue();
// we now check that execution 3 is still queued
Thread.sleep(100); // wait a little to be 100% sure
Execution queued = runnerUtils.awaitExecution(e -> e.getState().isQueued(), execution3);
assertThat(queued.getState().getCurrent()).isEqualTo(Type.QUEUED);
} finally {
// kill everything to avoid dangling executions
runnerUtils.killExecution(execution1);
runnerUtils.killExecution(execution3);
// await that they are all terminated, note that as KILLED is received twice, some messages would still be pending, but this is the best we can do
runnerUtils.awaitFlowExecutionNumber(3, tenantId, NAMESPACE, "flow-concurrency-queue-killed");
}
}
private URI storageUpload(String tenantId) throws URISyntaxException, IOException {
File tempFile = File.createTempFile("file", ".txt");

View File

@@ -2,7 +2,9 @@ package io.kestra.core.runners;
import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.*;
import io.kestra.core.models.flows.DependsOn;
import io.kestra.core.models.flows.Input;
import io.kestra.core.models.flows.Type;
import io.kestra.core.models.flows.input.FileInput;
import io.kestra.core.models.flows.input.InputAndValue;
import io.kestra.core.models.flows.input.IntInput;
@@ -30,7 +32,6 @@ import org.reactivestreams.Publisher;
import reactor.core.publisher.Mono;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.InputStream;
import java.net.URI;
import java.nio.ByteBuffer;
@@ -44,10 +45,10 @@ import static org.assertj.core.api.Assertions.assertThat;
@KestraTest
class FlowInputOutputTest {
private static final String TEST_SECRET_VALUE = "test-secret-value";
private static final String TEST_KV_VALUE = "test-kv-value";
static final Execution DEFAULT_TEST_EXECUTION = Execution.builder()
.id(IdUtils.create())
.flowId(IdUtils.create())
@@ -63,7 +64,7 @@ class FlowInputOutputTest {
@Inject
KvMetadataRepositoryInterface kvMetadataRepository;
@MockBean(SecretService.class)
SecretService testSecretService() {
return new SecretService() {
@@ -73,7 +74,7 @@ class FlowInputOutputTest {
}
};
}
@MockBean(KVStoreService.class)
KVStoreService testKVStoreService() {
return new KVStoreService() {
@@ -88,7 +89,7 @@ class FlowInputOutputTest {
}
};
}
@Test
void shouldResolveEnabledInputsGivenInputWithConditionalExpressionMatchingTrue() {
// Given
@@ -293,7 +294,7 @@ class FlowInputOutputTest {
values
);
}
@Test
void resolveInputsGivenDefaultExpressions() {
// Given
@@ -310,14 +311,14 @@ class FlowInputOutputTest {
.required(false)
.dependsOn(new DependsOn(List.of("input1"),null))
.build();
List<Input<?>> inputs = List.of(input1, input2);
Map<String, Object> data = Map.of("input42", "foo");
// When
List<InputAndValue> values = flowInputOutput.resolveInputs(inputs, null, DEFAULT_TEST_EXECUTION, data);
// Then
Assertions.assertEquals(
List.of(
@@ -326,7 +327,7 @@ class FlowInputOutputTest {
values
);
}
@Test
void shouldObfuscateSecretsWhenValidatingInputs() {
// Given
@@ -336,14 +337,14 @@ class FlowInputOutputTest {
.defaults(Property.ofExpression("{{ secret('???') }}"))
.required(false)
.build();
// When
List<InputAndValue> results = flowInputOutput.validateExecutionInputs(List.of(input), null, DEFAULT_TEST_EXECUTION, Mono.empty()).block();
// Then
Assertions.assertEquals("******", results.getFirst().value());
}
@Test
void shouldNotObfuscateSecretsInSelectWhenValidatingInputs() {
// Given
@@ -353,10 +354,10 @@ class FlowInputOutputTest {
.expression("{{ [secret('???')] }}")
.required(false)
.build();
// When
List<InputAndValue> results = flowInputOutput.validateExecutionInputs(List.of(input), null, DEFAULT_TEST_EXECUTION, Mono.empty()).block();
// Then
Assertions.assertEquals(TEST_SECRET_VALUE, ((MultiselectInput)results.getFirst().input()).getValues().getFirst());
}
@@ -370,14 +371,14 @@ class FlowInputOutputTest {
.defaults(Property.ofExpression("{{ secret('???') }}"))
.required(false)
.build();
// When
Map<String, Object> results = flowInputOutput.readExecutionInputs(List.of(input), null, DEFAULT_TEST_EXECUTION, Mono.empty()).block();
// Then
Assertions.assertEquals(TEST_SECRET_VALUE, results.get("input"));
}
@Test
void shouldEvaluateExpressionOnDefaultsUsingKVFunction() {
// Given
@@ -387,14 +388,14 @@ class FlowInputOutputTest {
.defaults(Property.ofExpression("{{ kv('???') }}"))
.required(false)
.build();
// When
Map<String, Object> results = flowInputOutput.readExecutionInputs(List.of(input), null, DEFAULT_TEST_EXECUTION, Mono.empty()).block();
// Then
assertThat(results.get("input")).isEqualTo(TEST_KV_VALUE);
}
@Test
void shouldGetDefaultWhenPassingNoDataForRequiredInput() {
// Given
@@ -403,84 +404,50 @@ class FlowInputOutputTest {
.type(Type.STRING)
.defaults(Property.ofValue("default"))
.build();
// When
Map<String, Object> results = flowInputOutput.readExecutionInputs(List.of(input), null, DEFAULT_TEST_EXECUTION, Mono.empty()).block();
// Then
assertThat(results.get("input")).isEqualTo("default");
}
@Test
void shouldResolveZeroByteFileUpload() throws java.io.IOException {
File tempFile = File.createTempFile("empty", ".txt");
tempFile.deleteOnExit();
io.micronaut.http.multipart.CompletedFileUpload fileUpload = org.mockito.Mockito.mock(io.micronaut.http.multipart.CompletedFileUpload.class);
org.mockito.Mockito.when(fileUpload.getInputStream()).thenReturn(new java.io.FileInputStream(tempFile));
org.mockito.Mockito.when(fileUpload.getFilename()).thenReturn("empty.txt");
org.mockito.Mockito.when(fileUpload.getName()).thenReturn("empty_file");
Execution execution = Execution.builder()
.id(IdUtils.create())
.tenantId("unit_test_tenant")
.namespace("io.kestra.unittest")
.flowId("unittest")
.flowRevision(1)
.state(new State())
.build();
reactor.core.publisher.Mono<Map<String, Object>> result = flowInputOutput.readExecutionInputs(
List.of(
io.kestra.core.models.flows.input.FileInput.builder().id("empty_file").type(Type.FILE).build()
),
Flow.builder().id("unittest").namespace("io.kestra.unittest").build(),
execution,
reactor.core.publisher.Flux.just(fileUpload)
);
Map<String, Object> outputs = result.block();
Assertions.assertNotNull(outputs);
Assertions.assertTrue(outputs.containsKey("empty_file"));
}
private static class MemoryCompletedPart implements CompletedPart {
protected final String name;
protected final byte[] content;
public MemoryCompletedPart(String name, byte[] content) {
this.name = name;
this.content = content;
}
@Override
public InputStream getInputStream() {
return new ByteArrayInputStream(content);
}
@Override
public byte[] getBytes() {
return content;
}
@Override
public ByteBuffer getByteBuffer() {
return ByteBuffer.wrap(content);
}
@Override
public Optional<MediaType> getContentType() {
return Optional.empty();
}
@Override
public String getName() {
return name;
}
}
private static final class MemoryCompletedFileUpload extends MemoryCompletedPart implements CompletedFileUpload {
private final String fileName;
@@ -489,7 +456,7 @@ class FlowInputOutputTest {
super(name, content);
this.fileName = fileName;
}
@Override
public String getFilename() {
return fileName;

View File

@@ -56,18 +56,6 @@ public class InputsTest {
@Inject
private NamespaceFactory namespaceFactory;
private static final Map<String , Object> object = Map.of(
"people", List.of(
Map.of(
"first", "Mustafa",
"last", "Tarek"
),
Map.of(
"first", "Ahmed",
"last", "Tarek"
)
)
);
public static Map<String, Object> inputs = ImmutableMap.<String, Object>builder()
.put("string", "myString")
.put("enum", "ENUM_VALUE")
@@ -79,6 +67,7 @@ public class InputsTest {
.put("time", "18:27:49")
.put("duration", "PT5M6S")
.put("file", Objects.requireNonNull(InputsTest.class.getClassLoader().getResource("application-test.yml")).getPath())
.put("json", "{\"a\": \"b\"}")
.put("uri", "https://www.google.com")
.put("nested.string", "a string")
.put("nested.more.int", "123")
@@ -92,14 +81,11 @@ public class InputsTest {
.put("validatedTime", "11:27:49")
.put("secret", "secret")
.put("array", "[1, 2, 3]")
.put("json1", "{\"a\": \"b\"}")
.put("json2", object)
.put("yaml1", """
.put("yaml", """
some: property
alist:
- of
- values""")
.put("yaml2", object)
.build();
@Inject
@@ -168,6 +154,7 @@ public class InputsTest {
assertThat(typeds.get("duration")).isEqualTo(Duration.parse("PT5M6S"));
assertThat((URI) typeds.get("file")).isEqualTo(new URI("kestra:///io/kestra/tests/inputs/executions/test/inputs/file/application-test.yml"));
assertThat(CharStreams.toString(new InputStreamReader(storageInterface.get("tenant1", null, (URI) typeds.get("file"))))).isEqualTo(CharStreams.toString(new InputStreamReader(new FileInputStream((String) inputs.get("file")))));
assertThat(typeds.get("json")).isEqualTo(Map.of("a", "b"));
assertThat(typeds.get("uri")).isEqualTo("https://www.google.com");
assertThat(((Map<String, Object>) typeds.get("nested")).get("string")).isEqualTo("a string");
assertThat((Boolean) ((Map<String, Object>) typeds.get("nested")).get("bool")).isTrue();
@@ -183,12 +170,9 @@ public class InputsTest {
assertThat(typeds.get("array")).isInstanceOf(List.class);
assertThat((List<Integer>) typeds.get("array")).hasSize(3);
assertThat((List<Integer>) typeds.get("array")).isEqualTo(List.of(1, 2, 3));
assertThat(typeds.get("json1")).isEqualTo(Map.of("a", "b"));
assertThat(typeds.get("json2")).isEqualTo(object);
assertThat(typeds.get("yaml1")).isEqualTo(Map.of(
assertThat(typeds.get("yaml")).isEqualTo(Map.of(
"some", "property",
"alist", List.of("of", "values")));
assertThat(typeds.get("yaml2")).isEqualTo(object);
}
@Test
@@ -217,7 +201,7 @@ public class InputsTest {
(flow, execution1) -> flowIO.readExecutionInputs(flow, execution1, inputs)
);
assertThat(execution.getTaskRunList()).hasSize(16);
assertThat(execution.getTaskRunList()).hasSize(14);
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat((String) execution.findTaskRunsByTaskId("file").getFirst().getOutputs().get("value")).matches("kestra:///io/kestra/tests/inputs/executions/.*/inputs/file/application-test.yml");
// secret inputs are decrypted to be used as task properties
@@ -370,19 +354,19 @@ public class InputsTest {
@LoadFlows(value = {"flows/valids/inputs.yaml"}, tenantId = "tenant14")
void inputEmptyJson() {
HashMap<String, Object> map = new HashMap<>(inputs);
map.put("json1", "{}");
map.put("json", "{}");
Map<String, Object> typeds = typedInputs(map, "tenant14");
assertThat(typeds.get("json1")).isInstanceOf(Map.class);
assertThat(((Map<?, ?>) typeds.get("json1")).size()).isZero();
assertThat(typeds.get("json")).isInstanceOf(Map.class);
assertThat(((Map<?, ?>) typeds.get("json")).size()).isZero();
}
@Test
@LoadFlows(value = {"flows/valids/inputs.yaml"}, tenantId = "tenant15")
void inputEmptyJsonFlow() throws TimeoutException, QueueException {
HashMap<String, Object> map = new HashMap<>(inputs);
map.put("json1", "{}");
map.put("json", "{}");
Execution execution = runnerUtils.runOne(
"tenant15",
@@ -392,11 +376,11 @@ public class InputsTest {
(flow, execution1) -> flowIO.readExecutionInputs(flow, execution1, map)
);
assertThat(execution.getTaskRunList()).hasSize(16);
assertThat(execution.getTaskRunList()).hasSize(14);
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(execution.getInputs().get("json1")).isInstanceOf(Map.class);
assertThat(((Map<?, ?>) execution.getInputs().get("json1")).size()).isZero();
assertThat(execution.getInputs().get("json")).isInstanceOf(Map.class);
assertThat(((Map<?, ?>) execution.getInputs().get("json")).size()).isZero();
assertThat((String) execution.findTaskRunsByTaskId("jsonOutput").getFirst().getOutputs().get("value")).isEqualTo("{}");
}
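
A minimal sketch, using plain Jackson rather than Kestra's FlowInputOutput, of the behavior the two tests above assert: an empty JSON document "{}" submitted for a JSON input is typed as an empty Map, not null, so a downstream task can still render it as "{}". Class and variable names below are illustrative.

import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Map;

public class EmptyJsonInputSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // An empty JSON object parses to an empty map rather than null,
        // which is why the jsonOutput task above can still render "{}".
        Map<?, ?> typed = mapper.readValue("{}", Map.class);
        System.out.println(typed.isEmpty());                  // true
        System.out.println(mapper.writeValueAsString(typed)); // {}
    }
}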

View File

@@ -122,10 +122,10 @@ class YamlParserTest {
void inputs() {
Flow flow = this.parse("flows/valids/inputs.yaml");
assertThat(flow.getInputs().size()).isEqualTo(31);
assertThat(flow.getInputs().stream().filter(Input::getRequired).count()).isEqualTo(12L);
assertThat(flow.getInputs().stream().filter(r -> !r.getRequired()).count()).isEqualTo(19L);
assertThat(flow.getInputs().stream().filter(r -> r.getDefaults() != null).count()).isEqualTo(4L);
assertThat(flow.getInputs().size()).isEqualTo(29);
assertThat(flow.getInputs().stream().filter(Input::getRequired).count()).isEqualTo(11L);
assertThat(flow.getInputs().stream().filter(r -> !r.getRequired()).count()).isEqualTo(18L);
assertThat(flow.getInputs().stream().filter(r -> r.getDefaults() != null).count()).isEqualTo(3L);
assertThat(flow.getInputs().stream().filter(r -> r instanceof StringInput stringInput && stringInput.getValidator() != null).count()).isEqualTo(1L);
}

View File

@@ -1,24 +1,15 @@
package io.kestra.core.utils;
import io.kestra.core.models.Setting;
import io.kestra.core.repositories.SettingRepositoryInterface;
import io.micronaut.test.extensions.junit5.annotation.MicronautTest;
import io.kestra.core.junit.annotations.KestraTest;
import jakarta.inject.Inject;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import java.util.Optional;
import static org.assertj.core.api.Assertions.assertThat;
@MicronautTest
@KestraTest
public class EditionProviderTest {
@Inject
private EditionProvider editionProvider;
@Inject
private SettingRepositoryInterface settingRepository;
protected EditionProvider.Edition expectedEdition() {
return EditionProvider.Edition.OSS;
}
@@ -26,10 +17,5 @@ public class EditionProviderTest {
@Test
void shouldReturnCurrentEdition() {
Assertions.assertEquals(expectedEdition(), editionProvider.get());
// check that the edition is persisted in settings
Optional<Setting> editionSettings = settingRepository.findByKey(Setting.INSTANCE_EDITION);
assertThat(editionSettings).isPresent();
assertThat(editionSettings.get().getValue()).isEqualTo(expectedEdition().name());
}
}

View File

@@ -48,8 +48,8 @@ class ListUtilsTest {
void convertToListString(){
assertThat(ListUtils.convertToListString(List.of("string1", "string2"))).isEqualTo(List.of("string1", "string2"));
assertThat(ListUtils.convertToListString(List.of())).isEqualTo(List.of());
assertThat(ListUtils.convertToListString(List.of(1, 2, 3))).isEqualTo(List.of("1", "2", "3"));
assertThrows(IllegalArgumentException.class, () -> ListUtils.convertToListString("not a list"));
assertThrows(IllegalArgumentException.class, () -> ListUtils.convertToListString(List.of(1, 2, 3)));
}
}
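
The updated test above asserts that ListUtils.convertToListString rejects a list of non-strings with an IllegalArgumentException instead of converting its elements. A hypothetical re-implementation of that stricter contract, for illustration only (this is not Kestra's actual ListUtils):

import java.util.List;

final class ConvertToListStringSketch {
    // Hypothetical re-implementation of the stricter contract asserted above:
    // only lists whose elements are already Strings are accepted.
    @SuppressWarnings("unchecked")
    static List<String> convertToListString(Object value) {
        if (value instanceof List<?> list && list.stream().allMatch(e -> e instanceof String)) {
            return (List<String>) list;
        }
        throw new IllegalArgumentException("Expected a List of String, got: " + value);
    }

    public static void main(String[] args) {
        System.out.println(convertToListString(List.of("string1", "string2"))); // [string1, string2]
        try {
            convertToListString(List.of(1, 2, 3)); // rejected instead of converted
        } catch (IllegalArgumentException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}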

View File

@@ -1,107 +1,48 @@
package io.kestra.core.utils;
import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.AppenderBase;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.triggers.TriggerContext;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.slf4j.LoggerFactory;
import org.slf4j.event.Level;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import static org.assertj.core.api.Assertions.assertThat;
@Slf4j
class LogsTest {
private static final InMemoryAppender MEMORY_APPENDER = new InMemoryAppender();
@BeforeAll
static void setupLogger() {
Logger logger = (Logger) LoggerFactory.getLogger(Logger.ROOT_LOGGER_NAME);
MEMORY_APPENDER.setContext((LoggerContext) LoggerFactory.getILoggerFactory());
MEMORY_APPENDER.start();
logger.addAppender(MEMORY_APPENDER);
}
@AfterEach
void clearLogs() {
MEMORY_APPENDER.clear();
}
@Test
void logFlow() {
var flow = Flow.builder().tenantId("tenant").namespace("namespace").id("flow").build();
var flow = Flow.builder().namespace("namespace").id("flow").build();
Logs.logExecution(flow, log, Level.INFO, "Some log");
Logs.logExecution(flow, log, Level.INFO, "Some log with an {}", "attribute");
Logs.logExecution(flow, log, Level.ERROR, "Some log with an {} and an error", "attribute", new RuntimeException("Test Exception"));
List<ILoggingEvent> logs = MEMORY_APPENDER.getLogs();
assertThat(logs).hasSize(3);
}
@Test
void logExecution() {
var execution = Execution.builder().tenantId("tenant").namespace("namespace").flowId("flow").id("execution").build();
var execution = Execution.builder().namespace("namespace").flowId("flow").id("execution").build();
Logs.logExecution(execution, log, Level.INFO, "Some log");
Logs.logExecution(execution, log, Level.INFO, "Some log with an {}", "attribute");
Logs.logExecution(execution, Level.INFO, "Some log");
Logs.logExecution(execution, Level.INFO, "Some log with an {}", "attribute");
Logs.logExecution(execution, Level.INFO, "Some log");
List<ILoggingEvent> logs = MEMORY_APPENDER.getLogs();
assertThat(logs).hasSize(3);
assertThat(logs.getFirst().getLoggerName()).isEqualTo("executor.tenant.namespace.flow");
}
@Test
void logTrigger() {
var trigger = TriggerContext.builder().tenantId("tenant").namespace("namespace").flowId("flow").triggerId("trigger").build();
var trigger = TriggerContext.builder().namespace("namespace").flowId("flow").triggerId("trigger").build();
Logs.logTrigger(trigger, log, Level.INFO, "Some log");
Logs.logTrigger(trigger, log, Level.INFO, "Some log with an {}", "attribute");
Logs.logTrigger(trigger, Level.INFO, "Some log");
Logs.logTrigger(trigger, Level.INFO, "Some log with an {}", "attribute");
Logs.logTrigger(trigger, Level.INFO, "Some log");
List<ILoggingEvent> logs = MEMORY_APPENDER.getLogs();
assertThat(logs).hasSize(3);
assertThat(logs.getFirst().getLoggerName()).isEqualTo("scheduler.tenant.namespace.flow.trigger");
}
@Test
void logTaskRun() {
var taskRun = TaskRun.builder().tenantId("tenant").namespace("namespace").flowId("flow").executionId("execution").taskId("task").id("taskRun").build();
var taskRun = TaskRun.builder().namespace("namespace").flowId("flow").executionId("execution").taskId("task").id("taskRun").build();
Logs.logTaskRun(taskRun, Level.INFO, "Some log");
Logs.logTaskRun(taskRun, Level.INFO, "Some log with an {}", "attribute");
taskRun = TaskRun.builder().namespace("namespace").flowId("flow").executionId("execution").taskId("task").id("taskRun").value("value").build();
Logs.logTaskRun(taskRun, Level.INFO, "Some log");
Logs.logTaskRun(taskRun, Level.INFO, "Some log with an {}", "attribute");
List<ILoggingEvent> logs = MEMORY_APPENDER.getLogs();
assertThat(logs).hasSize(4);
assertThat(logs.getFirst().getLoggerName()).isEqualTo("worker.tenant.namespace.flow.task");
}
private static class InMemoryAppender extends AppenderBase<ILoggingEvent> {
private final List<ILoggingEvent> logs = new CopyOnWriteArrayList<>();
@Override
protected void append(ILoggingEvent event) {
logs.add(event);
}
public List<ILoggingEvent> getLogs() {
return logs;
}
public void clear() {
logs.clear();
}
}
}
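
The assertions above expect hierarchical logger names such as executor.tenant.namespace.flow, scheduler.tenant.namespace.flow.trigger, and worker.tenant.namespace.flow.task. A standalone sketch of how such dotted names can be assembled; the helper below is illustrative and is not the Logs class itself:

import java.util.StringJoiner;

final class LoggerNameSketch {
    // Builds a dotted logger name from a component prefix and path segments.
    static String loggerName(String component, String... segments) {
        StringJoiner joiner = new StringJoiner(".");
        joiner.add(component);
        for (String segment : segments) {
            joiner.add(segment);
        }
        return joiner.toString();
    }

    public static void main(String[] args) {
        System.out.println(loggerName("executor", "tenant", "namespace", "flow"));
        System.out.println(loggerName("scheduler", "tenant", "namespace", "flow", "trigger"));
        System.out.println(loggerName("worker", "tenant", "namespace", "flow", "task"));
    }
}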

View File

@@ -216,23 +216,4 @@ class MapUtilsTest {
"k1.k4", "v2"
));
}
@Test
@SuppressWarnings("unchecked")
void mergeShouldNotDuplicateListElements() {
Map<String, Object> first = Map.of(
"key1", "value1",
"key2", List.of("something", "else")
);
Map<String, Object> second = Map.of(
"key2", List.of("something", "other"),
"key3", "value3"
);
Map<String, Object> results = MapUtils.merge(first, second);
assertThat(results).hasSize(3);
List<String> list = (List<String>) results.get("key2");
assertThat(list).hasSize(3);
}
}
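
The mergeShouldNotDuplicateListElements test shown above asserts union-without-duplicates semantics when both maps hold a list under the same key: ["something", "else"] merged with ["something", "other"] yields three elements, not four. An illustrative sketch of that list behavior (not MapUtils.merge itself):

import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;

final class ListMergeSketch {
    // De-duplicated union of two lists, preserving first-seen order.
    static List<Object> mergeLists(List<?> first, List<?> second) {
        LinkedHashSet<Object> union = new LinkedHashSet<>(first);
        union.addAll(second);
        return new ArrayList<>(union);
    }

    public static void main(String[] args) {
        List<Object> merged = mergeLists(List.of("something", "else"), List.of("something", "other"));
        System.out.println(merged);        // [something, else, other]
        System.out.println(merged.size()); // 3, not 4
    }
}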

View File

@@ -20,6 +20,7 @@ import org.junit.jupiter.api.parallel.ExecutionMode;
import reactor.core.publisher.Flux;
import java.io.ByteArrayInputStream;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.time.Duration;
@@ -44,6 +45,9 @@ class NamespaceFilesUtilsTest {
@Named(QueueFactoryInterface.WORKERTASKLOG_NAMED)
QueueInterface<LogEntry> workerTaskLogQueue;
@Inject
NamespaceFilesUtils namespaceFilesUtils;
@Inject
NamespaceFactory namespaceFactory;
@@ -62,7 +66,7 @@ class NamespaceFilesUtilsTest {
namespaceStorage.putFile(Path.of("/" + i + ".txt"), data);
}
NamespaceFilesUtils.loadNamespaceFiles(runContext, NamespaceFiles.builder().build());
namespaceFilesUtils.loadNamespaceFiles(runContext, NamespaceFiles.builder().build());
List<LogEntry> logEntry = TestsUtils.awaitLogs(logs, 1);
receive.blockLast();
@@ -87,7 +91,7 @@ class NamespaceFilesUtilsTest {
namespaceStorage.putFile(Path.of("/" + i + ".txt"), data);
}
NamespaceFilesUtils.loadNamespaceFiles(runContext, NamespaceFiles.builder().namespaces(Property.ofValue(List.of(namespace))).build());
namespaceFilesUtils.loadNamespaceFiles(runContext, NamespaceFiles.builder().namespaces(Property.ofValue(List.of(namespace))).build());
List<LogEntry> logEntry = TestsUtils.awaitLogs(logs, 1);
receive.blockLast();
@@ -112,7 +116,7 @@ class NamespaceFilesUtilsTest {
namespaceStorage.putFile(Path.of("/folder2/test.txt"), data);
namespaceStorage.putFile(Path.of("/test.txt"), data);
NamespaceFilesUtils.loadNamespaceFiles(runContext, NamespaceFiles.builder().namespaces(Property.ofValue(List.of(namespace))).build());
namespaceFilesUtils.loadNamespaceFiles(runContext, NamespaceFiles.builder().namespaces(Property.ofValue(List.of(namespace))).build());
List<LogEntry> logEntry = TestsUtils.awaitLogs(logs, 1);
receive.blockLast();
@@ -137,7 +141,7 @@ class NamespaceFilesUtilsTest {
namespaceFactory.of(MAIN_TENANT, ns1, storageInterface).putFile(Path.of("/test.txt"), data);
namespaceFactory.of(MAIN_TENANT, ns2, storageInterface).putFile(Path.of("/test.txt"), data);
NamespaceFilesUtils.loadNamespaceFiles(runContext, NamespaceFiles.builder()
namespaceFilesUtils.loadNamespaceFiles(runContext, NamespaceFiles.builder()
.namespaces(Property.ofValue(List.of(ns1, ns2)))
.folderPerNamespace(Property.ofValue(true))
.build());
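
This diff swaps a static call, NamespaceFilesUtils.loadNamespaceFiles(...), for the same call on an injected namespaceFilesUtils instance. A minimal sketch of that pattern with hypothetical class names; injecting the collaborator lets a test substitute a mock, which a static call does not allow as easily:

class NamespaceFileLoader {
    void load(String namespace) {
        System.out.println("loading namespace files for " + namespace);
    }
}

class TaskUsingLoader {
    private final NamespaceFileLoader loader;

    // Constructor injection: a DI container (or a test) supplies the instance,
    // so a mock can stand in where a static call could not be replaced.
    TaskUsingLoader(NamespaceFileLoader loader) {
        this.loader = loader;
    }

    void run() {
        loader.load("io.kestra.tests");
    }
}

public class InjectionSketch {
    public static void main(String[] args) {
        new TaskUsingLoader(new NamespaceFileLoader()).run();
    }
}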

View File

@@ -1,30 +0,0 @@
package io.kestra.core.utils;
import io.kestra.core.models.Setting;
import io.kestra.core.repositories.SettingRepositoryInterface;
import io.micronaut.test.extensions.junit5.annotation.MicronautTest;
import jakarta.inject.Inject;
import org.junit.jupiter.api.Test;
import java.util.Optional;
import static org.assertj.core.api.Assertions.assertThat;
@MicronautTest
class VersionProviderTest {
@Inject
private VersionProvider versionProvider;
@Inject
private SettingRepositoryInterface settingRepository;
@Test
void shouldResolveVersion() {
assertThat(versionProvider.getVersion()).endsWith("-SNAPSHOT");
// check that the version is persisted in settings
Optional<Setting> versionSettings = settingRepository.findByKey(Setting.INSTANCE_VERSION);
assertThat(versionSettings).isPresent();
assertThat(versionSettings.get().getValue()).isEqualTo(versionProvider.getVersion());
}
}

View File

@@ -9,15 +9,9 @@ import io.kestra.core.utils.TestsUtils;
import io.kestra.core.junit.annotations.KestraTest;
import jakarta.inject.Inject;
import org.junit.jupiter.api.Test;
import io.kestra.core.models.validations.ValidateConstraintViolation;
import io.kestra.core.services.FlowService;
import jakarta.validation.ConstraintViolationException;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.JsonLocation;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.List;
import java.io.File;
import java.net.URL;
import java.util.Optional;
@@ -29,107 +23,6 @@ class FlowValidationTest {
@Inject
private ModelValidator modelValidator;
@Inject
private FlowService flowService;
private static final ObjectMapper mapper = new ObjectMapper();
// Helper class to create JsonProcessingException with location
private static class TestJsonProcessingException extends JsonProcessingException {
public TestJsonProcessingException(String msg, JsonLocation location) {
super(msg, location);
}
public TestJsonProcessingException(String msg) {
super(msg);
}
}
@Test
void testFormatYamlErrorMessage_WithExpectedFieldName() throws JsonProcessingException {
JsonProcessingException e = new TestJsonProcessingException("Expected a field name", new JsonLocation(null, 100, 5, 10));
Object dummyTarget = new Object(); // Dummy target for toConstraintViolationException
ConstraintViolationException result = YamlParser.toConstraintViolationException(dummyTarget, "test resource", e);
assertThat(result.getMessage()).contains("YAML syntax error: Invalid structure").contains("(at line 5)");
}
@Test
void testFormatYamlErrorMessage_WithMappingStartEvent() throws JsonProcessingException {
JsonProcessingException e = new TestJsonProcessingException("MappingStartEvent", new JsonLocation(null, 200, 3, 5));
Object dummyTarget = new Object();
ConstraintViolationException result = YamlParser.toConstraintViolationException(dummyTarget, "test resource", e);
assertThat(result.getMessage()).contains("YAML syntax error: Unexpected mapping start").contains("(at line 3)");
}
@Test
void testFormatYamlErrorMessage_WithScalarValue() throws JsonProcessingException {
JsonProcessingException e = new TestJsonProcessingException("Scalar value", new JsonLocation(null, 150, 7, 12));
Object dummyTarget = new Object();
ConstraintViolationException result = YamlParser.toConstraintViolationException(dummyTarget, "test resource", e);
assertThat(result.getMessage()).contains("YAML syntax error: Expected a simple value").contains("(at line 7)");
}
@Test
void testFormatYamlErrorMessage_GenericError() throws JsonProcessingException {
JsonProcessingException e = new TestJsonProcessingException("Some other error", new JsonLocation(null, 50, 2, 8));
Object dummyTarget = new Object();
ConstraintViolationException result = YamlParser.toConstraintViolationException(dummyTarget, "test resource", e);
assertThat(result.getMessage()).contains("YAML parsing error: Some other error").contains("(at line 2)");
}
@Test
void testFormatYamlErrorMessage_NoLocation() throws JsonProcessingException {
JsonProcessingException e = new TestJsonProcessingException("Expected a field name");
Object dummyTarget = new Object();
ConstraintViolationException result = YamlParser.toConstraintViolationException(dummyTarget, "test resource", e);
assertThat(result.getMessage()).contains("YAML syntax error: Invalid structure").doesNotContain("at line");
}
@Test
void testValidateFlowWithYamlSyntaxError() {
String invalidYaml = """
id: test-flow
namespace: io.kestra.unittest
tasks:
- id:hello
type: io.kestra.plugin.core.log.Log
message: {{ abc }}
""";
List<ValidateConstraintViolation> results = flowService.validate("my-tenant", invalidYaml);
assertThat(results).hasSize(1);
assertThat(results.getFirst().getConstraints()).contains("YAML parsing error").contains("at line");
}
@Test
void testValidateFlowWithUndefinedVariable() {
String yamlWithUndefinedVar = """
id: test-flow
namespace: io.kestra.unittest
tasks:
- id: hello
type: io.kestra.plugin.core.log.Log
message: {{ undefinedVar }}
""";
List<ValidateConstraintViolation> results = flowService.validate("my-tenant", yamlWithUndefinedVar);
assertThat(results).hasSize(1);
assertThat(results.getFirst().getConstraints()).contains("Validation error");
}
@Test
void invalidRecursiveFlow() {
Flow flow = this.parse("flows/invalids/recursive-flow.yaml");
@@ -237,4 +130,4 @@ class FlowValidationTest {
return YamlParser.parse(file, Flow.class);
}
}
}
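
The removed tests above pin down how raw Jackson parse errors are rewritten into friendlier messages, with a line number appended when a location is known. A standalone sketch of that mapping, reconstructed only from the assertions above; the real YamlParser.toConstraintViolationException wraps the result in a ConstraintViolationException and may differ in detail:

import com.fasterxml.jackson.core.JsonLocation;

final class YamlErrorMessageSketch {
    static String format(String rawMessage, JsonLocation location) {
        String friendly;
        if (rawMessage.contains("Expected a field name")) {
            friendly = "YAML syntax error: Invalid structure";
        } else if (rawMessage.contains("MappingStartEvent")) {
            friendly = "YAML syntax error: Unexpected mapping start";
        } else if (rawMessage.contains("Scalar value")) {
            friendly = "YAML syntax error: Expected a simple value";
        } else {
            friendly = "YAML parsing error: " + rawMessage;
        }
        if (location != null) {
            friendly += " (at line " + location.getLineNr() + ")";
        }
        return friendly;
    }

    public static void main(String[] args) {
        System.out.println(format("Expected a field name", new JsonLocation(null, 100L, 5, 10)));
        System.out.println(format("Some other error", null)); // no location, no line suffix
    }
}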

View File

@@ -8,7 +8,6 @@ import io.kestra.core.models.flows.Output;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.flows.State.History;
import io.kestra.core.runners.DefaultRunContext;
import io.kestra.core.runners.InputAndOutput;
import io.kestra.core.runners.SubflowExecutionResult;
import io.kestra.core.services.VariablesService;
import io.micronaut.context.ApplicationContext;
@@ -47,15 +46,11 @@ class SubflowTest {
@Mock
private ApplicationContext applicationContext;
@Mock
private InputAndOutput inputAndOutput;
@BeforeEach
void beforeEach() {
Mockito.when(applicationContext.getBean(VariablesService.class)).thenReturn(new VariablesService());
Mockito.when(runContext.logger()).thenReturn(LOG);
Mockito.when(runContext.getApplicationContext()).thenReturn(applicationContext);
Mockito.when(runContext.inputAndOutput()).thenReturn(inputAndOutput);
}
@Test
@@ -123,7 +118,7 @@ class SubflowTest {
Map<String, Object> outputs = Map.of("key", "value");
Mockito.when(runContext.render(Mockito.anyMap())).thenReturn(outputs);
Mockito.when(inputAndOutput.renderOutputs(Mockito.anyList())).thenReturn(Map.of("key", "value"));
Subflow subflow = Subflow.builder()
.outputs(outputs)
@@ -164,7 +159,6 @@ class SubflowTest {
Output output = Output.builder().id("key").value("value").build();
Mockito.when(runContext.render(Mockito.anyMap())).thenReturn(Map.of(output.getId(), output.getValue()));
Mockito.when(inputAndOutput.typedOutputs(Mockito.any(), Mockito.any(), Mockito.anyMap())).thenReturn(Map.of("key", "value"));
Flow flow = Flow.builder()
.outputs(List.of(output))
.build();

View File

@@ -1,11 +1,9 @@
package io.kestra.plugin.core.flow;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import static org.assertj.core.api.Assertions.as;
import static org.assertj.core.api.Assertions.assertThat;
import com.google.common.collect.ImmutableMap;
import io.kestra.core.junit.annotations.ExecuteFlow;
import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.junit.annotations.LoadFlows;
import io.kestra.core.models.executions.Execution;
@@ -102,14 +100,4 @@ class SwitchTest {
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.FAILED);
}
@Test
@ExecuteFlow("flows/valids/switch-in-concurrent-loop.yaml")
void switchInConcurrentLoop(Execution execution) {
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(execution.getTaskRunList()).hasSize(5);
// we check that OOMCRM_EB_DD_000 and OOMCRM_EB_DD_001 have been processed once
assertThat(execution.getTaskRunList().stream().filter(t -> t.getTaskId().equals("OOMCRM_EB_DD_000")).count()).isEqualTo(1);
assertThat(execution.getTaskRunList().stream().filter(t -> t.getTaskId().equals("OOMCRM_EB_DD_001")).count()).isEqualTo(1);
}
}

View File

@@ -57,7 +57,7 @@ class ScheduleOnDatesTest {
}
@Test
public void shouldReturnFirstDateWhenNextEvaluationDateAndNoExistingTriggerDate() {
public void shouldReturnFirstDateWhenNextEvaluationDateAndNoExistingTriggerDate() throws Exception {
// given
var now = ZonedDateTime.now();
var before = now.minusMinutes(1).truncatedTo(ChronoUnit.SECONDS);
@@ -75,7 +75,7 @@ class ScheduleOnDatesTest {
ZonedDateTime nextDate = scheduleOnDates.nextEvaluationDate(conditionContext, Optional.empty());
// then
assertThat(nextDate).isEqualTo(after);
assertThat(nextDate).isEqualTo(before);
}
@Test

View File

@@ -13,7 +13,6 @@ import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.Type;
import io.kestra.core.models.flows.input.StringInput;
import io.kestra.core.models.flows.input.MultiselectInput;
import io.kestra.core.models.triggers.AbstractTrigger;
import io.kestra.core.models.triggers.TriggerContext;
import io.kestra.core.runners.RunContextFactory;
@@ -104,9 +103,8 @@ class ScheduleTest {
);
assertThat(evaluate.isPresent()).isTrue();
assertThat(evaluate.get().getLabels()).hasSize(4);
assertThat(evaluate.get().getLabels()).hasSize(3);
assertTrue(evaluate.get().getLabels().stream().anyMatch(label -> label.key().equals(Label.CORRELATION_ID)));
assertTrue(evaluate.get().getLabels().stream().anyMatch(label -> label.equals(new Label(Label.FROM, "trigger"))));
assertThat(evaluate.get().getVariables()).containsEntry("custom_var", "VARIABLE VALUE");
var vars = evaluate.get().getTrigger().getVariables();
var inputs = evaluate.get().getInputs();
@@ -139,9 +137,8 @@ class ScheduleTest {
);
assertThat(evaluate.isPresent()).isTrue();
assertThat(evaluate.get().getLabels()).hasSize(4);
assertThat(evaluate.get().getLabels()).hasSize(3);
assertTrue(evaluate.get().getLabels().stream().anyMatch(label -> label.key().equals(Label.CORRELATION_ID)));
assertTrue(evaluate.get().getLabels().stream().anyMatch(label -> label.equals(new Label(Label.FROM, "trigger"))));
assertThat(evaluate.get().getVariables()).containsEntry("custom_var", "VARIABLE VALUE");
var inputs = evaluate.get().getInputs();
@@ -478,81 +475,6 @@ class ScheduleTest {
assertThat(result.get().getVariables()).containsEntry("custom_var", "VARIABLE VALUE");
}
@Test
void successWithMultiselectInputDefaults() throws Exception {
Schedule trigger = Schedule.builder().id("schedule").type(Schedule.class.getName()).cron("0 0 1 * *").build();
ZonedDateTime date = ZonedDateTime.now()
.withDayOfMonth(1)
.withHour(0)
.withMinute(0)
.withSecond(0)
.truncatedTo(ChronoUnit.SECONDS)
.minusMonths(1);
Optional<Execution> evaluate = trigger.evaluate(
conditionContextWithMultiselectInput(trigger),
triggerContext(date, trigger));
assertThat(evaluate.isPresent()).isTrue();
var inputs = evaluate.get().getInputs();
// Verify MULTISELECT input with explicit defaults works correctly
assertThat(inputs.get("multiselectInput")).isEqualTo(List.of("option1", "option2"));
}
@Test
void successWithMultiselectInputAutoSelectFirst() throws Exception {
Schedule trigger = Schedule.builder().id("schedule").type(Schedule.class.getName()).cron("0 0 1 * *").build();
ZonedDateTime date = ZonedDateTime.now()
.withDayOfMonth(1)
.withHour(0)
.withMinute(0)
.withSecond(0)
.truncatedTo(ChronoUnit.SECONDS)
.minusMonths(1);
Optional<Execution> evaluate = trigger.evaluate(
conditionContextWithMultiselectAutoSelectFirst(trigger),
triggerContext(date, trigger));
assertThat(evaluate.isPresent()).isTrue();
var inputs = evaluate.get().getInputs();
// Verify MULTISELECT input with autoSelectFirst defaults to first option
assertThat(inputs.get("multiselectAutoSelect")).isEqualTo(List.of("first"));
}
@Test
void successWithMultiselectInputProvidedValue() throws Exception {
// Test that provided values override defaults for MULTISELECT
Schedule trigger = Schedule.builder()
.id("schedule")
.type(Schedule.class.getName())
.cron("0 0 1 * *")
.inputs(Map.of("multiselectInput", List.of("option3")))
.build();
ZonedDateTime date = ZonedDateTime.now()
.withDayOfMonth(1)
.withHour(0)
.withMinute(0)
.withSecond(0)
.truncatedTo(ChronoUnit.SECONDS)
.minusMonths(1);
Optional<Execution> evaluate = trigger.evaluate(
conditionContextWithMultiselectInput(trigger),
triggerContext(date, trigger));
assertThat(evaluate.isPresent()).isTrue();
var inputs = evaluate.get().getInputs();
// Verify provided value overrides defaults
assertThat(inputs.get("multiselectInput")).isEqualTo(List.of("option3"));
}
private ConditionContext conditionContext(AbstractTrigger trigger) {
Flow flow = Flow.builder()
.id(IdUtils.create())
@@ -582,79 +504,17 @@ class ScheduleTest {
.build();
}
private ConditionContext conditionContextWithMultiselectInput(AbstractTrigger trigger) {
Flow flow = Flow.builder()
.id(IdUtils.create())
.namespace("io.kestra.tests")
.labels(
List.of(
new Label("flow-label-1", "flow-label-1"),
new Label("flow-label-2", "flow-label-2")))
.variables(Map.of("custom_var", "VARIABLE VALUE"))
.inputs(List.of(
MultiselectInput.builder()
.id("multiselectInput")
.type(Type.MULTISELECT)
.values(List.of("option1", "option2", "option3"))
.defaults(Property.ofValue(List.of("option1", "option2")))
.build()))
.build();
TriggerContext triggerContext = TriggerContext.builder()
.namespace(flow.getNamespace())
.flowId(flow.getId())
.triggerId(trigger.getId())
.build();
return ConditionContext.builder()
.runContext(runContextInitializer.forScheduler((DefaultRunContext) runContextFactory.of(),
triggerContext, trigger))
.flow(flow)
.build();
}
private ConditionContext conditionContextWithMultiselectAutoSelectFirst(AbstractTrigger trigger) {
Flow flow = Flow.builder()
.id(IdUtils.create())
.namespace("io.kestra.tests")
.labels(
List.of(
new Label("flow-label-1", "flow-label-1"),
new Label("flow-label-2", "flow-label-2")))
.variables(Map.of("custom_var", "VARIABLE VALUE"))
.inputs(List.of(
MultiselectInput.builder()
.id("multiselectAutoSelect")
.type(Type.MULTISELECT)
.values(List.of("first", "second", "third"))
.autoSelectFirst(true)
.build()))
.build();
TriggerContext triggerContext = TriggerContext.builder()
.namespace(flow.getNamespace())
.flowId(flow.getId())
.triggerId(trigger.getId())
.build();
return ConditionContext.builder()
.runContext(runContextInitializer.forScheduler((DefaultRunContext) runContextFactory.of(),
triggerContext, trigger))
.flow(flow)
.build();
}
private ZonedDateTime dateFromVars(String date, ZonedDateTime expexted) {
return ZonedDateTime.parse(date).withZoneSameInstant(expexted.getZone());
}
@Test
void shouldGetNextExecutionDateWithConditionMatchingFutureDate() throws InternalException {
ZonedDateTime now = ZonedDateTime.now().withZoneSameLocal(ZoneId.of("Europe/Paris"));
OffsetTime before = now.minusHours(1).toOffsetDateTime().toOffsetTime().withMinute(0).withSecond(0).withNano(0);
OffsetTime after = now.minusHours(4).toOffsetDateTime().toOffsetTime().withMinute(0).withSecond(0).withNano(0);
Schedule trigger = Schedule.builder()
.id("schedule").type(Schedule.class.getName())
.cron("0 * * * *") // every hour
@@ -667,25 +527,25 @@ class ScheduleTest {
.build()
))
.build();
TriggerContext triggerContext = triggerContext(now, trigger).toBuilder().build();
ConditionContext conditionContext = ConditionContext.builder()
.runContext(runContextInitializer.forScheduler((DefaultRunContext) runContextFactory.of(), triggerContext, trigger))
.build();
Optional<ZonedDateTime> result = trigger.truePreviousNextDateWithCondition(trigger.executionTime(), conditionContext, now, true);
assertThat(result).isNotEmpty();
}
@Test
void shouldGetNextExecutionDateWithConditionMatchingCurrentDate() throws InternalException {
ZonedDateTime now = ZonedDateTime.now().withZoneSameLocal(ZoneId.of("Europe/Paris"));
OffsetTime before = now.plusHours(2).toOffsetDateTime().toOffsetTime().withMinute(0).withSecond(0).withNano(0);
OffsetTime after = now.minusHours(2).toOffsetDateTime().toOffsetTime().withMinute(0).withSecond(0).withNano(0);
Schedule trigger = Schedule.builder()
.id("schedule").type(Schedule.class.getName())
.cron("*/30 * * * * *")
@@ -698,13 +558,13 @@ class ScheduleTest {
.build()
))
.build();
TriggerContext triggerContext = triggerContext(now, trigger).toBuilder().build();
ConditionContext conditionContext = ConditionContext.builder()
.runContext(runContextInitializer.forScheduler((DefaultRunContext) runContextFactory.of(), triggerContext, trigger))
.build();
Optional<ZonedDateTime> result = trigger.truePreviousNextDateWithCondition(trigger.executionTime(), conditionContext, now, true);
assertThat(result).isNotEmpty();
}

View File

@@ -8,4 +8,4 @@ concurrency:
tasks:
- id: sleep
type: io.kestra.plugin.core.flow.Sleep
duration: PT2S
duration: PT10S


View File

@@ -1,11 +0,0 @@
id: flow-concurrency-queue-killed
namespace: io.kestra.tests
concurrency:
behavior: QUEUE
limit: 1
tasks:
- id: sleep
type: io.kestra.plugin.core.flow.Sleep
duration: PT1M

View File

@@ -41,10 +41,7 @@ inputs:
- id: instantDefaults
type: DATETIME
defaults: "2013-08-09T14:19:00Z"
- id: json1
type: JSON
required: false
- id: json2
- id: json
type: JSON
required: false
- id: uri
@@ -98,7 +95,7 @@ inputs:
- name: array
type: ARRAY
itemType: INT
- name: yaml1
- name: yaml
type: YAML
defaults:
property: something
@@ -107,15 +104,6 @@ inputs:
value: value1
- key: key2
value: value2
- name: yaml2
type: YAML
defaults:
property: something
list:
- key: key1
value: value1
- key: key2
value: value2
# required: true combined with an empty default value only works if default values are serialized correctly, which is what this input tests.
- name: empty
type: STRING
@@ -152,18 +140,12 @@ tasks:
type: io.kestra.plugin.core.debug.Return
format: "{{taskrun.value}}"
- id: json1
- id: json
type: io.kestra.plugin.core.debug.Return
format: "{{inputs.json1}}"
- id: json2
type: io.kestra.plugin.core.debug.Return
format: "{{inputs.json2}}"
format: "{{inputs.json}}"
- id: jsonOutput
type: io.kestra.plugin.core.debug.Return
format: "{{outputs.json1.value}}"
- id: yamlOutput1
format: "{{outputs.json.value}}"
- id: yamlOutput
type: io.kestra.plugin.core.debug.Return
format: "{{inputs.yaml1}}"
- id: yamlOutput2
type: io.kestra.plugin.core.debug.Return
format: "{{inputs.yaml2}}"
format: "{{inputs.yaml}}"

View File

@@ -1,23 +0,0 @@
id: switch-in-concurrent-loop
namespace: io.kestra.tests
tasks:
- id: iterate_and_check_name
type: io.kestra.plugin.core.flow.ForEach
tasks:
- id: switch
type: io.kestra.plugin.core.flow.Switch
value: "{{ taskrun.value }}"
cases:
"Alice":
- id: OOMCRM_EB_DD_000
type: io.kestra.plugin.core.log.Log
message: Alice
"Bob":
- id: OOMCRM_EB_DD_001
type: io.kestra.plugin.core.log.Log
message: Bob
values: ["Alice", "Bob"]
concurrencyLimit: 0

View File

@@ -13,19 +13,18 @@ tasks:
- io.test.second
- io.test.third
enabled: true
folderPerNamespace: true
exclude:
- /ignore/**
tasks:
- id: t1
type: io.kestra.core.tasks.test.Read
path: "/io.test.third/test/a/b/c/1.txt"
path: "/test/a/b/c/1.txt"
- id: t2
type: io.kestra.core.tasks.test.Read
path: "/io.test.second/a/b/c/2.txt"
path: "/a/b/c/2.txt"
- id: t3
type: io.kestra.core.tasks.test.Read
path: "/io.test.first/a/b/3.txt"
path: "/a/b/3.txt"
- id: t4
type: io.kestra.core.tasks.test.Read
path: "/ignore/4.txt"

View File

@@ -402,11 +402,10 @@ public class ExecutorService {
if (flow.getOutputs() != null) {
RunContext runContext = runContextFactory.of(executor.getFlow(), executor.getExecution());
var inputAndOutput = runContext.inputAndOutput();
try {
Map<String, Object> outputs = inputAndOutput.renderOutputs(flow.getOutputs());
outputs = inputAndOutput.typedOutputs(flow, executor.getExecution(), outputs);
Map<String, Object> outputs = FlowInputOutput.renderFlowOutputs(flow.getOutputs(), runContext);
outputs = flowInputOutput.typedOutputs(flow, executor.getExecution(), outputs);
newExecution = newExecution.withOutputs(outputs);
} catch (Exception e) {
Logs.logExecution(

View File

@@ -16,7 +16,7 @@ public final class H2RepositoryUtils {
case MONTH:
return DSL.field("FORMATDATETIME(\"" + dateField + "\", 'yyyy-MM')", Date.class);
case WEEK:
return DSL.field("DATE_TRUNC('WEEK', \"" + dateField + "\")", Date.class);
return DSL.field("FORMATDATETIME(\"" + dateField + "\", 'YYYY-ww')", Date.class);
case DAY:
return DSL.field("FORMATDATETIME(\"" + dateField + "\", 'yyyy-MM-dd')", Date.class);
case HOUR:
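
This hunk switches the WEEK bucket between two H2 expressions. A sketch of both alternatives as jOOQ plain-SQL fields: FORMATDATETIME with 'YYYY-ww' yields a week label such as 2025-49 (textual; the repository code maps it through Date.class, String.class is used here to show its shape), while DATE_TRUNC('WEEK', ...) yields the timestamp of the week's first day. Both group rows by week, just with differently typed keys. Field names are illustrative:

import java.sql.Date;
import org.jooq.Field;
import org.jooq.impl.DSL;

final class WeekBucketSketch {
    // Week label such as "2025-49".
    static Field<String> weekLabel(String dateField) {
        return DSL.field("FORMATDATETIME(\"" + dateField + "\", 'YYYY-ww')", String.class);
    }

    // Timestamp of the first day of the week.
    static Field<Date> weekStart(String dateField) {
        return DSL.field("DATE_TRUNC('WEEK', \"" + dateField + "\")", Date.class);
    }

    public static void main(String[] args) {
        System.out.println(weekLabel("start_date")); // renders the plain SQL expression
        System.out.println(weekStart("start_date"));
    }
}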

View File

@@ -3,5 +3,5 @@ package io.kestra.repository.h2;
import io.kestra.jdbc.repository.AbstractJdbcFlowRepositoryTest;
public class H2FlowRepositoryTest extends AbstractJdbcFlowRepositoryTest {
}

View File

@@ -1,6 +0,0 @@
package io.kestra.runner.h2;
import io.kestra.jdbc.runner.JdbcConcurrencyRunnerTest;
public class H2RunnerConcurrencyTest extends JdbcConcurrencyRunnerTest {
}

Some files were not shown because too many files have changed in this diff.