mirror of
https://github.com/kestra-io/kestra.git
synced 2025-12-25 11:12:12 -05:00
Compare commits
147 Commits
run-develo
...
docs/purge
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
42c8334e2e | ||
|
|
7ea95f393e | ||
|
|
6935900699 | ||
|
|
123d7fb426 | ||
|
|
0bc8e8d74a | ||
|
|
e0c3cfa1f9 | ||
|
|
7f77b24ae0 | ||
|
|
ec6820dc25 | ||
|
|
d94193c143 | ||
|
|
c9628047fa | ||
|
|
4cbc069af4 | ||
|
|
eabe573fe6 | ||
|
|
ecd64617c3 | ||
|
|
a5650bca0f | ||
|
|
ed59e262d4 | ||
|
|
a5f9d54f7d | ||
|
|
47f4f43198 | ||
|
|
5d31c97f7f | ||
|
|
f8107285c4 | ||
|
|
8dc8dc1796 | ||
|
|
834dfd2947 | ||
|
|
6edb88841f | ||
|
|
5653531628 | ||
|
|
ee61276106 | ||
|
|
abcf76f7b4 | ||
|
|
67ada7f61b | ||
|
|
0c13633f77 | ||
|
|
a6cf2015ff | ||
|
|
2f9216c70b | ||
|
|
1903e6fac5 | ||
|
|
2d2cb00cab | ||
|
|
01b5441d16 | ||
|
|
efc778e294 | ||
|
|
60235a4e73 | ||
|
|
b167c52e76 | ||
|
|
216b124294 | ||
|
|
b6e4df8de2 | ||
|
|
429e7c7945 | ||
|
|
e302b4be4a | ||
|
|
8e7ad9ae25 | ||
|
|
41a11abf16 | ||
|
|
1be16d5e9d | ||
|
|
e263224d7b | ||
|
|
12b89588a6 | ||
|
|
eae5eb80cb | ||
|
|
c0f6298484 | ||
|
|
ba1d6b2232 | ||
|
|
048dcb80cc | ||
|
|
a81de811d7 | ||
|
|
a960a9f982 | ||
|
|
c4d4fd935f | ||
|
|
f063a5a2d9 | ||
|
|
ac91d5605f | ||
|
|
e3d3c3651b | ||
|
|
5b6836237e | ||
|
|
2f8284b133 | ||
|
|
42992fd7c3 | ||
|
|
3a481f93d3 | ||
|
|
7e964ae563 | ||
|
|
25e54edbc9 | ||
|
|
e88dc7af76 | ||
|
|
b7a027f0dc | ||
|
|
98141d6010 | ||
|
|
bf119ab6df | ||
|
|
9bd6353b77 | ||
|
|
c0ab581cf1 | ||
|
|
0f38e19663 | ||
|
|
0c14ea621c | ||
|
|
fb14e57a7c | ||
|
|
09c707d865 | ||
|
|
86e08d71dd | ||
|
|
94c00cedeb | ||
|
|
eb12832b1e | ||
|
|
687cefdfb9 | ||
|
|
8eae8aba72 | ||
|
|
abdbb8d364 | ||
|
|
8a55ab3af6 | ||
|
|
b7cb933e1e | ||
|
|
3af003e5e4 | ||
|
|
c3861a5532 | ||
|
|
ae1f10f45a | ||
|
|
612dccfb8c | ||
|
|
2ae8df2f5f | ||
|
|
1abfa74a16 | ||
|
|
69a793b227 | ||
|
|
35ccb3e39b | ||
|
|
3a7fcb2aa1 | ||
|
|
103c5b92e9 | ||
|
|
5253eeef95 | ||
|
|
848f835191 | ||
|
|
3e55e67534 | ||
|
|
7bca8b4924 | ||
|
|
56febfb415 | ||
|
|
925b8c6954 | ||
|
|
708816fe67 | ||
|
|
5502473fa4 | ||
|
|
c6cf0147a4 | ||
|
|
2951f4b4bc | ||
|
|
4ea13e258b | ||
|
|
3f8dcb47fd | ||
|
|
42dc3b930c | ||
|
|
97a78abd28 | ||
|
|
b3b2ef1b5a | ||
|
|
596a26a137 | ||
|
|
8a9a1df436 | ||
|
|
55d0880ed3 | ||
|
|
a74ebd5cd6 | ||
|
|
f3aed38964 | ||
|
|
2595e56199 | ||
|
|
e821bd7f65 | ||
|
|
09762d2a8d | ||
|
|
018c22918f | ||
|
|
3e9c8cf7da | ||
|
|
008404e442 | ||
|
|
2b224bcde8 | ||
|
|
1977b61693 | ||
|
|
8e2267f86c | ||
|
|
24355c2a88 | ||
|
|
51adcfa908 | ||
|
|
a55baa1f96 | ||
|
|
32793fde18 | ||
|
|
4381d585ec | ||
|
|
e595e26c45 | ||
|
|
b833cf28b5 | ||
|
|
ac11e9545c | ||
|
|
a07df5f6cd | ||
|
|
f626c85346 | ||
|
|
e15b53ebb5 | ||
|
|
7edb6bc379 | ||
|
|
78c81f932b | ||
|
|
56bb3ca29c | ||
|
|
14029e8c14 | ||
|
|
bea3d63d89 | ||
|
|
24a3bbd303 | ||
|
|
f9932af2e8 | ||
|
|
e0410c8f24 | ||
|
|
424a6cb41a | ||
|
|
afde71e913 | ||
|
|
086c32e711 | ||
|
|
710abcfaac | ||
|
|
be951d015c | ||
|
|
a07260bef4 | ||
|
|
dd19f8391d | ||
|
|
354873e220 | ||
|
|
386d4a15f0 | ||
|
|
1b75f15680 | ||
|
|
957bf74d97 |
6
.github/dependabot.yml
vendored
6
.github/dependabot.yml
vendored
@@ -51,7 +51,7 @@ updates:
|
||||
|
||||
storybook:
|
||||
applies-to: version-updates
|
||||
patterns: ["storybook*", "@storybook/*"]
|
||||
patterns: ["storybook*", "@storybook/*", "eslint-plugin-storybook"]
|
||||
|
||||
vitest:
|
||||
applies-to: version-updates
|
||||
@@ -67,10 +67,10 @@ updates:
|
||||
"@types/*",
|
||||
"storybook*",
|
||||
"@storybook/*",
|
||||
"eslint-plugin-storybook",
|
||||
"vitest",
|
||||
"@vitest/*",
|
||||
# Temporary exclusion of these packages from major updates
|
||||
"eslint-plugin-storybook",
|
||||
"eslint-plugin-vue",
|
||||
]
|
||||
|
||||
@@ -84,6 +84,7 @@ updates:
|
||||
"@types/*",
|
||||
"storybook*",
|
||||
"@storybook/*",
|
||||
"eslint-plugin-storybook",
|
||||
"vitest",
|
||||
"@vitest/*",
|
||||
# Temporary exclusion of these packages from minor updates
|
||||
@@ -102,6 +103,7 @@ updates:
|
||||
"@types/*",
|
||||
"storybook*",
|
||||
"@storybook/*",
|
||||
"eslint-plugin-storybook",
|
||||
"vitest",
|
||||
"@vitest/*",
|
||||
]
|
||||
|
||||
1
.github/workflows/main-build.yml
vendored
1
.github/workflows/main-build.yml
vendored
@@ -64,6 +64,7 @@ jobs:
|
||||
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
|
||||
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
|
||||
GH_PERSONAL_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
|
||||
|
||||
|
||||
publish-develop-maven:
|
||||
|
||||
1
.github/workflows/release-docker.yml
vendored
1
.github/workflows/release-docker.yml
vendored
@@ -32,3 +32,4 @@ jobs:
|
||||
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
|
||||
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
|
||||
GH_PERSONAL_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
|
||||
@@ -29,8 +29,8 @@ start_time2=$(date +%s)
|
||||
|
||||
echo "cd ./ui"
|
||||
cd ./ui
|
||||
echo "npm i"
|
||||
npm i
|
||||
echo "npm ci"
|
||||
npm ci
|
||||
|
||||
echo 'sh ./run-e2e-tests.sh --kestra-docker-image-to-test "kestra/kestra:$LOCAL_IMAGE_VERSION"'
|
||||
./run-e2e-tests.sh --kestra-docker-image-to-test "kestra/kestra:$LOCAL_IMAGE_VERSION"
|
||||
|
||||
@@ -21,7 +21,7 @@ plugins {
|
||||
|
||||
// test
|
||||
id "com.adarshr.test-logger" version "4.0.0"
|
||||
id "org.sonarqube" version "7.1.0.6387"
|
||||
id "org.sonarqube" version "7.2.1.6560"
|
||||
id 'jacoco-report-aggregation'
|
||||
|
||||
// helper
|
||||
@@ -331,7 +331,7 @@ subprojects {
|
||||
}
|
||||
|
||||
dependencies {
|
||||
agent "org.aspectj:aspectjweaver:1.9.25"
|
||||
agent "org.aspectj:aspectjweaver:1.9.25.1"
|
||||
}
|
||||
|
||||
test {
|
||||
|
||||
@@ -82,8 +82,8 @@ dependencies {
|
||||
testImplementation "io.micronaut:micronaut-http-server-netty"
|
||||
testImplementation "io.micronaut:micronaut-management"
|
||||
|
||||
testImplementation "org.testcontainers:testcontainers:1.21.3"
|
||||
testImplementation "org.testcontainers:junit-jupiter:1.21.3"
|
||||
testImplementation "org.testcontainers:testcontainers:1.21.4"
|
||||
testImplementation "org.testcontainers:junit-jupiter:1.21.4"
|
||||
testImplementation "org.bouncycastle:bcpkix-jdk18on"
|
||||
|
||||
testImplementation "org.wiremock:wiremock-jetty12"
|
||||
|
||||
@@ -3,6 +3,7 @@ package io.kestra.core.docs;
|
||||
import io.kestra.core.models.annotations.PluginSubGroup;
|
||||
import io.kestra.core.plugins.RegisteredPlugin;
|
||||
import io.micronaut.core.annotation.Nullable;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
@@ -117,10 +118,17 @@ public class Plugin {
|
||||
.filter(not(io.kestra.core.models.Plugin::isInternal))
|
||||
.filter(clazzFilter)
|
||||
.filter(c -> !c.getName().startsWith("org.kestra."))
|
||||
.map(c -> new PluginElementMetadata(c.getName(), io.kestra.core.models.Plugin.isDeprecated(c) ? true : null))
|
||||
.map(c -> {
|
||||
Schema schema = c.getAnnotation(Schema.class);
|
||||
|
||||
var title = Optional.ofNullable(schema).map(Schema::title).filter(t -> !t.isEmpty()).orElse(null);
|
||||
var description = Optional.ofNullable(schema).map(Schema::description).filter(d -> !d.isEmpty()).orElse(null);
|
||||
var deprecated = io.kestra.core.models.Plugin.isDeprecated(c) ? true : null;
|
||||
|
||||
return new PluginElementMetadata(c.getName(), deprecated, title, description);
|
||||
})
|
||||
.toList();
|
||||
}
|
||||
|
||||
public record PluginElementMetadata(String cls, Boolean deprecated) {
|
||||
}
|
||||
public record PluginElementMetadata(String cls, Boolean deprecated, String title, String description) {}
|
||||
}
|
||||
|
||||
@@ -4,13 +4,16 @@ import io.kestra.core.utils.MapUtils;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import jakarta.annotation.Nullable;
|
||||
import jakarta.validation.constraints.NotEmpty;
|
||||
import jakarta.validation.constraints.Pattern;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.function.Predicate;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
@Schema(description = "A key/value pair that can be attached to a Flow or Execution. Labels are often used to organize and categorize objects.")
|
||||
public record Label(@NotEmpty String key, @NotEmpty String value) {
|
||||
public record Label(
|
||||
@NotEmpty @Pattern(regexp = "^[\\p{Ll}][\\p{L}0-9._-]*$", message = "Invalid label key. A valid key contains only lowercase letters numbers hyphens (-) underscores (_) or periods (.) and must begin with a lowercase letter.") String key,
|
||||
@NotEmpty String value) {
|
||||
public static final String SYSTEM_PREFIX = "system.";
|
||||
|
||||
// system labels
|
||||
@@ -23,6 +26,7 @@ public record Label(@NotEmpty String key, @NotEmpty String value) {
|
||||
public static final String REPLAYED = SYSTEM_PREFIX + "replayed";
|
||||
public static final String SIMULATED_EXECUTION = SYSTEM_PREFIX + "simulatedExecution";
|
||||
public static final String TEST = SYSTEM_PREFIX + "test";
|
||||
public static final String FROM = SYSTEM_PREFIX + "from";
|
||||
|
||||
/**
|
||||
* Static helper method for converting a list of labels to a nested map.
|
||||
|
||||
@@ -94,7 +94,7 @@ public record QueryFilter(
|
||||
KIND("kind") {
|
||||
@Override
|
||||
public List<Op> supportedOp() {
|
||||
return List.of(Op.EQUALS,Op.NOT_EQUALS);
|
||||
return List.of(Op.EQUALS,Op.NOT_EQUALS, Op.IN, Op.NOT_IN);
|
||||
}
|
||||
},
|
||||
LABELS("labels") {
|
||||
@@ -106,7 +106,7 @@ public record QueryFilter(
|
||||
FLOW_ID("flowId") {
|
||||
@Override
|
||||
public List<Op> supportedOp() {
|
||||
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.REGEX);
|
||||
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.REGEX, Op.IN, Op.NOT_IN, Op.PREFIX);
|
||||
}
|
||||
},
|
||||
UPDATED("updated") {
|
||||
@@ -226,7 +226,7 @@ public record QueryFilter(
|
||||
FLOW {
|
||||
@Override
|
||||
public List<Field> supportedField() {
|
||||
return List.of(Field.LABELS, Field.NAMESPACE, Field.QUERY, Field.SCOPE);
|
||||
return List.of(Field.LABELS, Field.NAMESPACE, Field.QUERY, Field.SCOPE, Field.FLOW_ID);
|
||||
}
|
||||
},
|
||||
NAMESPACE {
|
||||
@@ -241,7 +241,7 @@ public record QueryFilter(
|
||||
return List.of(
|
||||
Field.QUERY, Field.SCOPE, Field.FLOW_ID, Field.START_DATE, Field.END_DATE,
|
||||
Field.STATE, Field.LABELS, Field.TRIGGER_EXECUTION_ID, Field.CHILD_FILTER,
|
||||
Field.NAMESPACE,Field.KIND
|
||||
Field.NAMESPACE, Field.KIND
|
||||
);
|
||||
}
|
||||
},
|
||||
|
||||
@@ -16,6 +16,7 @@ import jakarta.validation.constraints.NotNull;
|
||||
public class Setting {
|
||||
public static final String INSTANCE_UUID = "instance.uuid";
|
||||
public static final String INSTANCE_VERSION = "instance.version";
|
||||
public static final String INSTANCE_EDITION = "instance.edition";
|
||||
|
||||
@NotNull
|
||||
private String key;
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
package io.kestra.core.models.flows;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore;
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
@@ -130,7 +129,7 @@ public class Flow extends AbstractFlow implements HasUID {
|
||||
@Valid
|
||||
@PluginProperty
|
||||
List<SLA> sla;
|
||||
|
||||
|
||||
@Schema(
|
||||
title = "Conditions evaluated before the flow is executed.",
|
||||
description = "A list of conditions that are evaluated before the flow is executed. If no checks are defined, the flow executes normally."
|
||||
@@ -355,7 +354,7 @@ public class Flow extends AbstractFlow implements HasUID {
|
||||
* To be conservative a flow MUST not return any source.
|
||||
*/
|
||||
@Override
|
||||
@JsonIgnore
|
||||
@Schema(hidden = true)
|
||||
public String getSource() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -1,14 +1,12 @@
|
||||
package io.kestra.core.models.flows;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore;
|
||||
import io.micronaut.core.annotation.Introspected;
|
||||
import lombok.Getter;
|
||||
import lombok.NoArgsConstructor;
|
||||
import lombok.ToString;
|
||||
import lombok.experimental.SuperBuilder;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
|
||||
import java.util.Objects;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
@SuperBuilder(toBuilder = true)
|
||||
@Getter
|
||||
@@ -48,7 +46,7 @@ public class FlowWithSource extends Flow {
|
||||
}
|
||||
|
||||
@Override
|
||||
@JsonIgnore(value = false)
|
||||
@Schema(hidden = false)
|
||||
public String getSource() {
|
||||
return this.source;
|
||||
}
|
||||
|
||||
@@ -267,6 +267,10 @@ public class State {
|
||||
return this == Type.RUNNING || this == Type.KILLING;
|
||||
}
|
||||
|
||||
public boolean onlyRunning() {
|
||||
return this == Type.RUNNING;
|
||||
}
|
||||
|
||||
public boolean isFailed() {
|
||||
return this == Type.FAILED;
|
||||
}
|
||||
|
||||
@@ -93,7 +93,7 @@ public class Property<T> {
|
||||
* @return a new {@link Property} without a pre-rendered value
|
||||
*/
|
||||
public Property<T> skipCache() {
|
||||
return Property.ofExpression(expression);
|
||||
return new Property<>(expression, true);
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -82,6 +82,12 @@ abstract public class AbstractTrigger implements TriggerInterface {
|
||||
@PluginProperty(hidden = true, group = PluginProperty.CORE_GROUP)
|
||||
private boolean failOnTriggerError = false;
|
||||
|
||||
@PluginProperty(group = PluginProperty.CORE_GROUP)
|
||||
@Schema(
|
||||
title = "Specifies whether a trigger is allowed to start a new execution even if a previous run is still in progress."
|
||||
)
|
||||
private boolean allowConcurrent = false;
|
||||
|
||||
/**
|
||||
* For backward compatibility: we rename minLogLevel to logLevel.
|
||||
* @deprecated use {@link #logLevel} instead
|
||||
|
||||
@@ -1,22 +1,37 @@
|
||||
package io.kestra.core.models.triggers;
|
||||
|
||||
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
|
||||
import io.kestra.core.models.annotations.PluginProperty;
|
||||
import io.kestra.core.models.conditions.ConditionContext;
|
||||
import io.kestra.core.runners.RunContext;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
|
||||
import java.time.ZonedDateTime;
|
||||
import java.util.Map;
|
||||
|
||||
public interface Schedulable extends PollingTriggerInterface{
|
||||
String PLUGIN_PROPERTY_RECOVER_MISSED_SCHEDULES = "recoverMissedSchedules";
|
||||
|
||||
@Schema(
|
||||
title = "The inputs to pass to the scheduled flow"
|
||||
)
|
||||
@PluginProperty(dynamic = true)
|
||||
Map<String, Object> getInputs();
|
||||
|
||||
@Schema(
|
||||
title = "Action to take in the case of missed schedules",
|
||||
description = "`ALL` will recover all missed schedules, `LAST` will only recovered the last missing one, `NONE` will not recover any missing schedule.\n" +
|
||||
"The default is `ALL` unless a different value is configured using the global plugin configuration."
|
||||
)
|
||||
@PluginProperty
|
||||
RecoverMissedSchedules getRecoverMissedSchedules();
|
||||
|
||||
/**
|
||||
* Compute the previous evaluation of a trigger.
|
||||
* This is used when a trigger misses some schedule to compute the next date to evaluate in the past.
|
||||
*/
|
||||
ZonedDateTime previousEvaluationDate(ConditionContext conditionContext) throws IllegalVariableEvaluationException;
|
||||
|
||||
RecoverMissedSchedules getRecoverMissedSchedules();
|
||||
|
||||
|
||||
/**
|
||||
* Load the default RecoverMissedSchedules from plugin property, or else ALL.
|
||||
*/
|
||||
|
||||
@@ -172,7 +172,7 @@ public class Trigger extends TriggerContext implements HasUID {
|
||||
|
||||
if (abstractTrigger instanceof PollingTriggerInterface pollingTriggerInterface) {
|
||||
try {
|
||||
nextDate = pollingTriggerInterface.nextEvaluationDate(conditionContext, Optional.empty());
|
||||
nextDate = pollingTriggerInterface.nextEvaluationDate(conditionContext, lastTrigger);
|
||||
} catch (InvalidTriggerConfigurationException e) {
|
||||
disabled = true;
|
||||
}
|
||||
|
||||
@@ -6,12 +6,9 @@ import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.executions.ExecutionTrigger;
|
||||
import io.kestra.core.models.tasks.Output;
|
||||
import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.runners.DefaultRunContext;
|
||||
import io.kestra.core.runners.FlowInputOutput;
|
||||
import io.kestra.core.runners.RunContext;
|
||||
import io.kestra.core.utils.IdUtils;
|
||||
import io.kestra.core.utils.ListUtils;
|
||||
import java.time.ZonedDateTime;
|
||||
import java.util.*;
|
||||
|
||||
public abstract class TriggerService {
|
||||
@@ -51,58 +48,6 @@ public abstract class TriggerService {
|
||||
return generateExecution(IdUtils.create(), trigger, context, executionTrigger, conditionContext);
|
||||
}
|
||||
|
||||
public static Execution generateScheduledExecution(
|
||||
AbstractTrigger trigger,
|
||||
ConditionContext conditionContext,
|
||||
TriggerContext context,
|
||||
List<Label> labels,
|
||||
Map<String, Object> inputs,
|
||||
Map<String, Object> variables,
|
||||
Optional<ZonedDateTime> scheduleDate
|
||||
) {
|
||||
RunContext runContext = conditionContext.getRunContext();
|
||||
ExecutionTrigger executionTrigger = ExecutionTrigger.of(trigger, variables);
|
||||
|
||||
List<Label> executionLabels = new ArrayList<>(ListUtils.emptyOnNull(labels));
|
||||
if (executionLabels.stream().noneMatch(label -> Label.CORRELATION_ID.equals(label.key()))) {
|
||||
// add a correlation ID if none exist
|
||||
executionLabels.add(new Label(Label.CORRELATION_ID, runContext.getTriggerExecutionId()));
|
||||
}
|
||||
Execution execution = Execution.builder()
|
||||
.id(runContext.getTriggerExecutionId())
|
||||
.tenantId(context.getTenantId())
|
||||
.namespace(context.getNamespace())
|
||||
.flowId(context.getFlowId())
|
||||
.flowRevision(conditionContext.getFlow().getRevision())
|
||||
.variables(conditionContext.getFlow().getVariables())
|
||||
.labels(executionLabels)
|
||||
.state(new State())
|
||||
.trigger(executionTrigger)
|
||||
.scheduleDate(scheduleDate.map(date -> date.toInstant()).orElse(null))
|
||||
.build();
|
||||
|
||||
Map<String, Object> allInputs = new HashMap<>();
|
||||
// add flow inputs with default value
|
||||
var flow = conditionContext.getFlow();
|
||||
if (flow.getInputs() != null) {
|
||||
flow.getInputs().stream()
|
||||
.filter(input -> input.getDefaults() != null)
|
||||
.forEach(input -> allInputs.put(input.getId(), input.getDefaults()));
|
||||
}
|
||||
|
||||
if (inputs != null) {
|
||||
allInputs.putAll(inputs);
|
||||
}
|
||||
|
||||
// add inputs and inject defaults
|
||||
if (!allInputs.isEmpty()) {
|
||||
FlowInputOutput flowInputOutput = ((DefaultRunContext)runContext).getApplicationContext().getBean(FlowInputOutput.class);
|
||||
execution = execution.withInputs(flowInputOutput.readExecutionInputs(conditionContext.getFlow(), execution, allInputs));
|
||||
}
|
||||
|
||||
return execution;
|
||||
}
|
||||
|
||||
private static Execution generateExecution(
|
||||
String id,
|
||||
AbstractTrigger trigger,
|
||||
@@ -111,6 +56,7 @@ public abstract class TriggerService {
|
||||
ConditionContext conditionContext
|
||||
) {
|
||||
List<Label> executionLabels = new ArrayList<>(ListUtils.emptyOnNull(trigger.getLabels()));
|
||||
executionLabels.add(new Label(Label.FROM, "trigger"));
|
||||
if (executionLabels.stream().noneMatch(label -> Label.CORRELATION_ID.equals(label.key()))) {
|
||||
// add a correlation ID if none exist
|
||||
executionLabels.add(new Label(Label.CORRELATION_ID, id));
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
package io.kestra.core.repositories;
|
||||
|
||||
import io.kestra.core.models.Setting;
|
||||
import jakarta.validation.ConstraintViolationException;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import jakarta.validation.ConstraintViolationException;
|
||||
|
||||
public interface SettingRepositoryInterface {
|
||||
Optional<Setting> findByKey(String key);
|
||||
@@ -13,5 +13,7 @@ public interface SettingRepositoryInterface {
|
||||
|
||||
Setting save(Setting setting) throws ConstraintViolationException;
|
||||
|
||||
Setting internalSave(Setting setting) throws ConstraintViolationException;
|
||||
|
||||
Setting delete(Setting setting);
|
||||
}
|
||||
|
||||
@@ -16,8 +16,8 @@ import java.util.function.Function;
|
||||
public interface TriggerRepositoryInterface extends QueryBuilderInterface<Triggers.Fields> {
|
||||
Optional<Trigger> findLast(TriggerContext trigger);
|
||||
|
||||
Optional<Trigger> findByExecution(Execution execution);
|
||||
|
||||
Optional<Trigger> findByUid(String uid);
|
||||
|
||||
List<Trigger> findAll(String tenantId);
|
||||
|
||||
List<Trigger> findAllForAllTenants();
|
||||
|
||||
@@ -6,10 +6,12 @@ import com.google.common.base.CaseFormat;
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
|
||||
import io.kestra.core.metrics.MetricRegistry;
|
||||
import io.kestra.core.models.Plugin;
|
||||
import io.kestra.core.models.executions.AbstractMetricEntry;
|
||||
import io.kestra.core.models.property.Property;
|
||||
import io.kestra.core.models.tasks.Task;
|
||||
import io.kestra.core.models.triggers.AbstractTrigger;
|
||||
import io.kestra.core.plugins.PluginConfigurations;
|
||||
import io.kestra.core.services.KVStoreService;
|
||||
import io.kestra.core.storages.Storage;
|
||||
import io.kestra.core.storages.StorageInterface;
|
||||
@@ -235,6 +237,14 @@ public class DefaultRunContext extends RunContext {
|
||||
return runContext;
|
||||
}
|
||||
|
||||
@Override
|
||||
public RunContext cloneForPlugin(Plugin plugin) {
|
||||
PluginConfigurations pluginConfigurations = applicationContext.getBean(PluginConfigurations.class);
|
||||
DefaultRunContext runContext = clone();
|
||||
runContext.pluginConfiguration = pluginConfigurations.getConfigurationByPluginTypeOrAliases(plugin.getType(), plugin.getClass());
|
||||
return runContext;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
@@ -589,6 +599,11 @@ public class DefaultRunContext extends RunContext {
|
||||
return localPath;
|
||||
}
|
||||
|
||||
@Override
|
||||
public InputAndOutput inputAndOutput() {
|
||||
return new InputAndOutputImpl(this.applicationContext, this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Builder class for constructing new {@link DefaultRunContext} objects.
|
||||
*/
|
||||
|
||||
@@ -189,12 +189,11 @@ public final class ExecutableUtils {
|
||||
variables.put("taskRunIteration", currentTaskRun.getIteration());
|
||||
}
|
||||
|
||||
FlowInputOutput flowInputOutput = ((DefaultRunContext)runContext).getApplicationContext().getBean(FlowInputOutput.class);
|
||||
Instant scheduleOnDate = runContext.render(scheduleDate).as(ZonedDateTime.class).map(date -> date.toInstant()).orElse(null);
|
||||
Execution execution = Execution
|
||||
.newExecution(
|
||||
flow,
|
||||
(f, e) -> flowInputOutput.readExecutionInputs(f, e, inputs),
|
||||
(f, e) -> runContext.inputAndOutput().readInputs(f, e, inputs),
|
||||
newLabels,
|
||||
Optional.empty())
|
||||
.withTrigger(ExecutionTrigger.builder()
|
||||
|
||||
@@ -3,13 +3,11 @@ package io.kestra.core.runners;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import io.kestra.core.encryption.EncryptionService;
|
||||
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
|
||||
import io.kestra.core.exceptions.KestraRuntimeException;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.flows.Data;
|
||||
import io.kestra.core.models.flows.DependsOn;
|
||||
import io.kestra.core.models.flows.FlowInterface;
|
||||
import io.kestra.core.models.flows.Input;
|
||||
import io.kestra.core.models.flows.Output;
|
||||
import io.kestra.core.models.flows.RenderableInput;
|
||||
import io.kestra.core.models.flows.Type;
|
||||
import io.kestra.core.models.flows.input.FileInput;
|
||||
@@ -158,11 +156,7 @@ public class FlowInputOutput {
|
||||
File tempFile = File.createTempFile(prefix, fileExtension);
|
||||
try (var inputStream = fileUpload.getInputStream();
|
||||
var outputStream = new FileOutputStream(tempFile)) {
|
||||
long transferredBytes = inputStream.transferTo(outputStream);
|
||||
if (transferredBytes == 0) {
|
||||
sink.error(new KestraRuntimeException("Can't upload file: " + fileUpload.getFilename()));
|
||||
return;
|
||||
}
|
||||
inputStream.transferTo(outputStream);
|
||||
URI from = storageInterface.from(execution, inputId, fileName, tempFile);
|
||||
sink.next(Map.entry(inputId, from.toString()));
|
||||
} finally {
|
||||
@@ -382,11 +376,11 @@ public class FlowInputOutput {
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private static <T> Object resolveDefaultPropertyAs(Input<?> input, PropertyContext renderer, Class<T> clazz) throws IllegalVariableEvaluationException {
|
||||
return Property.as((Property<T>) input.getDefaults(), renderer, clazz);
|
||||
return Property.as((Property<T>) input.getDefaults().skipCache(), renderer, clazz);
|
||||
}
|
||||
@SuppressWarnings("unchecked")
|
||||
private static <T> Object resolveDefaultPropertyAsList(Input<?> input, PropertyContext renderer, Class<T> clazz) throws IllegalVariableEvaluationException {
|
||||
return Property.asList((Property<List<T>>) input.getDefaults(), renderer, clazz);
|
||||
return Property.asList((Property<List<T>>) input.getDefaults().skipCache(), renderer, clazz);
|
||||
}
|
||||
|
||||
private RunContext buildRunContextForExecutionAndInputs(final FlowInterface flow, final Execution execution, Map<String, InputAndValue> dependencies, final boolean decryptSecrets) {
|
||||
@@ -502,8 +496,8 @@ public class FlowInputOutput {
|
||||
yield storageInterface.from(execution, id, current.toString().substring(current.toString().lastIndexOf("/") + 1), new File(current.toString()));
|
||||
}
|
||||
}
|
||||
case JSON -> JacksonMapper.toObject(current.toString());
|
||||
case YAML -> YAML_MAPPER.readValue(current.toString(), JacksonMapper.OBJECT_TYPE_REFERENCE);
|
||||
case JSON -> (current instanceof Map || current instanceof Collection<?>) ? current : JacksonMapper.toObject(current.toString());
|
||||
case YAML -> (current instanceof Map || current instanceof Collection<?>) ? current : YAML_MAPPER.readValue(current.toString(), JacksonMapper.OBJECT_TYPE_REFERENCE);
|
||||
case URI -> {
|
||||
Matcher matcher = URI_PATTERN.matcher(current.toString());
|
||||
if (matcher.matches()) {
|
||||
@@ -543,30 +537,6 @@ public class FlowInputOutput {
|
||||
}
|
||||
}
|
||||
|
||||
public static Map<String, Object> renderFlowOutputs(List<Output> outputs, RunContext runContext) throws IllegalVariableEvaluationException {
|
||||
if (outputs == null) return Map.of();
|
||||
|
||||
// render required outputs
|
||||
Map<String, Object> outputsById = outputs
|
||||
.stream()
|
||||
.filter(output -> output.getRequired() == null || output.getRequired())
|
||||
.collect(HashMap::new, (map, entry) -> map.put(entry.getId(), entry.getValue()), Map::putAll);
|
||||
outputsById = runContext.render(outputsById);
|
||||
|
||||
// render optional outputs one by one to catch, log, and skip any error.
|
||||
for (io.kestra.core.models.flows.Output output : outputs) {
|
||||
if (Boolean.FALSE.equals(output.getRequired())) {
|
||||
try {
|
||||
outputsById.putAll(runContext.render(Map.of(output.getId(), output.getValue())));
|
||||
} catch (Exception e) {
|
||||
runContext.logger().warn("Failed to render optional flow output '{}'. Output is ignored.", output.getId(), e);
|
||||
outputsById.put(output.getId(), null);
|
||||
}
|
||||
}
|
||||
}
|
||||
return outputsById;
|
||||
}
|
||||
|
||||
/**
|
||||
* Mutable wrapper to hold a flow's input, and it's resolved value.
|
||||
*/
|
||||
|
||||
@@ -0,0 +1,29 @@
|
||||
package io.kestra.core.runners;
|
||||
|
||||
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.flows.FlowInterface;
|
||||
import io.kestra.core.models.flows.Output;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* InputAndOutput could be used to work with flow execution inputs and outputs.
|
||||
*/
|
||||
public interface InputAndOutput {
|
||||
/**
|
||||
* Reads the inputs of a flow execution.
|
||||
*/
|
||||
Map<String, Object> readInputs(FlowInterface flow, Execution execution, Map<String, Object> inputs);
|
||||
|
||||
/**
|
||||
* Processes the outputs of a flow execution (parse them based on their types).
|
||||
*/
|
||||
Map<String, Object> typedOutputs(FlowInterface flow, Execution execution, Map<String, Object> rOutputs);
|
||||
|
||||
/**
|
||||
* Render flow execution outputs.
|
||||
*/
|
||||
Map<String, Object> renderOutputs(List<Output> outputs) throws IllegalVariableEvaluationException;
|
||||
}
|
||||
@@ -0,0 +1,56 @@
|
||||
package io.kestra.core.runners;
|
||||
|
||||
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.flows.FlowInterface;
|
||||
import io.kestra.core.models.flows.Output;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
class InputAndOutputImpl implements InputAndOutput {
|
||||
private final FlowInputOutput flowInputOutput;
|
||||
private final RunContext runContext;
|
||||
|
||||
InputAndOutputImpl(ApplicationContext applicationContext, RunContext runContext) {
|
||||
this.flowInputOutput = applicationContext.getBean(FlowInputOutput.class);
|
||||
this.runContext = runContext;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, Object> readInputs(FlowInterface flow, Execution execution, Map<String, Object> inputs) {
|
||||
return flowInputOutput.readExecutionInputs(flow, execution, inputs);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, Object> typedOutputs(FlowInterface flow, Execution execution, Map<String, Object> rOutputs) {
|
||||
return flowInputOutput.typedOutputs(flow, execution, rOutputs);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, Object> renderOutputs(List<Output> outputs) throws IllegalVariableEvaluationException {
|
||||
if (outputs == null) return Map.of();
|
||||
|
||||
// render required outputs
|
||||
Map<String, Object> outputsById = outputs
|
||||
.stream()
|
||||
.filter(output -> output.getRequired() == null || output.getRequired())
|
||||
.collect(HashMap::new, (map, entry) -> map.put(entry.getId(), entry.getValue()), Map::putAll);
|
||||
outputsById = runContext.render(outputsById);
|
||||
|
||||
// render optional outputs one by one to catch, log, and skip any error.
|
||||
for (io.kestra.core.models.flows.Output output : outputs) {
|
||||
if (Boolean.FALSE.equals(output.getRequired())) {
|
||||
try {
|
||||
outputsById.putAll(runContext.render(Map.of(output.getId(), output.getValue())));
|
||||
} catch (Exception e) {
|
||||
runContext.logger().warn("Failed to render optional flow output '{}'. Output is ignored.", output.getId(), e);
|
||||
outputsById.put(output.getId(), null);
|
||||
}
|
||||
}
|
||||
}
|
||||
return outputsById;
|
||||
}
|
||||
}
|
||||
@@ -4,6 +4,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore;
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
import io.kestra.core.encryption.EncryptionService;
|
||||
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
|
||||
import io.kestra.core.models.Plugin;
|
||||
import io.kestra.core.models.executions.AbstractMetricEntry;
|
||||
import io.kestra.core.models.property.Property;
|
||||
import io.kestra.core.models.property.PropertyContext;
|
||||
@@ -204,4 +205,15 @@ public abstract class RunContext implements PropertyContext {
|
||||
* when Namespace ACLs are used (EE).
|
||||
*/
|
||||
public abstract AclChecker acl();
|
||||
|
||||
/**
|
||||
* Clone this run context for a specific plugin.
|
||||
* @return a new run context with the plugin configuration of the given plugin.
|
||||
*/
|
||||
public abstract RunContext cloneForPlugin(Plugin plugin);
|
||||
|
||||
/**
|
||||
* @return an InputAndOutput that can be used to work with inputs and outputs.
|
||||
*/
|
||||
public abstract InputAndOutput inputAndOutput();
|
||||
}
|
||||
|
||||
@@ -1,10 +1,8 @@
|
||||
package io.kestra.core.runners;
|
||||
|
||||
import com.google.common.collect.Lists;
|
||||
import io.kestra.core.models.Plugin;
|
||||
import io.kestra.core.models.executions.TaskRun;
|
||||
import io.kestra.core.models.tasks.Task;
|
||||
import io.kestra.core.models.tasks.runners.TaskRunner;
|
||||
import io.kestra.core.models.triggers.AbstractTrigger;
|
||||
import io.kestra.core.models.triggers.TriggerContext;
|
||||
import io.kestra.core.plugins.PluginConfigurations;
|
||||
@@ -53,20 +51,6 @@ public class RunContextInitializer {
|
||||
@Value("${kestra.encryption.secret-key}")
|
||||
protected Optional<String> secretKey;
|
||||
|
||||
/**
 * Initializes the given {@link RunContext} for the given {@link Plugin}.
 * <p>
 * Injects the application context, then applies the plugin configuration resolved
 * from the plugin's type (or one of its aliases).
 *
 * @param runContext The {@link RunContext} to initialize.
 * @param plugin     The {@link Plugin} whose configuration is applied during initialization.
 * @return The initialized {@link RunContext} (the same instance that was passed in).
 */
public DefaultRunContext forPlugin(final DefaultRunContext runContext,
                                   final Plugin plugin) {
    runContext.init(applicationContext);
    runContext.setPluginConfiguration(pluginConfigurations.getConfigurationByPluginTypeOrAliases(plugin.getType(), plugin.getClass()));
    return runContext;
}
|
||||
|
||||
/**
|
||||
* Initializes the given {@link RunContext} for the given {@link WorkerTask} for executor.
|
||||
*
|
||||
|
||||
@@ -55,11 +55,11 @@ public class RunContextLogger implements Supplier<org.slf4j.Logger> {
|
||||
|
||||
public RunContextLogger(QueueInterface<LogEntry> logQueue, LogEntry logEntry, org.slf4j.event.Level loglevel, boolean logToFile) {
|
||||
if (logEntry.getTaskId() != null) {
|
||||
this.loggerName = "flow." + logEntry.getFlowId() + "." + logEntry.getTaskId();
|
||||
this.loggerName = baseLoggerName(logEntry) + "." + logEntry.getTaskId();
|
||||
} else if (logEntry.getTriggerId() != null) {
|
||||
this.loggerName = "flow." + logEntry.getFlowId() + "." + logEntry.getTriggerId();
|
||||
this.loggerName = baseLoggerName(logEntry) + "." + logEntry.getTriggerId();
|
||||
} else {
|
||||
this.loggerName = "flow." + logEntry.getFlowId();
|
||||
this.loggerName = baseLoggerName(logEntry);
|
||||
}
|
||||
|
||||
this.logQueue = logQueue;
|
||||
@@ -68,6 +68,10 @@ public class RunContextLogger implements Supplier<org.slf4j.Logger> {
|
||||
this.logToFile = logToFile;
|
||||
}
|
||||
|
||||
private String baseLoggerName(LogEntry logEntry) {
|
||||
return "flow." + logEntry.getTenantId() + "." + logEntry.getNamespace() + "." + logEntry.getFlowId();
|
||||
}
|
||||
|
||||
private static List<LogEntry> logEntry(ILoggingEvent event, String message, org.slf4j.event.Level level, LogEntry logEntry) {
|
||||
Iterable<String> split;
|
||||
|
||||
|
||||
@@ -81,7 +81,24 @@ public final class YamlParser {
|
||||
throw toConstraintViolationException(input, resource, e);
|
||||
}
|
||||
}
|
||||
|
||||
private static String formatYamlErrorMessage(String originalMessage, JsonProcessingException e) {
|
||||
StringBuilder friendlyMessage = new StringBuilder();
|
||||
if (originalMessage.contains("Expected a field name")) {
|
||||
friendlyMessage.append("YAML syntax error: Invalid structure. Check indentation and ensure all fields are properly formatted.");
|
||||
} else if (originalMessage.contains("MappingStartEvent")) {
|
||||
friendlyMessage.append("YAML syntax error: Unexpected mapping start. Verify that scalar values are properly quoted if needed.");
|
||||
} else if (originalMessage.contains("Scalar value")) {
|
||||
friendlyMessage.append("YAML syntax error: Expected a simple value but found complex structure. Check for unquoted special characters.");
|
||||
} else {
|
||||
friendlyMessage.append("YAML parsing error: ").append(originalMessage.replaceAll("org\\.yaml\\.snakeyaml.*", "").trim());
|
||||
}
|
||||
if (e.getLocation() != null) {
|
||||
int line = e.getLocation().getLineNr();
|
||||
friendlyMessage.append(String.format(" (at line %d)", line));
|
||||
}
|
||||
// Return a generic but cleaner message for other YAML errors
|
||||
return friendlyMessage.toString();
|
||||
}
|
||||
@SuppressWarnings("unchecked")
|
||||
public static <T> ConstraintViolationException toConstraintViolationException(T target, String resource, JsonProcessingException e) {
|
||||
if (e.getCause() instanceof ConstraintViolationException constraintViolationException) {
|
||||
@@ -121,11 +138,12 @@ public final class YamlParser {
|
||||
)
|
||||
));
|
||||
} else {
|
||||
String userFriendlyMessage = formatYamlErrorMessage(e.getMessage(), e);
|
||||
return new ConstraintViolationException(
|
||||
"Illegal " + resource + " source: " + e.getMessage(),
|
||||
"Illegal " + resource + " source: " + userFriendlyMessage,
|
||||
Collections.singleton(
|
||||
ManualConstraintViolation.of(
|
||||
e.getCause() == null ? e.getMessage() : e.getMessage() + "\nCaused by: " + e.getCause().getMessage(),
|
||||
userFriendlyMessage,
|
||||
target,
|
||||
(Class<T>) target.getClass(),
|
||||
"yaml",
|
||||
@@ -136,4 +154,3 @@ public final class YamlParser {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -4,7 +4,6 @@ import com.cronutils.utils.VisibleForTesting;
|
||||
import io.kestra.core.exceptions.InternalException;
|
||||
import io.kestra.core.models.conditions.Condition;
|
||||
import io.kestra.core.models.conditions.ConditionContext;
|
||||
import io.kestra.core.models.conditions.ScheduleCondition;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.flows.Flow;
|
||||
import io.kestra.core.models.flows.FlowInterface;
|
||||
@@ -65,16 +64,6 @@ public class ConditionService {
|
||||
return this.valid(flow, conditions, conditionContext);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check that all conditions are valid.
|
||||
* Warning, this method throws if a condition cannot be evaluated.
|
||||
*/
|
||||
public boolean isValid(List<ScheduleCondition> conditions, ConditionContext conditionContext) throws InternalException {
|
||||
return conditions
|
||||
.stream()
|
||||
.allMatch(throwPredicate(condition -> condition.test(conditionContext)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Check that all conditions are valid.
|
||||
* Warning, this method throws if a condition cannot be evaluated.
|
||||
|
||||
@@ -92,7 +92,14 @@ public class FlowService {
|
||||
return flowRepository
|
||||
.orElseThrow(() -> new IllegalStateException("Cannot perform operation on flow. Cause: No FlowRepository"));
|
||||
}
|
||||
|
||||
private static String formatValidationError(String message) {
|
||||
if (message.startsWith("Illegal flow source:")) {
|
||||
// Already formatted by YamlParser, return as-is
|
||||
return message;
|
||||
}
|
||||
// For other validation errors, provide context
|
||||
return "Validation error: " + message;
|
||||
}
|
||||
/**
|
||||
* Evaluates all checks defined in the given flow using the provided inputs.
|
||||
* <p>
|
||||
@@ -174,10 +181,12 @@ public class FlowService {
|
||||
modelValidator.validate(pluginDefaultService.injectAllDefaults(flow, false));
|
||||
|
||||
} catch (ConstraintViolationException e) {
|
||||
validateConstraintViolationBuilder.constraints(e.getMessage());
|
||||
String friendlyMessage = formatValidationError(e.getMessage());
|
||||
validateConstraintViolationBuilder.constraints(friendlyMessage);
|
||||
} catch (FlowProcessingException e) {
|
||||
if (e.getCause() instanceof ConstraintViolationException) {
|
||||
validateConstraintViolationBuilder.constraints(e.getMessage());
|
||||
if (e.getCause() instanceof ConstraintViolationException cve) {
|
||||
String friendlyMessage = formatValidationError(cve.getMessage());
|
||||
validateConstraintViolationBuilder.constraints(friendlyMessage);
|
||||
} else {
|
||||
Throwable cause = e.getCause() != null ? e.getCause() : e;
|
||||
validateConstraintViolationBuilder.constraints("Unable to validate the flow: " + cause.getMessage());
|
||||
@@ -579,4 +588,4 @@ public class FlowService {
|
||||
private IllegalStateException noRepositoryException() {
|
||||
return new IllegalStateException("No repository found. Make sure the `kestra.repository.type` property is set.");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,5 @@
|
||||
package io.kestra.core.storages;
|
||||
|
||||
import io.kestra.core.repositories.NamespaceFileMetadataRepositoryInterface;
|
||||
import io.kestra.core.services.NamespaceService;
|
||||
import jakarta.annotation.Nullable;
|
||||
import org.slf4j.Logger;
|
||||
@@ -272,7 +271,13 @@ public class InternalStorage implements Storage {
|
||||
return this.storage.put(context.getTenantId(), context.getNamespace(), resolve, new BufferedInputStream(inputStream));
|
||||
}
|
||||
|
||||
@Override
|
||||
public Optional<StorageContext.Task> getTaskStorageContext() {
|
||||
return Optional.ofNullable((context instanceof StorageContext.Task task) ? task : null);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<FileAttributes> list(URI uri) throws IOException {
|
||||
return this.storage.list(context.getTenantId(), context.getNamespace(), uri);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -173,4 +173,6 @@ public interface Storage {
|
||||
* @return the task storage context
|
||||
*/
|
||||
Optional<StorageContext.Task> getTaskStorageContext();
|
||||
|
||||
List<FileAttributes> list(URI uri) throws IOException;
|
||||
}
|
||||
|
||||
@@ -1,13 +1,39 @@
|
||||
package io.kestra.core.utils;
|
||||
|
||||
import io.kestra.core.models.Setting;
|
||||
import io.kestra.core.repositories.SettingRepositoryInterface;
|
||||
import jakarta.annotation.PostConstruct;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.inject.Singleton;
|
||||
|
||||
import java.util.Optional;
|
||||
|
||||
@Singleton
|
||||
public class EditionProvider {
|
||||
public Edition get() {
|
||||
return Edition.OSS;
|
||||
}
|
||||
|
||||
@Inject
|
||||
private Optional<SettingRepositoryInterface> settingRepository; // repositories are not always there on unit tests
|
||||
|
||||
/**
 * On startup, persists the current edition into the settings repository when one is
 * available (repositories may be absent in unit tests).
 */
@PostConstruct
void start() {
    // Store the edition in the settings and update it if needed. The value is not read back
    // today, but persisting it would allow detecting an incompatible update later if needed.
    settingRepository.ifPresent(settingRepositoryInterface -> persistEdition(settingRepositoryInterface, get()));
}
|
||||
|
||||
/**
 * Saves the given edition under the {@code Setting.INSTANCE_EDITION} key when the setting
 * is missing or its stored value differs from the current edition.
 *
 * @param settingRepositoryInterface the repository used to read and write the setting
 * @param edition                    the edition of the running instance
 */
private void persistEdition(SettingRepositoryInterface settingRepositoryInterface, Edition edition) {
    Optional<Setting> versionSetting = settingRepositoryInterface.findByKey(Setting.INSTANCE_EDITION);
    // NOTE(review): if getValue() does not return an Edition (e.g. a String after a storage
    // round-trip), equals(edition) is always false and the setting is re-saved on every
    // startup — confirm the stored value's runtime type.
    if (versionSetting.isEmpty() || !versionSetting.get().getValue().equals(edition)) {
        settingRepositoryInterface.save(Setting.builder()
            .key(Setting.INSTANCE_EDITION)
            .value(edition)
            .build()
        );
    }
}
|
||||
|
||||
public enum Edition {
|
||||
OSS,
|
||||
EE
|
||||
|
||||
@@ -11,6 +11,11 @@ import jakarta.inject.Inject;
|
||||
import jakarta.inject.Singleton;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
/**
|
||||
 * Utility class to create {@link java.util.concurrent.ExecutorService} instances.
|
||||
* WARNING: those instances will use the {@link ThreadUncaughtExceptionHandler} which terminates Kestra if an error occurs in any thread,
|
||||
* so it should not be used inside plugins.
|
||||
*/
|
||||
@Singleton
|
||||
@Slf4j
|
||||
public class ExecutorsUtils {
|
||||
|
||||
@@ -65,10 +65,9 @@ public class ListUtils {
|
||||
}
|
||||
|
||||
public static List<String> convertToListString(Object object){
|
||||
if (object instanceof List<?> list && (list.isEmpty() || list.getFirst() instanceof String)) {
|
||||
return (List<String>) list;
|
||||
} else {
|
||||
throw new IllegalArgumentException("%s in not an instance of List of String".formatted(object));
|
||||
}
|
||||
return convertToList(object)
|
||||
.stream()
|
||||
.map(Object::toString)
|
||||
.toList();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -10,7 +10,7 @@ import org.slf4j.LoggerFactory;
|
||||
import org.slf4j.event.Level;
|
||||
|
||||
/**
|
||||
* Utility class for logging
|
||||
* Utility class for server logging
|
||||
*/
|
||||
public final class Logs {
|
||||
|
||||
@@ -18,7 +18,7 @@ public final class Logs {
|
||||
private static final String EXECUTION_PREFIX_WITH_TENANT = FLOW_PREFIX_WITH_TENANT + "[execution: {}] ";
|
||||
private static final String TRIGGER_PREFIX_WITH_TENANT = FLOW_PREFIX_WITH_TENANT + "[trigger: {}] ";
|
||||
private static final String TASKRUN_PREFIX_WITH_TENANT = FLOW_PREFIX_WITH_TENANT + "[task: {}] [execution: {}] [taskrun: {}] ";
|
||||
|
||||
|
||||
private Logs() {}
|
||||
|
||||
public static void logExecution(FlowId flow, Logger logger, Level level, String message, Object... args) {
|
||||
@@ -29,7 +29,7 @@ public final class Logs {
|
||||
}
|
||||
|
||||
/**
|
||||
* Log an {@link Execution} via the execution logger named: 'execution.{flowId}'.
|
||||
* Log an {@link Execution} via the executor logger named: 'executor.{tenantId}.{namespace}.{flowId}'.
|
||||
*/
|
||||
public static void logExecution(Execution execution, Level level, String message, Object... args) {
|
||||
Logger logger = logger(execution);
|
||||
@@ -43,7 +43,7 @@ public final class Logs {
|
||||
}
|
||||
|
||||
/**
|
||||
 * Log a {@link TriggerContext} via the trigger logger named: 'trigger.{flowId}.{triggerId}'.
|
||||
* Log a {@link TriggerContext} via the scheduler logger named: 'trigger.{tenantId}.{namespace}.{flowId}.{triggerId}'.
|
||||
*/
|
||||
public static void logTrigger(TriggerContext triggerContext, Level level, String message, Object... args) {
|
||||
Logger logger = logger(triggerContext);
|
||||
@@ -57,7 +57,7 @@ public final class Logs {
|
||||
}
|
||||
|
||||
/**
|
||||
* Log a {@link TaskRun} via the taskRun logger named: 'task.{flowId}.{taskId}'.
|
||||
* Log a {@link TaskRun} via the worker logger named: 'worker.{tenantId}.{namespace}.{flowId}.{taskId}'.
|
||||
*/
|
||||
public static void logTaskRun(TaskRun taskRun, Level level, String message, Object... args) {
|
||||
String prefix = TASKRUN_PREFIX_WITH_TENANT;
|
||||
@@ -73,19 +73,19 @@ public final class Logs {
|
||||
|
||||
private static Logger logger(TaskRun taskRun) {
|
||||
return LoggerFactory.getLogger(
|
||||
"task." + taskRun.getFlowId() + "." + taskRun.getTaskId()
|
||||
"worker." + taskRun.getTenantId() + "." + taskRun.getNamespace() + "." + taskRun.getFlowId() + "." + taskRun.getTaskId()
|
||||
);
|
||||
}
|
||||
|
||||
private static Logger logger(TriggerContext triggerContext) {
|
||||
return LoggerFactory.getLogger(
|
||||
"trigger." + triggerContext.getFlowId() + "." + triggerContext.getTriggerId()
|
||||
"scheduler." + triggerContext.getTenantId() + "." + triggerContext.getNamespace() + "." + triggerContext.getFlowId() + "." + triggerContext.getTriggerId()
|
||||
);
|
||||
}
|
||||
|
||||
private static Logger logger(Execution execution) {
|
||||
return LoggerFactory.getLogger(
|
||||
"execution." + execution.getFlowId()
|
||||
"executor." + execution.getTenantId() + "." + execution.getNamespace() + "." + execution.getFlowId()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -120,7 +120,10 @@ public class MapUtils {
|
||||
private static Collection<?> mergeCollections(Collection<?> colA, Collection<?> colB) {
|
||||
List<Object> merged = new ArrayList<>(colA.size() + colB.size());
|
||||
merged.addAll(colA);
|
||||
merged.addAll(colB);
|
||||
if (!colB.isEmpty()) {
|
||||
List<?> filtered = colB.stream().filter(it -> !colA.contains(it)).toList();
|
||||
merged.addAll(filtered);
|
||||
}
|
||||
return merged;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,14 +1,12 @@
|
||||
package io.kestra.core.utils;
|
||||
|
||||
import com.google.common.util.concurrent.ThreadFactoryBuilder;
|
||||
import io.kestra.core.models.executions.metrics.Counter;
|
||||
import io.kestra.core.models.executions.metrics.Timer;
|
||||
import io.kestra.core.models.tasks.FileExistComportment;
|
||||
import io.kestra.core.models.tasks.NamespaceFiles;
|
||||
import io.kestra.core.runners.RunContext;
|
||||
import io.kestra.core.storages.NamespaceFile;
|
||||
import jakarta.annotation.PostConstruct;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.inject.Singleton;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.commons.lang3.time.DurationFormatUtils;
|
||||
import org.apache.commons.lang3.time.StopWatch;
|
||||
@@ -19,26 +17,27 @@ import java.io.InputStream;
|
||||
import java.nio.file.Path;
|
||||
import java.time.Duration;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.*;
|
||||
|
||||
import static io.kestra.core.utils.Rethrow.throwConsumer;
|
||||
|
||||
@Singleton
|
||||
public class NamespaceFilesUtils {
|
||||
@Inject
|
||||
private ExecutorsUtils executorsUtils;
|
||||
public final class NamespaceFilesUtils {
|
||||
private static final int maxThreads = Math.max(Runtime.getRuntime().availableProcessors() * 4, 32);
|
||||
private static final ExecutorService EXECUTOR_SERVICE = new ThreadPoolExecutor(
|
||||
0,
|
||||
maxThreads,
|
||||
60L,
|
||||
TimeUnit.SECONDS,
|
||||
new SynchronousQueue<>(),
|
||||
new ThreadFactoryBuilder().setNameFormat("namespace-files").build()
|
||||
);;
|
||||
|
||||
private ExecutorService executorService;
|
||||
|
||||
@PostConstruct
|
||||
public void postConstruct() {
|
||||
this.executorService = executorsUtils.maxCachedThreadPool(Math.max(Runtime.getRuntime().availableProcessors() * 4, 32), "namespace-file");
|
||||
private NamespaceFilesUtils() {
|
||||
// utility class pattern
|
||||
}
|
||||
|
||||
public void loadNamespaceFiles(
|
||||
public static void loadNamespaceFiles(
|
||||
RunContext runContext,
|
||||
NamespaceFiles namespaceFiles
|
||||
)
|
||||
@@ -63,7 +62,11 @@ public class NamespaceFilesUtils {
|
||||
matchedNamespaceFiles.addAll(files);
|
||||
}
|
||||
|
||||
// Use half of the available threads to avoid impacting concurrent tasks
|
||||
int parallelism = maxThreads / 2;
|
||||
Flux.fromIterable(matchedNamespaceFiles)
|
||||
.parallel(parallelism)
|
||||
.runOn(Schedulers.fromExecutorService(EXECUTOR_SERVICE))
|
||||
.doOnNext(throwConsumer(nsFile -> {
|
||||
InputStream content = runContext.storage().getFile(nsFile.uri());
|
||||
Path path = folderPerNamespace ?
|
||||
@@ -71,7 +74,7 @@ public class NamespaceFilesUtils {
|
||||
Path.of(nsFile.path());
|
||||
runContext.workingDir().putFile(path, content, fileExistComportment);
|
||||
}))
|
||||
.publishOn(Schedulers.fromExecutorService(executorService))
|
||||
.sequential()
|
||||
.blockLast();
|
||||
|
||||
Duration duration = stopWatch.getDuration();
|
||||
|
||||
@@ -23,7 +23,6 @@ import io.kestra.core.serializers.ListOrMapOfLabelSerializer;
|
||||
import io.kestra.core.services.StorageService;
|
||||
import io.kestra.core.storages.FileAttributes;
|
||||
import io.kestra.core.storages.StorageContext;
|
||||
import io.kestra.core.storages.StorageInterface;
|
||||
import io.kestra.core.storages.StorageSplitInterface;
|
||||
import io.kestra.core.utils.GraphUtils;
|
||||
import io.kestra.core.validations.NoSystemLabelValidation;
|
||||
@@ -540,7 +539,7 @@ public class ForEachItem extends Task implements FlowableTask<VoidOutput>, Child
|
||||
.numberOfBatches((Integer) taskRun.getOutputs().get(ExecutableUtils.TASK_VARIABLE_NUMBER_OF_BATCHES));
|
||||
|
||||
try (ByteArrayOutputStream bos = new ByteArrayOutputStream()) {
|
||||
FileSerde.write(bos, FlowInputOutput.renderFlowOutputs(flow.getOutputs(), runContext));
|
||||
FileSerde.write(bos, runContext.inputAndOutput().renderOutputs(flow.getOutputs()));
|
||||
URI uri = runContext.storage().putFile(
|
||||
new ByteArrayInputStream(bos.toByteArray()),
|
||||
URI.create((String) taskRun.getOutputs().get("uri"))
|
||||
@@ -602,9 +601,8 @@ public class ForEachItem extends Task implements FlowableTask<VoidOutput>, Child
|
||||
String subflowOutputsBase = (String) taskOutput.get(ExecutableUtils.TASK_VARIABLE_SUBFLOW_OUTPUTS_BASE_URI);
|
||||
URI subflowOutputsBaseUri = URI.create(StorageContext.KESTRA_PROTOCOL + subflowOutputsBase + "/");
|
||||
|
||||
StorageInterface storage = ((DefaultRunContext) runContext).getApplicationContext().getBean(StorageInterface.class);
|
||||
if (storage.exists(runContext.flowInfo().tenantId(), runContext.flowInfo().namespace(), subflowOutputsBaseUri)) {
|
||||
List<FileAttributes> list = storage.list(runContext.flowInfo().tenantId(), runContext.flowInfo().namespace(), subflowOutputsBaseUri);
|
||||
if (runContext.storage().isFileExist(subflowOutputsBaseUri)) {
|
||||
List<FileAttributes> list = runContext.storage().list(subflowOutputsBaseUri);;
|
||||
|
||||
if (!list.isEmpty()) {
|
||||
// Merge outputs from each sub-flow into a single stored in the internal storage.
|
||||
|
||||
@@ -157,7 +157,7 @@ public class LoopUntil extends Task implements FlowableTask<LoopUntil.Output> {
|
||||
|
||||
public Instant nextExecutionDate(RunContext runContext, Execution execution, TaskRun parentTaskRun) throws IllegalVariableEvaluationException {
|
||||
if (!this.reachedMaximums(runContext, execution, parentTaskRun, false)) {
|
||||
String continueLoop = runContext.render(this.condition).as(String.class).orElse(null);
|
||||
String continueLoop = runContext.render(this.condition).skipCache().as(String.class).orElse(null);
|
||||
if (!TruthUtils.isTruthy(continueLoop)) {
|
||||
return Instant.now().plus(runContext.render(this.getCheckFrequency().getInterval()).as(Duration.class).orElseThrow());
|
||||
}
|
||||
|
||||
@@ -63,7 +63,8 @@ import java.util.*;
|
||||
|
||||
- id: run_post_approval
|
||||
type: io.kestra.plugin.scripts.shell.Commands
|
||||
runner: PROCESS
|
||||
taskRunner:
|
||||
type: io.kestra.plugin.core.runner.Process
|
||||
commands:
|
||||
- echo "Manual approval received! Continuing the execution..."
|
||||
|
||||
|
||||
@@ -18,7 +18,6 @@ import io.kestra.core.models.tasks.ExecutableTask;
|
||||
import io.kestra.core.models.tasks.Task;
|
||||
import io.kestra.core.runners.DefaultRunContext;
|
||||
import io.kestra.core.runners.ExecutableUtils;
|
||||
import io.kestra.core.runners.FlowInputOutput;
|
||||
import io.kestra.core.runners.FlowMetaStoreInterface;
|
||||
import io.kestra.core.runners.RunContext;
|
||||
import io.kestra.core.runners.SubflowExecution;
|
||||
@@ -38,7 +37,6 @@ import lombok.Getter;
|
||||
import lombok.NoArgsConstructor;
|
||||
import lombok.ToString;
|
||||
import lombok.experimental.SuperBuilder;
|
||||
import org.slf4j.event.Level;
|
||||
|
||||
import java.time.ZonedDateTime;
|
||||
import java.util.Collections;
|
||||
@@ -246,11 +244,11 @@ public class Subflow extends Task implements ExecutableTask<Subflow.Output>, Chi
|
||||
|
||||
if (subflowOutputs != null && !subflowOutputs.isEmpty()) {
|
||||
try {
|
||||
Map<String, Object> rOutputs = FlowInputOutput.renderFlowOutputs(subflowOutputs, runContext);
|
||||
var inputAndOutput = runContext.inputAndOutput();
|
||||
Map<String, Object> rOutputs = inputAndOutput.renderOutputs(subflowOutputs);
|
||||
|
||||
FlowInputOutput flowInputOutput = ((DefaultRunContext)runContext).getApplicationContext().getBean(FlowInputOutput.class); // this is hacking
|
||||
if (flow.getOutputs() != null && flowInputOutput != null) {
|
||||
rOutputs = flowInputOutput.typedOutputs(flow, execution, rOutputs);
|
||||
if (flow.getOutputs() != null) {
|
||||
rOutputs = inputAndOutput.typedOutputs(flow, execution, rOutputs);
|
||||
}
|
||||
builder.outputs(rOutputs);
|
||||
} catch (Exception e) {
|
||||
|
||||
@@ -123,7 +123,7 @@ public class Switch extends Task implements FlowableTask<Switch.Output> {
|
||||
}
|
||||
|
||||
private String rendererValue(RunContext runContext) throws IllegalVariableEvaluationException {
|
||||
return runContext.render(this.value).as(String.class).orElseThrow();
|
||||
return runContext.render(this.value).skipCache().as(String.class).orElseThrow();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
||||
@@ -260,8 +260,7 @@ public class WorkingDirectory extends Sequential implements NamespaceFilesInterf
|
||||
}
|
||||
|
||||
if (this.namespaceFiles != null && !Boolean.FALSE.equals(runContext.render(this.namespaceFiles.getEnabled()).as(Boolean.class).orElse(true))) {
|
||||
NamespaceFilesUtils namespaceFilesUtils = ((DefaultRunContext) runContext).getApplicationContext().getBean(NamespaceFilesUtils.class);
|
||||
namespaceFilesUtils.loadNamespaceFiles(runContext, this.namespaceFiles);
|
||||
NamespaceFilesUtils.loadNamespaceFiles(runContext, this.namespaceFiles);
|
||||
}
|
||||
|
||||
if (this.inputFiles != null) {
|
||||
|
||||
@@ -26,25 +26,28 @@ import java.util.concurrent.atomic.AtomicLong;
|
||||
@Getter
|
||||
@NoArgsConstructor
|
||||
@Schema(
|
||||
title = "Delete expired keys globally for a specific namespace.",
|
||||
description = "This task will delete expired keys from the Kestra KV store. By default, it will only delete expired keys, but you can choose to delete all keys by setting `expiredOnly` to false. You can also filter keys by a specific pattern and choose to include child namespaces."
|
||||
title = "Purge namespace files for one or multiple namespaces.",
|
||||
description = "This task purges namespace files (and their versions) stored in Kestra. You can restrict the purge to specific namespaces (or a namespace glob pattern), optionally include child namespaces, and filter files by a glob pattern. The purge strategy is controlled via `behavior` (e.g. keep the last N versions and/or delete versions older than a given date)."
|
||||
)
|
||||
@Plugin(
|
||||
examples = {
|
||||
@Example(
|
||||
title = "Delete expired keys globally for a specific namespace, with or without including child namespaces.",
|
||||
title = "Purge old versions of namespace files for a namespace tree.",
|
||||
full = true,
|
||||
code = """
|
||||
id: purge_kv_store
|
||||
id: purge_namespace_files
|
||||
namespace: system
|
||||
|
||||
|
||||
tasks:
|
||||
- id: purge_kv
|
||||
type: io.kestra.plugin.core.kv.PurgeKV
|
||||
expiredOnly: true
|
||||
- id: purge_files
|
||||
type: io.kestra.plugin.core.namespace.PurgeFiles
|
||||
namespaces:
|
||||
- company
|
||||
includeChildNamespaces: true
|
||||
filePattern: "**/*.sql"
|
||||
behavior:
|
||||
type: version
|
||||
before: "2025-01-01T00:00:00Z"
|
||||
"""
|
||||
)
|
||||
}
|
||||
@@ -116,7 +119,7 @@ public class PurgeFiles extends Task implements PurgeTask<NamespaceFile>, Runnab
|
||||
@Getter
|
||||
public static class Output implements io.kestra.core.models.tasks.Output {
|
||||
@Schema(
|
||||
title = "The number of purged KV pairs"
|
||||
title = "The number of purged namespace file versions"
|
||||
)
|
||||
private Long size;
|
||||
}
|
||||
|
||||
@@ -0,0 +1,107 @@
|
||||
package io.kestra.plugin.core.trigger;
|
||||
|
||||
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
|
||||
import io.kestra.core.models.Label;
|
||||
import io.kestra.core.models.conditions.ConditionContext;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.executions.ExecutionTrigger;
|
||||
import io.kestra.core.models.flows.FlowInterface;
|
||||
import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.models.triggers.AbstractTrigger;
|
||||
import io.kestra.core.models.triggers.Backfill;
|
||||
import io.kestra.core.models.triggers.Schedulable;
|
||||
import io.kestra.core.models.triggers.TriggerContext;
|
||||
import io.kestra.core.runners.RunContext;
|
||||
import io.kestra.core.services.LabelService;
|
||||
import io.kestra.core.utils.ListUtils;
|
||||
|
||||
import java.time.ZonedDateTime;
|
||||
import java.time.chrono.ChronoZonedDateTime;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
|
||||
/**
|
||||
* Factory class for constructing a new {@link Execution} from a {@link Schedulable} trigger.
|
||||
*
|
||||
* @see io.kestra.plugin.core.trigger.Schedule
|
||||
* @see io.kestra.plugin.core.trigger.ScheduleOnDates
|
||||
*/
|
||||
final class SchedulableExecutionFactory {

    /**
     * Creates an {@link Execution} already in the {@code FAILED} state for the given trigger,
     * so that a failure during trigger evaluation is still visible as an execution.
     *
     * @throws IllegalVariableEvaluationException if rendering the trigger/backfill labels fails
     */
    static Execution createFailedExecution(Schedulable trigger, ConditionContext conditionContext, TriggerContext triggerContext) throws IllegalVariableEvaluationException {
        return Execution.builder()
            .id(conditionContext.getRunContext().getTriggerExecutionId())
            .tenantId(triggerContext.getTenantId())
            .namespace(triggerContext.getNamespace())
            .flowId(triggerContext.getFlowId())
            .flowRevision(conditionContext.getFlow().getRevision())
            .labels(SchedulableExecutionFactory.getLabels(trigger, conditionContext.getRunContext(), triggerContext.getBackfill(), conditionContext.getFlow()))
            .state(new State().withState(State.Type.FAILED))
            .build();
    }

    /**
     * Creates a new {@link Execution} for the given schedulable trigger.
     * <p>
     * The execution carries: the trigger's rendered labels plus a {@code system} "from: trigger"
     * label and (when absent) a correlation-id label; the flow's variables and revision; the
     * trigger's output variables; an optional schedule date; and the resolved inputs
     * (trigger inputs overridden by backfill inputs, with flow defaults injected by
     * {@code readInputs}).
     *
     * @param variables    the trigger output variables attached via {@link ExecutionTrigger#of}
     * @param scheduleDate the date the execution is scheduled for; may be {@code null}
     * @throws IllegalVariableEvaluationException if rendering labels or inputs fails
     */
    static Execution createExecution(Schedulable trigger, ConditionContext conditionContext, TriggerContext triggerContext, Map<String, Object> variables, ZonedDateTime scheduleDate) throws IllegalVariableEvaluationException {
        RunContext runContext = conditionContext.getRunContext();
        ExecutionTrigger executionTrigger = ExecutionTrigger.of((AbstractTrigger) trigger, variables);

        List<Label> labels = getLabels(trigger, runContext, triggerContext.getBackfill(), conditionContext.getFlow());

        // Copy into a fresh list before mutating so the rendered labels stay untouched.
        List<Label> executionLabels = new ArrayList<>(ListUtils.emptyOnNull(labels));
        executionLabels.add(new Label(Label.FROM, "trigger"));
        if (executionLabels.stream().noneMatch(label -> Label.CORRELATION_ID.equals(label.key()))) {
            // add a correlation ID if none exists; the trigger execution id doubles as the value
            executionLabels.add(new Label(Label.CORRELATION_ID, runContext.getTriggerExecutionId()));
        }

        Execution execution = Execution.builder()
            .id(runContext.getTriggerExecutionId())
            .tenantId(triggerContext.getTenantId())
            .namespace(triggerContext.getNamespace())
            .flowId(triggerContext.getFlowId())
            .flowRevision(conditionContext.getFlow().getRevision())
            .variables(conditionContext.getFlow().getVariables())
            .labels(executionLabels)
            .state(new State())
            .trigger(executionTrigger)
            .scheduleDate(Optional.ofNullable(scheduleDate).map(ChronoZonedDateTime::toInstant).orElse(null))
            .build();

        Map<String, Object> allInputs = getInputs(trigger, runContext, triggerContext.getBackfill());

        // add inputs and inject defaults (FlowInputOutput handles defaults internally)
        execution = execution.withInputs(runContext.inputAndOutput().readInputs(conditionContext.getFlow(), execution, allInputs));

        return execution;
    }

    /**
     * Renders and merges the trigger's inputs with the backfill's inputs.
     * Backfill inputs win on key collision because they are put last.
     */
    private static Map<String, Object> getInputs(Schedulable trigger, RunContext runContext, Backfill backfill) throws IllegalVariableEvaluationException {
        Map<String, Object> inputs = new HashMap<>();

        if (trigger.getInputs() != null) {
            inputs.putAll(runContext.render(trigger.getInputs()));
        }

        if (backfill != null && backfill.getInputs() != null) {
            inputs.putAll(runContext.render(backfill.getInputs()));
        }

        return inputs;
    }

    /**
     * Builds the labels for an execution created by this trigger: the labels rendered from
     * the trigger/flow, plus the backfill's rendered labels when a backfill is running.
     */
    private static List<Label> getLabels(Schedulable trigger, RunContext runContext, Backfill backfill, FlowInterface flow) throws IllegalVariableEvaluationException {
        // NOTE(review): assumes LabelService.fromTrigger returns a mutable list, since backfill
        // labels are appended to it below — confirm.
        List<Label> labels = LabelService.fromTrigger(runContext, flow, (AbstractTrigger) trigger);

        if (backfill != null && backfill.getLabels() != null) {
            for (Label label : backfill.getLabels()) {
                final var value = runContext.render(label.value());
                if (value != null) {
                    labels.add(new Label(label.key(), value));
                }
            }
        }
        return labels;
    }
}
|
||||
@@ -6,9 +6,7 @@ import com.cronutils.model.time.ExecutionTime;
|
||||
import com.cronutils.parser.CronParser;
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
|
||||
import io.kestra.core.exceptions.InternalException;
|
||||
import io.kestra.core.models.Label;
|
||||
import io.kestra.core.models.annotations.Example;
|
||||
import io.kestra.core.models.annotations.Plugin;
|
||||
import io.kestra.core.models.annotations.PluginProperty;
|
||||
@@ -16,12 +14,8 @@ import io.kestra.core.models.conditions.Condition;
|
||||
import io.kestra.core.models.conditions.ConditionContext;
|
||||
import io.kestra.core.models.conditions.ScheduleCondition;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.models.triggers.*;
|
||||
import io.kestra.core.runners.DefaultRunContext;
|
||||
import io.kestra.core.runners.RunContext;
|
||||
import io.kestra.core.services.ConditionService;
|
||||
import io.kestra.core.services.LabelService;
|
||||
import io.kestra.core.utils.ListUtils;
|
||||
import io.kestra.core.validations.ScheduleValidation;
|
||||
import io.kestra.core.validations.TimezoneId;
|
||||
@@ -29,6 +23,7 @@ import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import jakarta.validation.Valid;
|
||||
import jakarta.validation.constraints.NotNull;
|
||||
import jakarta.validation.constraints.Null;
|
||||
import lombok.AccessLevel;
|
||||
import lombok.*;
|
||||
import lombok.experimental.SuperBuilder;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
@@ -40,6 +35,8 @@ import java.time.temporal.ChronoUnit;
|
||||
import java.util.*;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import static io.kestra.core.utils.Rethrow.throwPredicate;
|
||||
|
||||
@Slf4j
|
||||
@SuperBuilder
|
||||
@ToString
|
||||
@@ -224,11 +221,7 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
|
||||
@PluginProperty
|
||||
@Deprecated
|
||||
private List<ScheduleCondition> scheduleConditions;
|
||||
|
||||
@Schema(
|
||||
title = "The inputs to pass to the scheduled flow"
|
||||
)
|
||||
@PluginProperty(dynamic = true)
|
||||
|
||||
private Map<String, Object> inputs;
|
||||
|
||||
@Schema(
|
||||
@@ -248,13 +241,7 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
|
||||
@PluginProperty
|
||||
@Deprecated
|
||||
private Map<String, Object> backfill;
|
||||
|
||||
@Schema(
|
||||
title = "Action to take in the case of missed schedules",
|
||||
description = "`ALL` will recover all missed schedules, `LAST` will only recovered the last missing one, `NONE` will not recover any missing schedule.\n" +
|
||||
"The default is `ALL` unless a different value is configured using the global plugin configuration."
|
||||
)
|
||||
@PluginProperty
|
||||
|
||||
private RecoverMissedSchedules recoverMissedSchedules;
|
||||
|
||||
@Override
|
||||
@@ -403,20 +390,11 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
|
||||
if (!conditionResults) {
|
||||
return Optional.empty();
|
||||
}
|
||||
} catch(InternalException ie) {
|
||||
} catch (InternalException ie) {
|
||||
// validate schedule condition can fail to render variables
|
||||
// in this case, we return a failed execution so the trigger is not evaluated each second
|
||||
runContext.logger().error("Unable to evaluate the Schedule trigger '{}'", this.getId(), ie);
|
||||
Execution execution = Execution.builder()
|
||||
.id(runContext.getTriggerExecutionId())
|
||||
.tenantId(triggerContext.getTenantId())
|
||||
.namespace(triggerContext.getNamespace())
|
||||
.flowId(triggerContext.getFlowId())
|
||||
.flowRevision(conditionContext.getFlow().getRevision())
|
||||
.labels(generateLabels(runContext, conditionContext, backfill))
|
||||
.state(new State().withState(State.Type.FAILED))
|
||||
.build();
|
||||
return Optional.of(execution);
|
||||
return Optional.of(SchedulableExecutionFactory.createFailedExecution(this, conditionContext, triggerContext));
|
||||
}
|
||||
|
||||
// recalculate true output for previous and next based on conditions
|
||||
@@ -430,14 +408,12 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
|
||||
variables = scheduleDates.toMap();
|
||||
}
|
||||
|
||||
Execution execution = TriggerService.generateScheduledExecution(
|
||||
Execution execution = SchedulableExecutionFactory.createExecution(
|
||||
this,
|
||||
conditionContext,
|
||||
triggerContext,
|
||||
generateLabels(runContext, conditionContext, backfill),
|
||||
generateInputs(runContext, backfill),
|
||||
variables,
|
||||
Optional.empty()
|
||||
null
|
||||
);
|
||||
|
||||
return Optional.of(execution);
|
||||
@@ -448,34 +424,6 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
|
||||
return parser.parse(this.cron);
|
||||
}
|
||||
|
||||
private List<Label> generateLabels(RunContext runContext, ConditionContext conditionContext, Backfill backfill) throws IllegalVariableEvaluationException {
|
||||
List<Label> labels = LabelService.fromTrigger(runContext, conditionContext.getFlow(), this);
|
||||
|
||||
if (backfill != null && backfill.getLabels() != null) {
|
||||
for (Label label : backfill.getLabels()) {
|
||||
final var value = runContext.render(label.value());
|
||||
if (value != null) {
|
||||
labels.add(new Label(label.key(), value));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return labels;
|
||||
}
|
||||
|
||||
private Map<String, Object> generateInputs(RunContext runContext, Backfill backfill) throws IllegalVariableEvaluationException {
|
||||
Map<String, Object> inputs = new HashMap<>();
|
||||
|
||||
if (this.inputs != null) {
|
||||
inputs.putAll(runContext.render(this.inputs));
|
||||
}
|
||||
|
||||
if (backfill != null && backfill.getInputs() != null) {
|
||||
inputs.putAll(runContext.render(backfill.getInputs()));
|
||||
}
|
||||
|
||||
return inputs;
|
||||
}
|
||||
private Optional<Output> scheduleDates(ExecutionTime executionTime, ZonedDateTime date) {
|
||||
Optional<ZonedDateTime> next = executionTime.nextExecution(date.minus(Duration.ofSeconds(1)));
|
||||
|
||||
@@ -549,9 +497,9 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
|
||||
Optional<ZonedDateTime> truePreviousNextDateWithCondition(ExecutionTime executionTime, ConditionContext conditionContext, ZonedDateTime toTestDate, boolean next) throws InternalException {
|
||||
int upperYearBound = ZonedDateTime.now().getYear() + 10;
|
||||
int lowerYearBound = ZonedDateTime.now().getYear() - 10;
|
||||
|
||||
|
||||
while ((next && toTestDate.getYear() < upperYearBound) || (!next && toTestDate.getYear() > lowerYearBound)) {
|
||||
|
||||
|
||||
Optional<ZonedDateTime> currentDate = next ?
|
||||
executionTime.nextExecution(toTestDate) :
|
||||
executionTime.lastExecution(toTestDate);
|
||||
@@ -607,11 +555,10 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
|
||||
|
||||
private boolean validateScheduleCondition(ConditionContext conditionContext) throws InternalException {
|
||||
if (conditions != null) {
|
||||
ConditionService conditionService = ((DefaultRunContext)conditionContext.getRunContext()).getApplicationContext().getBean(ConditionService.class);
|
||||
return conditionService.isValid(
|
||||
conditions.stream().filter(c -> c instanceof ScheduleCondition).map(c -> (ScheduleCondition) c).toList(),
|
||||
conditionContext
|
||||
);
|
||||
return conditions.stream()
|
||||
.filter(c -> c instanceof ScheduleCondition)
|
||||
.map(c -> (ScheduleCondition) c)
|
||||
.allMatch(throwPredicate(condition -> condition.test(conditionContext)));
|
||||
}
|
||||
|
||||
return true;
|
||||
|
||||
@@ -10,7 +10,6 @@ import io.kestra.core.models.property.Property;
|
||||
import io.kestra.core.models.tasks.VoidOutput;
|
||||
import io.kestra.core.models.triggers.*;
|
||||
import io.kestra.core.runners.RunContext;
|
||||
import io.kestra.core.services.LabelService;
|
||||
import io.kestra.core.validations.TimezoneId;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import jakarta.validation.constraints.NotNull;
|
||||
@@ -23,7 +22,10 @@ import java.time.Duration;
|
||||
import java.time.ZoneId;
|
||||
import java.time.ZonedDateTime;
|
||||
import java.time.temporal.ChronoUnit;
|
||||
import java.util.*;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.function.Predicate;
|
||||
|
||||
import static io.kestra.core.utils.Rethrow.throwFunction;
|
||||
@@ -45,11 +47,7 @@ public class ScheduleOnDates extends AbstractTrigger implements Schedulable, Tri
|
||||
@Builder.Default
|
||||
@Null
|
||||
private final Duration interval = null;
|
||||
|
||||
@Schema(
|
||||
title = "The inputs to pass to the scheduled flow"
|
||||
)
|
||||
@PluginProperty(dynamic = true)
|
||||
|
||||
private Map<String, Object> inputs;
|
||||
|
||||
@TimezoneId
|
||||
@@ -63,31 +61,24 @@ public class ScheduleOnDates extends AbstractTrigger implements Schedulable, Tri
|
||||
@NotNull
|
||||
private Property<List<ZonedDateTime>> dates;
|
||||
|
||||
@Schema(
|
||||
title = "Action to take in the case of missed schedules",
|
||||
description = "`ALL` will recover all missed schedules, `LAST` will only recovered the last missing one, `NONE` will not recover any missing schedule.\n" +
|
||||
"The default is `ALL` unless a different value is configured using the global plugin configuration."
|
||||
)
|
||||
@PluginProperty
|
||||
private RecoverMissedSchedules recoverMissedSchedules;
|
||||
|
||||
@Override
|
||||
public Optional<Execution> evaluate(ConditionContext conditionContext, TriggerContext triggerContext) throws Exception {
|
||||
RunContext runContext = conditionContext.getRunContext();
|
||||
|
||||
ZonedDateTime lastEvaluation = triggerContext.getDate();
|
||||
Optional<ZonedDateTime> nextDate = nextDate(runContext, date -> date.isEqual(lastEvaluation) || date.isAfter(lastEvaluation));
|
||||
|
||||
if (nextDate.isPresent()) {
|
||||
log.info("Schedule execution on {}", nextDate.get());
|
||||
|
||||
Execution execution = TriggerService.generateScheduledExecution(
|
||||
Execution execution = SchedulableExecutionFactory.createExecution(
|
||||
this,
|
||||
conditionContext,
|
||||
triggerContext,
|
||||
LabelService.fromTrigger(runContext, conditionContext.getFlow(), this),
|
||||
this.inputs != null ? runContext.render(this.inputs) : Collections.emptyMap(),
|
||||
Collections.emptyMap(),
|
||||
nextDate
|
||||
nextDate.orElse(null)
|
||||
);
|
||||
|
||||
return Optional.of(execution);
|
||||
@@ -97,29 +88,21 @@ public class ScheduleOnDates extends AbstractTrigger implements Schedulable, Tri
|
||||
}
|
||||
|
||||
@Override
|
||||
public ZonedDateTime nextEvaluationDate(ConditionContext conditionContext, Optional<? extends TriggerContext> last) {
|
||||
try {
|
||||
return last
|
||||
.map(throwFunction(context ->
|
||||
nextDate(conditionContext.getRunContext(), date -> date.isAfter(context.getDate()))
|
||||
.orElse(ZonedDateTime.now().plusYears(1))
|
||||
))
|
||||
.orElse(conditionContext.getRunContext()
|
||||
.render(dates)
|
||||
.asList(ZonedDateTime.class)
|
||||
.stream()
|
||||
.sorted()
|
||||
.findFirst()
|
||||
.orElse(ZonedDateTime.now()))
|
||||
.truncatedTo(ChronoUnit.SECONDS);
|
||||
} catch (IllegalVariableEvaluationException e) {
|
||||
log.warn("Failed to evaluate schedule dates for trigger '{}': {}", this.getId(), e.getMessage());
|
||||
return ZonedDateTime.now().plusYears(1);
|
||||
}
|
||||
public ZonedDateTime nextEvaluationDate(ConditionContext conditionContext, Optional<? extends TriggerContext> triggerContext) {
|
||||
return triggerContext
|
||||
.map(ctx -> ctx.getBackfill() != null ? ctx.getBackfill().getCurrentDate() : ctx.getDate())
|
||||
.map(this::withTimeZone)
|
||||
.or(() -> Optional.of(ZonedDateTime.now()))
|
||||
.flatMap(dt -> {
|
||||
try {
|
||||
return nextDate(conditionContext.getRunContext(), date -> date.isAfter(dt));
|
||||
} catch (IllegalVariableEvaluationException e) {
|
||||
log.warn("Failed to evaluate schedule dates for trigger '{}': {}", this.getId(), e.getMessage());
|
||||
throw new InvalidTriggerConfigurationException("Failed to evaluate schedule 'dates'. Cause: " + e.getMessage());
|
||||
}
|
||||
}).orElseGet(() -> ZonedDateTime.now().plusYears(1));
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Override
|
||||
public ZonedDateTime nextEvaluationDate() {
|
||||
// TODO this may be the next date from now?
|
||||
@@ -139,9 +122,17 @@ public class ScheduleOnDates extends AbstractTrigger implements Schedulable, Tri
|
||||
return previousDates.isEmpty() ? ZonedDateTime.now() : previousDates.getFirst();
|
||||
}
|
||||
|
||||
private Optional<ZonedDateTime> nextDate(RunContext runContext, Predicate<ZonedDateTime> filter) throws IllegalVariableEvaluationException {
|
||||
return runContext.render(dates).asList(ZonedDateTime.class).stream().sorted()
|
||||
.filter(date -> filter.test(date))
|
||||
private ZonedDateTime withTimeZone(ZonedDateTime date) {
|
||||
if (this.timezone == null) {
|
||||
return date;
|
||||
}
|
||||
return date.withZoneSameInstant(ZoneId.of(this.timezone));
|
||||
}
|
||||
|
||||
private Optional<ZonedDateTime> nextDate(RunContext runContext, Predicate<ZonedDateTime> predicate) throws IllegalVariableEvaluationException {
|
||||
return runContext.render(dates)
|
||||
.asList(ZonedDateTime.class).stream().sorted()
|
||||
.filter(predicate)
|
||||
.map(throwFunction(date -> timezone == null ? date : date.withZoneSameInstant(ZoneId.of(runContext.render(timezone)))))
|
||||
.findFirst()
|
||||
.map(date -> date.truncatedTo(ChronoUnit.SECONDS));
|
||||
|
||||
@@ -9,10 +9,14 @@
|
||||
<property name="pattern" value="%date{HH:mm:ss}.%ms %highlight(%-5.5level) %magenta(%-12.36thread) %cyan(%-12.36logger{36}) %msg%n" />
|
||||
|
||||
<logger name="io.kestra" level="INFO" />
|
||||
<logger name="flow" level="INFO" />
|
||||
<logger name="task" level="INFO" />
|
||||
<logger name="execution" level="INFO" />
|
||||
<logger name="trigger" level="INFO" />
|
||||
|
||||
<!-- Flow execution logs - disabled by default -->
|
||||
<logger name="flow" level="OFF" />
|
||||
|
||||
<!-- Server loggers -->
|
||||
<logger name="worker" level="INFO" />
|
||||
<logger name="executor" level="INFO" />
|
||||
<logger name="scheduler" level="INFO" />
|
||||
|
||||
<logger name="io.kestra.ee.runner.kafka.services.KafkaConsumerService" level="WARN" />
|
||||
<logger name="io.kestra.ee.runner.kafka.services.KafkaProducerService" level="WARN" />
|
||||
|
||||
8
core/src/main/resources/metadata/chart.yaml
Normal file
8
core/src/main/resources/metadata/chart.yaml
Normal file
@@ -0,0 +1,8 @@
|
||||
group: io.kestra.plugin.core.chart
|
||||
name: "chart"
|
||||
title: "Chart"
|
||||
description: "Tasks that render dashboard charts from Kestra data sources."
|
||||
body: "Use these chart widgets to visualize metrics, executions, or flow trends in dashboards; pair them with dashboard data queries and configure aggregations, groupings, and chart options for Bar, Pie, Time Series, KPI, or Table outputs."
|
||||
videos: []
|
||||
createdBy: "Kestra Core Team"
|
||||
managedBy: "Kestra Core Team"
|
||||
8
core/src/main/resources/metadata/condition.yaml
Normal file
8
core/src/main/resources/metadata/condition.yaml
Normal file
@@ -0,0 +1,8 @@
|
||||
group: io.kestra.plugin.core.condition
|
||||
name: "condition"
|
||||
title: "Condition"
|
||||
description: "Tasks that evaluate conditions to control flow execution or triggers."
|
||||
body: "Use these predicates to gate tasks or triggers based on time windows, calendars, execution metadata, labels, namespaces, retries, or custom expressions; configure required parameters such as allowed states, namespaces, date ranges, or JEXL expressions to return a true/false result."
|
||||
videos: []
|
||||
createdBy: "Kestra Core Team"
|
||||
managedBy: "Kestra Core Team"
|
||||
8
core/src/main/resources/metadata/data.yaml
Normal file
8
core/src/main/resources/metadata/data.yaml
Normal file
@@ -0,0 +1,8 @@
|
||||
group: io.kestra.plugin.core.data
|
||||
name: "data"
|
||||
title: "Data"
|
||||
description: "Tasks that fetch Kestra executions, flows, logs, metrics, and triggers as datasets for dashboards."
|
||||
body: "These data providers query Kestra repositories with filters and aggregations to feed dashboard charts; configure columns and fields (such as namespace, state, timestamp, or labels) plus any filters to shape the returned dataset for visualization."
|
||||
videos: []
|
||||
createdBy: "Kestra Core Team"
|
||||
managedBy: "Kestra Core Team"
|
||||
8
core/src/main/resources/metadata/debug.yaml
Normal file
8
core/src/main/resources/metadata/debug.yaml
Normal file
@@ -0,0 +1,8 @@
|
||||
group: io.kestra.plugin.core.debug
|
||||
name: "debug"
|
||||
title: "Debug"
|
||||
description: "Tasks that emit debug output while you develop a flow."
|
||||
body: "Echo and Return help inspect variables and payloads or short-circuit execution during testing; provide the message or value to output so downstream tasks can see exactly what is being passed around."
|
||||
videos: []
|
||||
createdBy: "Kestra Core Team"
|
||||
managedBy: "Kestra Core Team"
|
||||
8
core/src/main/resources/metadata/execution.yaml
Normal file
8
core/src/main/resources/metadata/execution.yaml
Normal file
@@ -0,0 +1,8 @@
|
||||
group: io.kestra.plugin.core.execution
|
||||
name: "execution"
|
||||
title: "Execution"
|
||||
description: "Tasks that manage the lifecycle and context of a running execution."
|
||||
body: "Use these tasks to assert expectations, set or unset variables, add labels, fail, exit, resume, or purge executions; supply required properties such as variable maps, label key/values, or retention rules before altering execution state."
|
||||
videos: []
|
||||
createdBy: "Kestra Core Team"
|
||||
managedBy: "Kestra Core Team"
|
||||
8
core/src/main/resources/metadata/flow.yaml
Normal file
8
core/src/main/resources/metadata/flow.yaml
Normal file
@@ -0,0 +1,8 @@
|
||||
group: io.kestra.plugin.core.flow
|
||||
name: "flow"
|
||||
title: "Flow"
|
||||
description: "Tasks that orchestrate control flow within a Kestra pipeline."
|
||||
body: "Sequence, branch, loop, parallelize, or nest subflows/templates using these primitives; define embedded task lists, values for switches, iteration collections, working directories, and loop exit criteria to structure complex workflows cleanly."
|
||||
videos: []
|
||||
createdBy: "Kestra Core Team"
|
||||
managedBy: "Kestra Core Team"
|
||||
8
core/src/main/resources/metadata/http.yaml
Normal file
8
core/src/main/resources/metadata/http.yaml
Normal file
@@ -0,0 +1,8 @@
|
||||
group: io.kestra.plugin.core.http
|
||||
name: "http"
|
||||
title: "HTTP"
|
||||
description: "Tasks that interact with HTTP endpoints."
|
||||
body: "Perform requests, downloads, or webhook triggers with configurable methods, headers, authentication, and payloads; provide the target URI plus any body or query parameters, and use response handling options to store results for downstream tasks."
|
||||
videos: []
|
||||
createdBy: "Kestra Core Team"
|
||||
managedBy: "Kestra Core Team"
|
||||
8
core/src/main/resources/metadata/index.yaml
Normal file
8
core/src/main/resources/metadata/index.yaml
Normal file
@@ -0,0 +1,8 @@
|
||||
group: io.kestra.plugin.core
|
||||
name: "core"
|
||||
title: "Core Plugins and tasks"
|
||||
description: "Tasks that provide Kestra's built-in orchestration, I/O, and observability capabilities."
|
||||
body: "Core plugins cover control-flow, execution management, triggers, storage, HTTP, metrics, logging, templating, and dashboard widgets; combine these foundational tasks to build reliable workflows without adding external dependencies."
|
||||
videos: []
|
||||
createdBy: "Kestra Core Team"
|
||||
managedBy: "Kestra Core Team"
|
||||
8
core/src/main/resources/metadata/kv.yaml
Normal file
8
core/src/main/resources/metadata/kv.yaml
Normal file
@@ -0,0 +1,8 @@
|
||||
group: io.kestra.plugin.core.kv
|
||||
name: "kv"
|
||||
title: "KV"
|
||||
description: "Tasks that manage key-value pairs in Kestra's KV store."
|
||||
body: "Set, get, list, version, and delete namespaced keys to share state across flows; specify the key path, value for writes, and optional namespace or TTL to control how data is stored, retrieved, and purged."
|
||||
videos: []
|
||||
createdBy: "Kestra Core Team"
|
||||
managedBy: "Kestra Core Team"
|
||||
8
core/src/main/resources/metadata/log.yaml
Normal file
8
core/src/main/resources/metadata/log.yaml
Normal file
@@ -0,0 +1,8 @@
|
||||
group: io.kestra.plugin.core.log
|
||||
name: "log"
|
||||
title: "Log"
|
||||
description: "Tasks that write, fetch, or purge Kestra logs."
|
||||
body: "Emit structured log messages, retrieve stored logs, or clean up log storage; provide message content or log query filters and consider namespace or execution scoping when purging."
|
||||
videos: []
|
||||
createdBy: "Kestra Core Team"
|
||||
managedBy: "Kestra Core Team"
|
||||
8
core/src/main/resources/metadata/metric.yaml
Normal file
8
core/src/main/resources/metadata/metric.yaml
Normal file
@@ -0,0 +1,8 @@
|
||||
group: io.kestra.plugin.core.metric
|
||||
name: "metric"
|
||||
title: "Metric"
|
||||
description: "Tasks that publish custom metrics from flows."
|
||||
body: "Send counters, gauges, and timing metrics to Kestra's metric store for dashboards and alerts; define the metric name, type, value, labels, and optional timestamp to record meaningful telemetry."
|
||||
videos: []
|
||||
createdBy: "Kestra Core Team"
|
||||
managedBy: "Kestra Core Team"
|
||||
8
core/src/main/resources/metadata/namespace.yaml
Normal file
8
core/src/main/resources/metadata/namespace.yaml
Normal file
@@ -0,0 +1,8 @@
|
||||
group: io.kestra.plugin.core.namespace
|
||||
name: "namespace"
|
||||
title: "Namespace"
|
||||
description: "Tasks that manage namespace files and versions."
|
||||
body: "Upload, download, delete, purge, or version files stored in a namespace—useful for shipping assets or configs with flows; set the target namespace, paths or glob patterns, and purge behavior to control stored artifacts."
|
||||
videos: []
|
||||
createdBy: "Kestra Core Team"
|
||||
managedBy: "Kestra Core Team"
|
||||
8
core/src/main/resources/metadata/output.yaml
Normal file
8
core/src/main/resources/metadata/output.yaml
Normal file
@@ -0,0 +1,8 @@
|
||||
group: io.kestra.plugin.core.output
|
||||
name: "output"
|
||||
title: "Output"
|
||||
description: "Tasks that expose outputs from a flow."
|
||||
body: "Use OutputValues to publish key-value outputs for downstream tasks or subflows; declare the output map and data types that consuming tasks should read."
|
||||
videos: []
|
||||
createdBy: "Kestra Core Team"
|
||||
managedBy: "Kestra Core Team"
|
||||
8
core/src/main/resources/metadata/runner.yaml
Normal file
8
core/src/main/resources/metadata/runner.yaml
Normal file
@@ -0,0 +1,8 @@
|
||||
group: io.kestra.plugin.core.runner
|
||||
name: "runner"
|
||||
title: "Runner"
|
||||
description: "Tasks that execute commands on the Kestra worker."
|
||||
body: "Run shell processes with configurable command, environment, working directory, and input/output handling; ensure commands are idempotent and set expected exit codes or resource needs when invoking external binaries."
|
||||
videos: []
|
||||
createdBy: "Kestra Core Team"
|
||||
managedBy: "Kestra Core Team"
|
||||
8
core/src/main/resources/metadata/storage.yaml
Normal file
8
core/src/main/resources/metadata/storage.yaml
Normal file
@@ -0,0 +1,8 @@
|
||||
group: io.kestra.plugin.core.storage
|
||||
name: "storage"
|
||||
title: "Storage"
|
||||
description: "Tasks that manipulate files in Kestra's internal storage."
|
||||
body: "Write, delete, concatenate, split, deduplicate, filter, reverse, size, or list files used by executions; provide source and target storage URIs and any encoding or line-handling options to transform stored data safely."
|
||||
videos: []
|
||||
createdBy: "Kestra Core Team"
|
||||
managedBy: "Kestra Core Team"
|
||||
8
core/src/main/resources/metadata/templating.yaml
Normal file
8
core/src/main/resources/metadata/templating.yaml
Normal file
@@ -0,0 +1,8 @@
|
||||
group: io.kestra.plugin.core.templating
|
||||
name: "templating"
|
||||
title: "Templating"
|
||||
description: "Tasks that render dynamic task specifications from templates."
|
||||
body: "TemplatedTask lets you supply a Pebble-rendered YAML spec that is parsed and executed at runtime; provide the `spec` property with a valid runnable task definition and avoid recursive templating when composing dynamic tasks."
|
||||
videos: []
|
||||
createdBy: "Kestra Core Team"
|
||||
managedBy: "Kestra Core Team"
|
||||
8
core/src/main/resources/metadata/trigger.yaml
Normal file
8
core/src/main/resources/metadata/trigger.yaml
Normal file
@@ -0,0 +1,8 @@
|
||||
group: io.kestra.plugin.core.trigger
|
||||
name: "trigger"
|
||||
title: "Trigger"
|
||||
description: "Tasks that start flows from schedules or events."
|
||||
body: "Define cron-based schedules, specific date triggers, webhooks, namespace flow triggers, or toggles; set required properties like cron expressions, webhook secrets, and target flow references to control when executions fire."
|
||||
videos: []
|
||||
createdBy: "Kestra Core Team"
|
||||
managedBy: "Kestra Core Team"
|
||||
@@ -170,10 +170,11 @@ class JsonSchemaGeneratorTest {
|
||||
|
||||
Map<String, Object> jsonSchema = jsonSchemaGenerator.generate(AbstractTrigger.class, AbstractTrigger.class);
|
||||
assertThat((Map<String, Object>) jsonSchema.get("properties"), allOf(
|
||||
Matchers.aMapWithSize(3),
|
||||
Matchers.aMapWithSize(4),
|
||||
hasKey("conditions"),
|
||||
hasKey("stopAfter"),
|
||||
hasKey("type")
|
||||
hasKey("type"),
|
||||
hasKey("allowConcurrent")
|
||||
));
|
||||
});
|
||||
}
|
||||
|
||||
@@ -134,4 +134,47 @@ class LabelTest {
|
||||
Optional<ConstraintViolationException> emptyKeyLabelResult = modelValidator.isValid(new Label("", "bar"));
|
||||
assertThat(emptyKeyLabelResult.isPresent()).isTrue();
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldValidateValidLabelKeys() {
|
||||
// Valid keys: start with lowercase; may contain letters, numbers, hyphens, underscores, periods
|
||||
assertThat(modelValidator.isValid(new Label("foo", "bar")).isPresent()).isFalse();
|
||||
assertThat(modelValidator.isValid(new Label("foo-bar", "value")).isPresent()).isFalse();
|
||||
assertThat(modelValidator.isValid(new Label("foo_bar", "value")).isPresent()).isFalse();
|
||||
assertThat(modelValidator.isValid(new Label("foo123", "value")).isPresent()).isFalse();
|
||||
assertThat(modelValidator.isValid(new Label("foo-bar_baz123", "value")).isPresent()).isFalse();
|
||||
assertThat(modelValidator.isValid(new Label("a", "value")).isPresent()).isFalse();
|
||||
assertThat(modelValidator.isValid(new Label("foo.bar", "value")).isPresent()).isFalse(); // dot is allowed
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldRejectInvalidLabelKeys() {
|
||||
|
||||
Optional<ConstraintViolationException> spaceResult = modelValidator.isValid(new Label("foo bar", "value"));
|
||||
assertThat(spaceResult.isPresent()).isTrue();
|
||||
|
||||
Optional<ConstraintViolationException> uppercaseResult = modelValidator.isValid(new Label("Foo", "value"));
|
||||
assertThat(uppercaseResult.isPresent()).isTrue();
|
||||
|
||||
Optional<ConstraintViolationException> emojiResult = modelValidator.isValid(new Label("💩", "value"));
|
||||
assertThat(emojiResult.isPresent()).isTrue();
|
||||
|
||||
Optional<ConstraintViolationException> atSignResult = modelValidator.isValid(new Label("foo@bar", "value"));
|
||||
assertThat(atSignResult.isPresent()).isTrue();
|
||||
|
||||
Optional<ConstraintViolationException> colonResult = modelValidator.isValid(new Label("foo:bar", "value"));
|
||||
assertThat(colonResult.isPresent()).isTrue();
|
||||
|
||||
Optional<ConstraintViolationException> hyphenStartResult = modelValidator.isValid(new Label("-foo", "value"));
|
||||
assertThat(hyphenStartResult.isPresent()).isTrue();
|
||||
|
||||
Optional<ConstraintViolationException> underscoreStartResult = modelValidator.isValid(new Label("_foo", "value"));
|
||||
assertThat(underscoreStartResult.isPresent()).isTrue();
|
||||
|
||||
Optional<ConstraintViolationException> zeroResult = modelValidator.isValid(new Label("0", "value"));
|
||||
assertThat(zeroResult.isPresent()).isTrue();
|
||||
|
||||
Optional<ConstraintViolationException> digitStartResult = modelValidator.isValid(new Label("9test", "value"));
|
||||
assertThat(digitStartResult.isPresent()).isTrue();
|
||||
}
|
||||
}
|
||||
@@ -61,6 +61,9 @@ public class QueryFilterTest {
|
||||
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.ENDS_WITH).build(), Resource.EXECUTION),
|
||||
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.CONTAINS).build(), Resource.EXECUTION),
|
||||
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.REGEX).build(), Resource.EXECUTION),
|
||||
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.IN).build(), Resource.EXECUTION),
|
||||
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.NOT_IN).build(), Resource.EXECUTION),
|
||||
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.PREFIX).build(), Resource.EXECUTION),
|
||||
|
||||
Arguments.of(QueryFilter.builder().field(Field.START_DATE).operation(Op.EQUALS).build(), Resource.EXECUTION),
|
||||
Arguments.of(QueryFilter.builder().field(Field.START_DATE).operation(Op.NOT_EQUALS).build(), Resource.EXECUTION),
|
||||
@@ -168,9 +171,6 @@ public class QueryFilterTest {
|
||||
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.LESS_THAN).build(), Resource.EXECUTION),
|
||||
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.GREATER_THAN_OR_EQUAL_TO).build(), Resource.EXECUTION),
|
||||
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.LESS_THAN_OR_EQUAL_TO).build(), Resource.EXECUTION),
|
||||
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.IN).build(), Resource.EXECUTION),
|
||||
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.NOT_IN).build(), Resource.EXECUTION),
|
||||
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.PREFIX).build(), Resource.EXECUTION),
|
||||
|
||||
Arguments.of(QueryFilter.builder().field(Field.START_DATE).operation(Op.IN).build(), Resource.EXECUTION),
|
||||
Arguments.of(QueryFilter.builder().field(Field.START_DATE).operation(Op.NOT_IN).build(), Resource.EXECUTION),
|
||||
|
||||
@@ -185,4 +185,21 @@ class FlowTest {
|
||||
|
||||
return YamlParser.parse(file, Flow.class);
|
||||
}
|
||||
@Test
|
||||
void illegalNamespaceUpdate() {
|
||||
Flow original = Flow.builder()
|
||||
.id("my-flow")
|
||||
.namespace("io.kestra.prod")
|
||||
.tasks(List.of(Log.builder().id("log").type(Log.class.getName()).message("hello").build()))
|
||||
.build();
|
||||
|
||||
Flow updated = original.toBuilder()
|
||||
.namespace("io.kestra.dev")
|
||||
.build();
|
||||
|
||||
Optional<ConstraintViolationException> validate = original.validateUpdate(updated);
|
||||
|
||||
assertThat(validate.isPresent()).isTrue();
|
||||
assertThat(validate.get().getMessage()).contains("Illegal namespace update");
|
||||
}
|
||||
}
|
||||
@@ -60,6 +60,15 @@ class SystemInformationReportTest {
|
||||
return setting;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Setting internalSave(Setting setting) throws ConstraintViolationException {
|
||||
if (setting.getKey().equals(Setting.INSTANCE_UUID)) {
|
||||
UUID = setting.getValue();
|
||||
}
|
||||
|
||||
return setting;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Setting delete(Setting setting) {
|
||||
return setting;
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
package io.kestra.core.repositories;
|
||||
|
||||
import com.devskiller.friendly_id.FriendlyId;
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.exceptions.InvalidQueryFiltersException;
|
||||
import io.kestra.core.junit.annotations.FlakyTest;
|
||||
import io.kestra.core.junit.annotations.KestraTest;
|
||||
import io.kestra.core.models.Label;
|
||||
import io.kestra.core.models.QueryFilter;
|
||||
@@ -24,7 +24,6 @@ import io.kestra.core.models.flows.State.Type;
|
||||
import io.kestra.core.models.property.Property;
|
||||
import io.kestra.core.models.tasks.ResolvedTask;
|
||||
import io.kestra.core.repositories.ExecutionRepositoryInterface.ChildFilter;
|
||||
import io.kestra.core.serializers.JacksonMapper;
|
||||
import io.kestra.core.utils.IdUtils;
|
||||
import io.kestra.core.utils.NamespaceUtils;
|
||||
import io.kestra.core.utils.TestsUtils;
|
||||
@@ -42,18 +41,17 @@ import org.junit.jupiter.params.provider.MethodSource;
|
||||
import org.slf4j.event.Level;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.sql.Timestamp;
|
||||
import java.time.*;
|
||||
import java.time.format.DateTimeFormatter;
|
||||
import java.time.temporal.ChronoUnit;
|
||||
import java.time.Duration;
|
||||
import java.time.Instant;
|
||||
import java.time.ZonedDateTime;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import static io.kestra.core.models.flows.FlowScope.SYSTEM;
|
||||
import static io.kestra.core.models.flows.FlowScope.USER;
|
||||
import static java.time.temporal.ChronoUnit.MINUTES;
|
||||
import static java.time.temporal.ChronoUnit.SECONDS;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.junit.jupiter.api.Assertions.assertThrows;
|
||||
@@ -81,6 +79,7 @@ public abstract class AbstractExecutionRepositoryTest {
|
||||
.tenantId(tenantId)
|
||||
.flowId(flowId == null ? FLOW : flowId)
|
||||
.flowRevision(1)
|
||||
.kind(ExecutionKind.NORMAL)
|
||||
.state(finalState);
|
||||
|
||||
|
||||
@@ -184,6 +183,7 @@ public abstract class AbstractExecutionRepositoryTest {
|
||||
|
||||
@ParameterizedTest
|
||||
@MethodSource("filterCombinations")
|
||||
@FlakyTest(description = "Filtering tests are sometimes returning 0")
|
||||
void should_find_all(QueryFilter filter, int expectedSize){
|
||||
var tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
inject(tenant, "executionTriggerId");
|
||||
@@ -196,15 +196,49 @@ public abstract class AbstractExecutionRepositoryTest {
|
||||
static Stream<Arguments> filterCombinations() {
|
||||
return Stream.of(
|
||||
Arguments.of(QueryFilter.builder().field(Field.QUERY).value("unittest").operation(Op.EQUALS).build(), 29),
|
||||
Arguments.of(QueryFilter.builder().field(Field.QUERY).value("unused").operation(Op.NOT_EQUALS).build(), 29),
|
||||
|
||||
Arguments.of(QueryFilter.builder().field(Field.SCOPE).value(List.of(USER)).operation(Op.EQUALS).build(), 29),
|
||||
Arguments.of(QueryFilter.builder().field(Field.SCOPE).value(List.of(SYSTEM)).operation(Op.NOT_EQUALS).build(), 29),
|
||||
|
||||
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value("io.kestra.unittest").operation(Op.EQUALS).build(), 29),
|
||||
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value("not.this.one").operation(Op.NOT_EQUALS).build(), 29),
|
||||
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value("o.kestra.unittes").operation(Op.CONTAINS).build(), 29),
|
||||
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value("io.kestra.uni").operation(Op.STARTS_WITH).build(), 29),
|
||||
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value("o.kestra.unittest").operation(Op.ENDS_WITH).build(), 29),
|
||||
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value("io\\.kestra\\.unittest").operation(Op.REGEX).build(), 29),
|
||||
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value(List.of("io.kestra.unittest", "unused")).operation(Op.IN).build(), 29),
|
||||
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value(List.of("unused.first", "unused.second")).operation(Op.NOT_IN).build(), 29),
|
||||
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value("io.kestra").operation(Op.PREFIX).build(), 29),
|
||||
|
||||
Arguments.of(QueryFilter.builder().field(Field.KIND).value(ExecutionKind.NORMAL).operation(Op.EQUALS).build(), 29),
|
||||
Arguments.of(QueryFilter.builder().field(Field.KIND).value(ExecutionKind.TEST).operation(Op.NOT_EQUALS).build(), 29),
|
||||
Arguments.of(QueryFilter.builder().field(Field.KIND).value(List.of(ExecutionKind.NORMAL, ExecutionKind.PLAYGROUND)).operation(Op.IN).build(), 29),
|
||||
Arguments.of(QueryFilter.builder().field(Field.KIND).value(List.of(ExecutionKind.PLAYGROUND, ExecutionKind.TEST)).operation(Op.NOT_IN).build(), 29),
|
||||
|
||||
Arguments.of(QueryFilter.builder().field(Field.LABELS).value(Map.of("key", "value")).operation(Op.EQUALS).build(), 1),
|
||||
Arguments.of(QueryFilter.builder().field(Field.LABELS).value(Map.of("key", "unknown")).operation(Op.NOT_EQUALS).build(), 29),
|
||||
Arguments.of(QueryFilter.builder().field(Field.LABELS).value(Map.of("key", "value", "key2", "value2")).operation(Op.IN).build(), 1),
|
||||
Arguments.of(QueryFilter.builder().field(Field.LABELS).value(Map.of("key1", "value1")).operation(Op.NOT_IN).build(), 29),
|
||||
Arguments.of(QueryFilter.builder().field(Field.LABELS).value("value").operation(Op.CONTAINS).build(), 1),
|
||||
|
||||
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value(FLOW).operation(Op.EQUALS).build(), 16),
|
||||
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value(FLOW).operation(Op.NOT_EQUALS).build(), 13),
|
||||
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value("ul").operation(Op.CONTAINS).build(), 16),
|
||||
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value("ful").operation(Op.STARTS_WITH).build(), 16),
|
||||
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value("ull").operation(Op.ENDS_WITH).build(), 16),
|
||||
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value("[ful]{4}").operation(Op.REGEX).build(), 16),
|
||||
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value(List.of(FLOW, "other")).operation(Op.IN).build(), 16),
|
||||
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value(List.of(FLOW, "other2")).operation(Op.NOT_IN).build(), 13),
|
||||
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value("ful").operation(Op.PREFIX).build(), 16),
|
||||
|
||||
Arguments.of(QueryFilter.builder().field(Field.START_DATE).value(ZonedDateTime.now().minusMinutes(1)).operation(Op.GREATER_THAN).build(), 29),
|
||||
Arguments.of(QueryFilter.builder().field(Field.END_DATE).value(ZonedDateTime.now().plusMinutes(1)).operation(Op.LESS_THAN).build(), 29),
|
||||
Arguments.of(QueryFilter.builder().field(Field.STATE).value(Type.RUNNING).operation(Op.EQUALS).build(), 5),
|
||||
Arguments.of(QueryFilter.builder().field(Field.TRIGGER_EXECUTION_ID).value("executionTriggerId").operation(Op.EQUALS).build(), 29),
|
||||
Arguments.of(QueryFilter.builder().field(Field.CHILD_FILTER).value(ChildFilter.CHILD).operation(Op.EQUALS).build(), 29)
|
||||
|
||||
Arguments.of(QueryFilter.builder().field(Field.CHILD_FILTER).value(ChildFilter.CHILD).operation(Op.EQUALS).build(), 29),
|
||||
Arguments.of(QueryFilter.builder().field(Field.CHILD_FILTER).value(ChildFilter.CHILD).operation(Op.NOT_EQUALS).build(), 0)
|
||||
);
|
||||
}
|
||||
|
||||
@@ -656,6 +690,65 @@ public abstract class AbstractExecutionRepositoryTest {
|
||||
assertThat(data).first().hasFieldOrPropertyWithValue("id", execution.getId());
|
||||
}
|
||||
|
||||
@Test
|
||||
void dashboard_fetchData_365Days_verifiesDateGrouping() throws IOException {
|
||||
var tenantId = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
var executionDuration = Duration.ofMinutes(220);
|
||||
var executionCreateDate = Instant.now();
|
||||
|
||||
// Create an execution within the 365-day range
|
||||
Execution execution = Execution.builder()
|
||||
.tenantId(tenantId)
|
||||
.id(IdUtils.create())
|
||||
.namespace("io.kestra.unittest")
|
||||
.flowId("some-execution")
|
||||
.flowRevision(1)
|
||||
.labels(Label.from(Map.of("country", "FR")))
|
||||
.state(new State(Type.SUCCESS,
|
||||
List.of(new State.History(State.Type.CREATED, executionCreateDate), new State.History(Type.SUCCESS, executionCreateDate.plus(executionDuration)))))
|
||||
.taskRunList(List.of())
|
||||
.build();
|
||||
|
||||
execution = executionRepository.save(execution);
|
||||
|
||||
// Create an execution BEYOND 365 days (400 days ago) - should be filtered out
|
||||
var executionCreateDateOld = Instant.now().minus(Duration.ofDays(400));
|
||||
Execution executionOld = Execution.builder()
|
||||
.tenantId(tenantId)
|
||||
.id(IdUtils.create())
|
||||
.namespace("io.kestra.unittest")
|
||||
.flowId("some-execution-old")
|
||||
.flowRevision(1)
|
||||
.labels(Label.from(Map.of("country", "US")))
|
||||
.state(new State(Type.SUCCESS,
|
||||
List.of(new State.History(State.Type.CREATED, executionCreateDateOld), new State.History(Type.SUCCESS, executionCreateDateOld.plus(executionDuration)))))
|
||||
.taskRunList(List.of())
|
||||
.build();
|
||||
|
||||
executionRepository.save(executionOld);
|
||||
|
||||
var now = ZonedDateTime.now();
|
||||
ArrayListTotal<Map<String, Object>> data = executionRepository.fetchData(tenantId, Executions.builder()
|
||||
.type(Executions.class.getName())
|
||||
.columns(Map.of(
|
||||
"count", ColumnDescriptor.<Executions.Fields>builder().field(Executions.Fields.ID).agg(AggregationType.COUNT).build(),
|
||||
"id", ColumnDescriptor.<Executions.Fields>builder().field(Executions.Fields.ID).build(),
|
||||
"date", ColumnDescriptor.<Executions.Fields>builder().field(Executions.Fields.START_DATE).build(),
|
||||
"duration", ColumnDescriptor.<Executions.Fields>builder().field(Executions.Fields.DURATION).build()
|
||||
)).build(),
|
||||
now.minusDays(365),
|
||||
now,
|
||||
null
|
||||
);
|
||||
|
||||
// Should only return 1 execution (the recent one), not the 400-day-old execution
|
||||
assertThat(data.getTotal()).isGreaterThanOrEqualTo(1L);
|
||||
assertThat(data).isNotEmpty();
|
||||
assertThat(data).first().hasFieldOrProperty("count");
|
||||
}
|
||||
|
||||
|
||||
|
||||
private static Execution buildWithCreatedDate(String tenant, Instant instant) {
|
||||
return Execution.builder()
|
||||
.id(IdUtils.create())
|
||||
|
||||
@@ -121,7 +121,8 @@ public abstract class AbstractFlowRepositoryTest {
|
||||
QueryFilter.builder().field(Field.QUERY).value("filterFlowId").operation(Op.EQUALS).build(),
|
||||
QueryFilter.builder().field(Field.SCOPE).value(List.of(SYSTEM)).operation(Op.EQUALS).build(),
|
||||
QueryFilter.builder().field(Field.NAMESPACE).value(SYSTEM_FLOWS_DEFAULT_NAMESPACE).operation(Op.EQUALS).build(),
|
||||
QueryFilter.builder().field(Field.LABELS).value(Map.of("key", "value")).operation(Op.EQUALS).build()
|
||||
QueryFilter.builder().field(Field.LABELS).value(Map.of("key", "value")).operation(Op.EQUALS).build(),
|
||||
QueryFilter.builder().field(Field.FLOW_ID).value("filterFlowId").operation(Op.EQUALS).build()
|
||||
);
|
||||
}
|
||||
|
||||
@@ -145,7 +146,6 @@ public abstract class AbstractFlowRepositoryTest {
|
||||
|
||||
static Stream<QueryFilter> errorFilterCombinations() {
|
||||
return Stream.of(
|
||||
QueryFilter.builder().field(Field.FLOW_ID).value("sleep").operation(Op.EQUALS).build(),
|
||||
QueryFilter.builder().field(Field.START_DATE).value(ZonedDateTime.now().minusMinutes(1)).operation(Op.GREATER_THAN).build(),
|
||||
QueryFilter.builder().field(Field.END_DATE).value(ZonedDateTime.now().plusMinutes(1)).operation(Op.LESS_THAN).build(),
|
||||
QueryFilter.builder().field(Field.STATE).value(State.Type.RUNNING).operation(Op.EQUALS).build(),
|
||||
|
||||
@@ -0,0 +1,92 @@
|
||||
package io.kestra.core.runners;
|
||||
|
||||
import io.kestra.core.junit.annotations.FlakyTest;
|
||||
import io.kestra.core.junit.annotations.KestraTest;
|
||||
import io.kestra.core.junit.annotations.LoadFlows;
|
||||
import jakarta.inject.Inject;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.TestInstance;
|
||||
|
||||
@KestraTest(startRunner = true)
|
||||
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
|
||||
public abstract class AbstractRunnerConcurrencyTest {
|
||||
@Inject
|
||||
protected FlowConcurrencyCaseTest flowConcurrencyCaseTest;
|
||||
|
||||
@Test
|
||||
@LoadFlows(value = {"flows/valids/flow-concurrency-cancel.yml"}, tenantId = "concurrency-cancel")
|
||||
void concurrencyCancel() throws Exception {
|
||||
flowConcurrencyCaseTest.flowConcurrencyCancel("concurrency-cancel");
|
||||
}
|
||||
|
||||
@Test
|
||||
@LoadFlows(value = {"flows/valids/flow-concurrency-fail.yml"}, tenantId = "concurrency-fail")
|
||||
void concurrencyFail() throws Exception {
|
||||
flowConcurrencyCaseTest.flowConcurrencyFail("concurrency-fail");
|
||||
}
|
||||
|
||||
@Test
|
||||
@LoadFlows(value = {"flows/valids/flow-concurrency-queue.yml"}, tenantId = "concurrency-queue")
|
||||
void concurrencyQueue() throws Exception {
|
||||
flowConcurrencyCaseTest.flowConcurrencyQueue("concurrency-queue");
|
||||
}
|
||||
|
||||
@Test
|
||||
@LoadFlows(value = {"flows/valids/flow-concurrency-queue-pause.yml"}, tenantId = "concurrency-queue-pause")
|
||||
protected void concurrencyQueuePause() throws Exception {
|
||||
flowConcurrencyCaseTest.flowConcurrencyQueuePause("concurrency-queue-pause");
|
||||
}
|
||||
|
||||
@Test
|
||||
@LoadFlows(value = {"flows/valids/flow-concurrency-cancel-pause.yml"}, tenantId = "concurrency-cancel-pause")
|
||||
protected void concurrencyCancelPause() throws Exception {
|
||||
flowConcurrencyCaseTest.flowConcurrencyCancelPause("concurrency-cancel-pause");
|
||||
}
|
||||
|
||||
@Test
|
||||
@LoadFlows(value = {"flows/valids/flow-concurrency-for-each-item.yaml", "flows/valids/flow-concurrency-queue.yml"}, tenantId = "flow-concurrency-with-for-each-item")
|
||||
protected void flowConcurrencyWithForEachItem() throws Exception {
|
||||
flowConcurrencyCaseTest.flowConcurrencyWithForEachItem("flow-concurrency-with-for-each-item");
|
||||
}
|
||||
|
||||
@Test
|
||||
@LoadFlows(value = {"flows/valids/flow-concurrency-queue-fail.yml"}, tenantId = "concurrency-queue-restarted")
|
||||
protected void concurrencyQueueRestarted() throws Exception {
|
||||
flowConcurrencyCaseTest.flowConcurrencyQueueRestarted("concurrency-queue-restarted");
|
||||
}
|
||||
|
||||
@Test
|
||||
@LoadFlows(value = {"flows/valids/flow-concurrency-queue-after-execution.yml"}, tenantId = "concurrency-queue-after-execution")
|
||||
void concurrencyQueueAfterExecution() throws Exception {
|
||||
flowConcurrencyCaseTest.flowConcurrencyQueueAfterExecution("concurrency-queue-after-execution");
|
||||
}
|
||||
|
||||
@Test
|
||||
@LoadFlows(value = {"flows/valids/flow-concurrency-subflow.yml", "flows/valids/flow-concurrency-cancel.yml"}, tenantId = "flow-concurrency-subflow")
|
||||
void flowConcurrencySubflow() throws Exception {
|
||||
flowConcurrencyCaseTest.flowConcurrencySubflow("flow-concurrency-subflow");
|
||||
}
|
||||
|
||||
@Test
|
||||
@FlakyTest(description = "Only flaky in CI")
|
||||
@LoadFlows(
|
||||
value = {"flows/valids/flow-concurrency-parallel-subflow-kill.yaml", "flows/valids/flow-concurrency-parallel-subflow-kill-child.yaml", "flows/valids/flow-concurrency-parallel-subflow-kill-grandchild.yaml"},
|
||||
tenantId = "flow-concurrency-parallel-subflow-kill"
|
||||
)
|
||||
protected void flowConcurrencyParallelSubflowKill() throws Exception {
|
||||
flowConcurrencyCaseTest.flowConcurrencyParallelSubflowKill("flow-concurrency-parallel-subflow-kill");
|
||||
}
|
||||
|
||||
@Test
|
||||
@LoadFlows(value = {"flows/valids/flow-concurrency-queue-killed.yml"}, tenantId = "flow-concurrency-killed")
|
||||
void flowConcurrencyKilled() throws Exception {
|
||||
flowConcurrencyCaseTest.flowConcurrencyKilled("flow-concurrency-killed");
|
||||
}
|
||||
|
||||
@Test
|
||||
@FlakyTest(description = "Only flaky in CI")
|
||||
@LoadFlows(value = {"flows/valids/flow-concurrency-queue-killed.yml"}, tenantId = "flow-concurrency-queue-killed")
|
||||
void flowConcurrencyQueueKilled() throws Exception {
|
||||
flowConcurrencyCaseTest.flowConcurrencyQueueKilled("flow-concurrency-queue-killed");
|
||||
}
|
||||
}
|
||||
@@ -66,9 +66,6 @@ public abstract class AbstractRunnerTest {
|
||||
@Inject
|
||||
protected LoopUntilCaseTest loopUntilTestCaseTest;
|
||||
|
||||
@Inject
|
||||
protected FlowConcurrencyCaseTest flowConcurrencyCaseTest;
|
||||
|
||||
@Inject
|
||||
protected ScheduleDateCaseTest scheduleDateCaseTest;
|
||||
|
||||
@@ -422,66 +419,6 @@ public abstract class AbstractRunnerTest {
|
||||
forEachItemCaseTest.forEachItemWithAfterExecution();
|
||||
}
|
||||
|
||||
@Test
|
||||
@LoadFlows({"flows/valids/flow-concurrency-cancel.yml"})
|
||||
void concurrencyCancel() throws Exception {
|
||||
flowConcurrencyCaseTest.flowConcurrencyCancel();
|
||||
}
|
||||
|
||||
@Test
|
||||
@LoadFlows({"flows/valids/flow-concurrency-fail.yml"})
|
||||
void concurrencyFail() throws Exception {
|
||||
flowConcurrencyCaseTest.flowConcurrencyFail();
|
||||
}
|
||||
|
||||
@Test
|
||||
@LoadFlows({"flows/valids/flow-concurrency-queue.yml"})
|
||||
void concurrencyQueue() throws Exception {
|
||||
flowConcurrencyCaseTest.flowConcurrencyQueue();
|
||||
}
|
||||
|
||||
@Test
|
||||
@LoadFlows({"flows/valids/flow-concurrency-queue-pause.yml"})
|
||||
protected void concurrencyQueuePause() throws Exception {
|
||||
flowConcurrencyCaseTest.flowConcurrencyQueuePause();
|
||||
}
|
||||
|
||||
@Test
|
||||
@LoadFlows({"flows/valids/flow-concurrency-cancel-pause.yml"})
|
||||
protected void concurrencyCancelPause() throws Exception {
|
||||
flowConcurrencyCaseTest.flowConcurrencyCancelPause();
|
||||
}
|
||||
|
||||
@Test
|
||||
@LoadFlows(value = {"flows/valids/flow-concurrency-for-each-item.yaml", "flows/valids/flow-concurrency-queue.yml"}, tenantId = TENANT_1)
|
||||
protected void flowConcurrencyWithForEachItem() throws Exception {
|
||||
flowConcurrencyCaseTest.flowConcurrencyWithForEachItem(TENANT_1);
|
||||
}
|
||||
|
||||
@Test
|
||||
@LoadFlows({"flows/valids/flow-concurrency-queue-fail.yml"})
|
||||
protected void concurrencyQueueRestarted() throws Exception {
|
||||
flowConcurrencyCaseTest.flowConcurrencyQueueRestarted();
|
||||
}
|
||||
|
||||
@Test
|
||||
@LoadFlows({"flows/valids/flow-concurrency-queue-after-execution.yml"})
|
||||
void concurrencyQueueAfterExecution() throws Exception {
|
||||
flowConcurrencyCaseTest.flowConcurrencyQueueAfterExecution();
|
||||
}
|
||||
|
||||
@Test
|
||||
@LoadFlows(value = {"flows/valids/flow-concurrency-subflow.yml", "flows/valids/flow-concurrency-cancel.yml"}, tenantId = TENANT_1)
|
||||
void flowConcurrencySubflow() throws Exception {
|
||||
flowConcurrencyCaseTest.flowConcurrencySubflow(TENANT_1);
|
||||
}
|
||||
|
||||
@Test
|
||||
@LoadFlows({"flows/valids/flow-concurrency-parallel-subflow-kill.yaml", "flows/valids/flow-concurrency-parallel-subflow-kill-child.yaml", "flows/valids/flow-concurrency-parallel-subflow-kill-grandchild.yaml"})
|
||||
void flowConcurrencyParallelSubflowKill() throws Exception {
|
||||
flowConcurrencyCaseTest.flowConcurrencyParallelSubflowKill();
|
||||
}
|
||||
|
||||
@Test
|
||||
@ExecuteFlow("flows/valids/executable-fail.yml")
|
||||
void badExecutable(Execution execution) {
|
||||
|
||||
@@ -31,7 +31,6 @@ import java.util.Optional;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
import java.util.stream.IntStream;
|
||||
|
||||
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
@Singleton
|
||||
@@ -57,40 +56,42 @@ public class FlowConcurrencyCaseTest {
|
||||
@Named(QueueFactoryInterface.KILL_NAMED)
|
||||
protected QueueInterface<ExecutionKilled> killQueue;
|
||||
|
||||
public void flowConcurrencyCancel() throws TimeoutException, QueueException {
|
||||
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, NAMESPACE, "flow-concurrency-cancel", null, null, Duration.ofSeconds(30));
|
||||
public void flowConcurrencyCancel(String tenantId) throws TimeoutException, QueueException {
|
||||
Execution execution1 = runnerUtils.runOneUntilRunning(tenantId, NAMESPACE, "flow-concurrency-cancel", null, null, Duration.ofSeconds(30));
|
||||
try {
|
||||
List<Execution> shouldFailExecutions = List.of(
|
||||
runnerUtils.runOne(MAIN_TENANT, NAMESPACE, "flow-concurrency-cancel"),
|
||||
runnerUtils.runOne(MAIN_TENANT, NAMESPACE, "flow-concurrency-cancel")
|
||||
runnerUtils.runOne(tenantId, NAMESPACE, "flow-concurrency-cancel"),
|
||||
runnerUtils.runOne(tenantId, NAMESPACE, "flow-concurrency-cancel")
|
||||
);
|
||||
assertThat(execution1.getState().isRunning()).isTrue();
|
||||
|
||||
assertThat(shouldFailExecutions.stream().map(Execution::getState).map(State::getCurrent)).allMatch(Type.CANCELLED::equals);
|
||||
} finally {
|
||||
runnerUtils.killExecution(execution1);
|
||||
runnerUtils.awaitExecution(e -> e.getState().isTerminated(), execution1);
|
||||
}
|
||||
}
|
||||
|
||||
public void flowConcurrencyFail() throws TimeoutException, QueueException {
|
||||
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, NAMESPACE, "flow-concurrency-fail", null, null, Duration.ofSeconds(30));
|
||||
public void flowConcurrencyFail(String tenantId) throws TimeoutException, QueueException {
|
||||
Execution execution1 = runnerUtils.runOneUntilRunning(tenantId, NAMESPACE, "flow-concurrency-fail", null, null, Duration.ofSeconds(30));
|
||||
try {
|
||||
List<Execution> shouldFailExecutions = List.of(
|
||||
runnerUtils.runOne(MAIN_TENANT, NAMESPACE, "flow-concurrency-fail"),
|
||||
runnerUtils.runOne(MAIN_TENANT, NAMESPACE, "flow-concurrency-fail")
|
||||
runnerUtils.runOne(tenantId, NAMESPACE, "flow-concurrency-fail"),
|
||||
runnerUtils.runOne(tenantId, NAMESPACE, "flow-concurrency-fail")
|
||||
);
|
||||
|
||||
assertThat(execution1.getState().isRunning()).isTrue();
|
||||
assertThat(shouldFailExecutions.stream().map(Execution::getState).map(State::getCurrent)).allMatch(State.Type.FAILED::equals);
|
||||
} finally {
|
||||
runnerUtils.killExecution(execution1);
|
||||
runnerUtils.awaitExecution(e -> e.getState().isTerminated(), execution1);
|
||||
}
|
||||
}
|
||||
|
||||
public void flowConcurrencyQueue() throws QueueException {
|
||||
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue", null, null, Duration.ofSeconds(30));
|
||||
public void flowConcurrencyQueue(String tenantId) throws QueueException {
|
||||
Execution execution1 = runnerUtils.runOneUntilRunning(tenantId, NAMESPACE, "flow-concurrency-queue", null, null, Duration.ofSeconds(30));
|
||||
Flow flow = flowRepository
|
||||
.findById(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue", Optional.empty())
|
||||
.findById(tenantId, NAMESPACE, "flow-concurrency-queue", Optional.empty())
|
||||
.orElseThrow();
|
||||
Execution execution2 = Execution.newExecution(flow, null, null, Optional.empty());
|
||||
Execution executionResult2 = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), execution2);
|
||||
@@ -106,10 +107,10 @@ public class FlowConcurrencyCaseTest {
|
||||
assertThat(executionResult2.getState().getHistories().get(2).getState()).isEqualTo(State.Type.RUNNING);
|
||||
}
|
||||
|
||||
public void flowConcurrencyQueuePause() throws QueueException {
|
||||
Execution execution1 = runnerUtils.runOneUntilPaused(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-pause");
|
||||
public void flowConcurrencyQueuePause(String tenantId) throws QueueException {
|
||||
Execution execution1 = runnerUtils.runOneUntilPaused(tenantId, NAMESPACE, "flow-concurrency-queue-pause");
|
||||
Flow flow = flowRepository
|
||||
.findById(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-pause", Optional.empty())
|
||||
.findById(tenantId, NAMESPACE, "flow-concurrency-queue-pause", Optional.empty())
|
||||
.orElseThrow();
|
||||
Execution execution2 = Execution.newExecution(flow, null, null, Optional.empty());
|
||||
Execution secondExecutionResult = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), execution2);
|
||||
@@ -125,10 +126,10 @@ public class FlowConcurrencyCaseTest {
|
||||
assertThat(secondExecutionResult.getState().getHistories().get(2).getState()).isEqualTo(State.Type.RUNNING);
|
||||
}
|
||||
|
||||
public void flowConcurrencyCancelPause() throws QueueException {
|
||||
Execution execution1 = runnerUtils.runOneUntilPaused(MAIN_TENANT, NAMESPACE, "flow-concurrency-cancel-pause");
|
||||
public void flowConcurrencyCancelPause(String tenantId) throws QueueException {
|
||||
Execution execution1 = runnerUtils.runOneUntilPaused(tenantId, NAMESPACE, "flow-concurrency-cancel-pause");
|
||||
Flow flow = flowRepository
|
||||
.findById(MAIN_TENANT, NAMESPACE, "flow-concurrency-cancel-pause", Optional.empty())
|
||||
.findById(tenantId, NAMESPACE, "flow-concurrency-cancel-pause", Optional.empty())
|
||||
.orElseThrow();
|
||||
Execution execution2 = Execution.newExecution(flow, null, null, Optional.empty());
|
||||
Execution secondExecutionResult = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.CANCELLED), execution2);
|
||||
@@ -164,11 +165,11 @@ public class FlowConcurrencyCaseTest {
|
||||
.toList()).contains(Type.QUEUED);
|
||||
}
|
||||
|
||||
public void flowConcurrencyQueueRestarted() throws Exception {
|
||||
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, NAMESPACE,
|
||||
public void flowConcurrencyQueueRestarted(String tenantId) throws Exception {
|
||||
Execution execution1 = runnerUtils.runOneUntilRunning(tenantId, NAMESPACE,
|
||||
"flow-concurrency-queue-fail", null, null, Duration.ofSeconds(30));
|
||||
Flow flow = flowRepository
|
||||
.findById(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-fail", Optional.empty())
|
||||
.findById(tenantId, NAMESPACE, "flow-concurrency-queue-fail", Optional.empty())
|
||||
.orElseThrow();
|
||||
Execution execution2 = Execution.newExecution(flow, null, null, Optional.empty());
|
||||
runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.RUNNING), execution2);
|
||||
@@ -177,7 +178,10 @@ public class FlowConcurrencyCaseTest {
|
||||
// we restart the first one, it should be queued then fail again.
|
||||
Execution failedExecution = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.FAILED), execution1);
|
||||
Execution restarted = executionService.restart(failedExecution, null);
|
||||
Execution executionResult1 = runnerUtils.restartExecution(e -> e.getState().getCurrent().equals(Type.FAILED), restarted);
|
||||
Execution executionResult1 = runnerUtils.restartExecution(
|
||||
e -> e.getState().getHistories().stream().anyMatch(history -> history.getState() == Type.RESTARTED) && e.getState().getCurrent().equals(Type.FAILED),
|
||||
restarted
|
||||
);
|
||||
Execution executionResult2 = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.FAILED), execution2);
|
||||
|
||||
assertThat(executionResult1.getState().getCurrent()).isEqualTo(Type.FAILED);
|
||||
@@ -191,10 +195,10 @@ public class FlowConcurrencyCaseTest {
|
||||
assertThat(executionResult2.getState().getHistories().get(2).getState()).isEqualTo(State.Type.RUNNING);
|
||||
}
|
||||
|
||||
public void flowConcurrencyQueueAfterExecution() throws QueueException {
|
||||
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-after-execution", null, null, Duration.ofSeconds(30));
|
||||
public void flowConcurrencyQueueAfterExecution(String tenantId) throws QueueException {
|
||||
Execution execution1 = runnerUtils.runOneUntilRunning(tenantId, NAMESPACE, "flow-concurrency-queue-after-execution", null, null, Duration.ofSeconds(30));
|
||||
Flow flow = flowRepository
|
||||
.findById(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-after-execution", Optional.empty())
|
||||
.findById(tenantId, NAMESPACE, "flow-concurrency-queue-after-execution", Optional.empty())
|
||||
.orElseThrow();
|
||||
Execution execution2 = Execution.newExecution(flow, null, null, Optional.empty());
|
||||
Execution executionResult2 = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), execution2);
|
||||
@@ -214,15 +218,15 @@ public class FlowConcurrencyCaseTest {
|
||||
List<Execution> subFlowExecs = runnerUtils.awaitFlowExecutionNumber(2, tenantId, NAMESPACE, "flow-concurrency-cancel");
|
||||
assertThat(subFlowExecs).extracting(e -> e.getState().getCurrent()).containsExactlyInAnyOrder(Type.SUCCESS, Type.CANCELLED);
|
||||
|
||||
// run another execution to be sure that everything work (purge is correctly done)
|
||||
// run another execution to be sure that everything works (purge is correctly done)
|
||||
Execution execution3 = runnerUtils.runOne(tenantId, NAMESPACE, "flow-concurrency-subflow");
|
||||
assertThat(execution3.getState().getCurrent()).isEqualTo(Type.SUCCESS);
|
||||
runnerUtils.awaitFlowExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), tenantId, NAMESPACE, "flow-concurrency-cancel");
|
||||
}
|
||||
|
||||
public void flowConcurrencyParallelSubflowKill() throws QueueException {
|
||||
Execution parent = runnerUtils.runOneUntilRunning(MAIN_TENANT, NAMESPACE, "flow-concurrency-parallel-subflow-kill", null, null, Duration.ofSeconds(30));
|
||||
Execution queued = runnerUtils.awaitFlowExecution(e -> e.getState().isQueued(), MAIN_TENANT, NAMESPACE, "flow-concurrency-parallel-subflow-kill-child");
|
||||
public void flowConcurrencyParallelSubflowKill(String tenantId) throws QueueException {
|
||||
Execution parent = runnerUtils.runOneUntilRunning(tenantId, NAMESPACE, "flow-concurrency-parallel-subflow-kill", null, null, Duration.ofSeconds(30));
|
||||
Execution queued = runnerUtils.awaitFlowExecution(e -> e.getState().isQueued(), tenantId, NAMESPACE, "flow-concurrency-parallel-subflow-kill-child");
|
||||
|
||||
// Kill the parent
|
||||
killQueue.emit(ExecutionKilledExecution
|
||||
@@ -230,7 +234,7 @@ public class FlowConcurrencyCaseTest {
|
||||
.state(ExecutionKilled.State.REQUESTED)
|
||||
.executionId(parent.getId())
|
||||
.isOnKillCascade(true)
|
||||
.tenantId(MAIN_TENANT)
|
||||
.tenantId(tenantId)
|
||||
.build()
|
||||
);
|
||||
|
||||
@@ -240,6 +244,92 @@ public class FlowConcurrencyCaseTest {
|
||||
assertThat(terminated.getTaskRunList()).isNull();
|
||||
}
|
||||
|
||||
public void flowConcurrencyKilled(String tenantId) throws QueueException, InterruptedException {
|
||||
Flow flow = flowRepository
|
||||
.findById(tenantId, NAMESPACE, "flow-concurrency-queue-killed", Optional.empty())
|
||||
.orElseThrow();
|
||||
Execution execution1 = runnerUtils.runOneUntilRunning(tenantId, NAMESPACE, "flow-concurrency-queue-killed", null, null, Duration.ofSeconds(30));
|
||||
Execution execution2 = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.QUEUED), Execution.newExecution(flow, null, null, Optional.empty()));
|
||||
Execution execution3 = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.QUEUED), Execution.newExecution(flow, null, null, Optional.empty()));
|
||||
|
||||
try {
|
||||
assertThat(execution1.getState().isRunning()).isTrue();
|
||||
assertThat(execution2.getState().getCurrent()).isEqualTo(Type.QUEUED);
|
||||
assertThat(execution3.getState().getCurrent()).isEqualTo(Type.QUEUED);
|
||||
|
||||
// we kill execution 1, execution 2 should run but not execution 3
|
||||
killQueue.emit(ExecutionKilledExecution
|
||||
.builder()
|
||||
.state(ExecutionKilled.State.REQUESTED)
|
||||
.executionId(execution1.getId())
|
||||
.isOnKillCascade(true)
|
||||
.tenantId(tenantId)
|
||||
.build()
|
||||
);
|
||||
|
||||
Execution killed = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.KILLED), execution1);
|
||||
assertThat(killed.getState().getCurrent()).isEqualTo(Type.KILLED);
|
||||
assertThat(killed.getState().getHistories().stream().anyMatch(h -> h.getState() == Type.RUNNING)).isTrue();
|
||||
|
||||
// we now check that execution 2 is running
|
||||
Execution running = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.RUNNING), execution2);
|
||||
assertThat(running.getState().getCurrent()).isEqualTo(Type.RUNNING);
|
||||
|
||||
// we check that execution 3 is still queued
|
||||
Thread.sleep(100); // wait a little to be 100% sure
|
||||
Execution queued = runnerUtils.awaitExecution(e -> e.getState().isQueued(), execution3);
|
||||
assertThat(queued.getState().getCurrent()).isEqualTo(Type.QUEUED);
|
||||
} finally {
|
||||
// kill everything to avoid dangling executions
|
||||
runnerUtils.killExecution(execution2);
|
||||
runnerUtils.killExecution(execution3);
|
||||
|
||||
// await that they are all terminated, note that as KILLED is received twice, some messages would still be pending, but this is the best we can do
|
||||
runnerUtils.awaitFlowExecutionNumber(3, tenantId, NAMESPACE, "flow-concurrency-queue-killed");
|
||||
}
|
||||
}
|
||||
|
||||
public void flowConcurrencyQueueKilled(String tenantId) throws QueueException, InterruptedException {
|
||||
Flow flow = flowRepository
|
||||
.findById(tenantId, NAMESPACE, "flow-concurrency-queue-killed", Optional.empty())
|
||||
.orElseThrow();
|
||||
Execution execution1 = runnerUtils.runOneUntilRunning(tenantId, NAMESPACE, "flow-concurrency-queue-killed", null, null, Duration.ofSeconds(30));
|
||||
Execution execution2 = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.QUEUED), Execution.newExecution(flow, null, null, Optional.empty()));
|
||||
Execution execution3 = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.QUEUED), Execution.newExecution(flow, null, null, Optional.empty()));
|
||||
|
||||
try {
|
||||
assertThat(execution1.getState().isRunning()).isTrue();
|
||||
assertThat(execution2.getState().getCurrent()).isEqualTo(Type.QUEUED);
|
||||
assertThat(execution3.getState().getCurrent()).isEqualTo(Type.QUEUED);
|
||||
|
||||
// we kill execution 2, execution 3 should not run
|
||||
killQueue.emit(ExecutionKilledExecution
|
||||
.builder()
|
||||
.state(ExecutionKilled.State.REQUESTED)
|
||||
.executionId(execution2.getId())
|
||||
.isOnKillCascade(true)
|
||||
.tenantId(tenantId)
|
||||
.build()
|
||||
);
|
||||
|
||||
Execution killed = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.KILLED), execution2);
|
||||
assertThat(killed.getState().getCurrent()).isEqualTo(Type.KILLED);
|
||||
assertThat(killed.getState().getHistories().stream().noneMatch(h -> h.getState() == Type.RUNNING)).isTrue();
|
||||
|
||||
// we now check that execution 3 is still queued
|
||||
Thread.sleep(100); // wait a little to be 100% sure
|
||||
Execution queued = runnerUtils.awaitExecution(e -> e.getState().isQueued(), execution3);
|
||||
assertThat(queued.getState().getCurrent()).isEqualTo(Type.QUEUED);
|
||||
} finally {
|
||||
// kill everything to avoid dangling executions
|
||||
runnerUtils.killExecution(execution1);
|
||||
runnerUtils.killExecution(execution3);
|
||||
|
||||
// await that they are all terminated, note that as KILLED is received twice, some messages would still be pending, but this is the best we can do
|
||||
runnerUtils.awaitFlowExecutionNumber(3, tenantId, NAMESPACE, "flow-concurrency-queue-killed");
|
||||
}
|
||||
}
|
||||
|
||||
private URI storageUpload(String tenantId) throws URISyntaxException, IOException {
|
||||
File tempFile = File.createTempFile("file", ".txt");
|
||||
|
||||
|
||||
@@ -2,9 +2,7 @@ package io.kestra.core.runners;
|
||||
|
||||
import io.kestra.core.junit.annotations.KestraTest;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.flows.DependsOn;
|
||||
import io.kestra.core.models.flows.Input;
|
||||
import io.kestra.core.models.flows.Type;
|
||||
import io.kestra.core.models.flows.*;
|
||||
import io.kestra.core.models.flows.input.FileInput;
|
||||
import io.kestra.core.models.flows.input.InputAndValue;
|
||||
import io.kestra.core.models.flows.input.IntInput;
|
||||
@@ -32,6 +30,7 @@ import org.reactivestreams.Publisher;
|
||||
import reactor.core.publisher.Mono;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.File;
|
||||
import java.io.InputStream;
|
||||
import java.net.URI;
|
||||
import java.nio.ByteBuffer;
|
||||
@@ -45,10 +44,10 @@ import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
@KestraTest
|
||||
class FlowInputOutputTest {
|
||||
|
||||
|
||||
private static final String TEST_SECRET_VALUE = "test-secret-value";
|
||||
private static final String TEST_KV_VALUE = "test-kv-value";
|
||||
|
||||
|
||||
static final Execution DEFAULT_TEST_EXECUTION = Execution.builder()
|
||||
.id(IdUtils.create())
|
||||
.flowId(IdUtils.create())
|
||||
@@ -64,7 +63,7 @@ class FlowInputOutputTest {
|
||||
|
||||
@Inject
|
||||
KvMetadataRepositoryInterface kvMetadataRepository;
|
||||
|
||||
|
||||
@MockBean(SecretService.class)
|
||||
SecretService testSecretService() {
|
||||
return new SecretService() {
|
||||
@@ -74,7 +73,7 @@ class FlowInputOutputTest {
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@MockBean(KVStoreService.class)
|
||||
KVStoreService testKVStoreService() {
|
||||
return new KVStoreService() {
|
||||
@@ -89,7 +88,7 @@ class FlowInputOutputTest {
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
void shouldResolveEnabledInputsGivenInputWithConditionalExpressionMatchingTrue() {
|
||||
// Given
|
||||
@@ -294,7 +293,7 @@ class FlowInputOutputTest {
|
||||
values
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
void resolveInputsGivenDefaultExpressions() {
|
||||
// Given
|
||||
@@ -311,14 +310,14 @@ class FlowInputOutputTest {
|
||||
.required(false)
|
||||
.dependsOn(new DependsOn(List.of("input1"),null))
|
||||
.build();
|
||||
|
||||
|
||||
List<Input<?>> inputs = List.of(input1, input2);
|
||||
|
||||
|
||||
Map<String, Object> data = Map.of("input42", "foo");
|
||||
|
||||
|
||||
// When
|
||||
List<InputAndValue> values = flowInputOutput.resolveInputs(inputs, null, DEFAULT_TEST_EXECUTION, data);
|
||||
|
||||
|
||||
// Then
|
||||
Assertions.assertEquals(
|
||||
List.of(
|
||||
@@ -327,7 +326,7 @@ class FlowInputOutputTest {
|
||||
values
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
void shouldObfuscateSecretsWhenValidatingInputs() {
|
||||
// Given
|
||||
@@ -337,14 +336,14 @@ class FlowInputOutputTest {
|
||||
.defaults(Property.ofExpression("{{ secret('???') }}"))
|
||||
.required(false)
|
||||
.build();
|
||||
|
||||
|
||||
// When
|
||||
List<InputAndValue> results = flowInputOutput.validateExecutionInputs(List.of(input), null, DEFAULT_TEST_EXECUTION, Mono.empty()).block();
|
||||
|
||||
|
||||
// Then
|
||||
Assertions.assertEquals("******", results.getFirst().value());
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
void shouldNotObfuscateSecretsInSelectWhenValidatingInputs() {
|
||||
// Given
|
||||
@@ -354,10 +353,10 @@ class FlowInputOutputTest {
|
||||
.expression("{{ [secret('???')] }}")
|
||||
.required(false)
|
||||
.build();
|
||||
|
||||
|
||||
// When
|
||||
List<InputAndValue> results = flowInputOutput.validateExecutionInputs(List.of(input), null, DEFAULT_TEST_EXECUTION, Mono.empty()).block();
|
||||
|
||||
|
||||
// Then
|
||||
Assertions.assertEquals(TEST_SECRET_VALUE, ((MultiselectInput)results.getFirst().input()).getValues().getFirst());
|
||||
}
|
||||
@@ -371,14 +370,14 @@ class FlowInputOutputTest {
|
||||
.defaults(Property.ofExpression("{{ secret('???') }}"))
|
||||
.required(false)
|
||||
.build();
|
||||
|
||||
|
||||
// When
|
||||
Map<String, Object> results = flowInputOutput.readExecutionInputs(List.of(input), null, DEFAULT_TEST_EXECUTION, Mono.empty()).block();
|
||||
|
||||
|
||||
// Then
|
||||
Assertions.assertEquals(TEST_SECRET_VALUE, results.get("input"));
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
void shouldEvaluateExpressionOnDefaultsUsingKVFunction() {
|
||||
// Given
|
||||
@@ -388,14 +387,14 @@ class FlowInputOutputTest {
|
||||
.defaults(Property.ofExpression("{{ kv('???') }}"))
|
||||
.required(false)
|
||||
.build();
|
||||
|
||||
|
||||
// When
|
||||
Map<String, Object> results = flowInputOutput.readExecutionInputs(List.of(input), null, DEFAULT_TEST_EXECUTION, Mono.empty()).block();
|
||||
|
||||
|
||||
// Then
|
||||
assertThat(results.get("input")).isEqualTo(TEST_KV_VALUE);
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
void shouldGetDefaultWhenPassingNoDataForRequiredInput() {
|
||||
// Given
|
||||
@@ -404,50 +403,84 @@ class FlowInputOutputTest {
|
||||
.type(Type.STRING)
|
||||
.defaults(Property.ofValue("default"))
|
||||
.build();
|
||||
|
||||
|
||||
// When
|
||||
Map<String, Object> results = flowInputOutput.readExecutionInputs(List.of(input), null, DEFAULT_TEST_EXECUTION, Mono.empty()).block();
|
||||
|
||||
|
||||
// Then
|
||||
assertThat(results.get("input")).isEqualTo("default");
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
void shouldResolveZeroByteFileUpload() throws java.io.IOException {
|
||||
File tempFile = File.createTempFile("empty", ".txt");
|
||||
tempFile.deleteOnExit();
|
||||
|
||||
io.micronaut.http.multipart.CompletedFileUpload fileUpload = org.mockito.Mockito.mock(io.micronaut.http.multipart.CompletedFileUpload.class);
|
||||
org.mockito.Mockito.when(fileUpload.getInputStream()).thenReturn(new java.io.FileInputStream(tempFile));
|
||||
org.mockito.Mockito.when(fileUpload.getFilename()).thenReturn("empty.txt");
|
||||
org.mockito.Mockito.when(fileUpload.getName()).thenReturn("empty_file");
|
||||
|
||||
Execution execution = Execution.builder()
|
||||
.id(IdUtils.create())
|
||||
.tenantId("unit_test_tenant")
|
||||
.namespace("io.kestra.unittest")
|
||||
.flowId("unittest")
|
||||
.flowRevision(1)
|
||||
.state(new State())
|
||||
.build();
|
||||
|
||||
reactor.core.publisher.Mono<Map<String, Object>> result = flowInputOutput.readExecutionInputs(
|
||||
List.of(
|
||||
io.kestra.core.models.flows.input.FileInput.builder().id("empty_file").type(Type.FILE).build()
|
||||
),
|
||||
Flow.builder().id("unittest").namespace("io.kestra.unittest").build(),
|
||||
execution,
|
||||
reactor.core.publisher.Flux.just(fileUpload)
|
||||
);
|
||||
|
||||
Map<String, Object> outputs = result.block();
|
||||
|
||||
Assertions.assertNotNull(outputs);
|
||||
Assertions.assertTrue(outputs.containsKey("empty_file"));
|
||||
}
|
||||
|
||||
private static class MemoryCompletedPart implements CompletedPart {
|
||||
|
||||
|
||||
protected final String name;
|
||||
protected final byte[] content;
|
||||
|
||||
|
||||
public MemoryCompletedPart(String name, byte[] content) {
|
||||
this.name = name;
|
||||
this.content = content;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public InputStream getInputStream() {
|
||||
return new ByteArrayInputStream(content);
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public byte[] getBytes() {
|
||||
return content;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public ByteBuffer getByteBuffer() {
|
||||
return ByteBuffer.wrap(content);
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public Optional<MediaType> getContentType() {
|
||||
return Optional.empty();
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private static final class MemoryCompletedFileUpload extends MemoryCompletedPart implements CompletedFileUpload {
|
||||
|
||||
private final String fileName;
|
||||
@@ -456,7 +489,7 @@ class FlowInputOutputTest {
|
||||
super(name, content);
|
||||
this.fileName = fileName;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public String getFilename() {
|
||||
return fileName;
|
||||
|
||||
@@ -56,6 +56,18 @@ public class InputsTest {
|
||||
@Inject
|
||||
private NamespaceFactory namespaceFactory;
|
||||
|
||||
private static final Map<String , Object> object = Map.of(
|
||||
"people", List.of(
|
||||
Map.of(
|
||||
"first", "Mustafa",
|
||||
"last", "Tarek"
|
||||
),
|
||||
Map.of(
|
||||
"first", "Ahmed",
|
||||
"last", "Tarek"
|
||||
)
|
||||
)
|
||||
);
|
||||
public static Map<String, Object> inputs = ImmutableMap.<String, Object>builder()
|
||||
.put("string", "myString")
|
||||
.put("enum", "ENUM_VALUE")
|
||||
@@ -67,7 +79,6 @@ public class InputsTest {
|
||||
.put("time", "18:27:49")
|
||||
.put("duration", "PT5M6S")
|
||||
.put("file", Objects.requireNonNull(InputsTest.class.getClassLoader().getResource("application-test.yml")).getPath())
|
||||
.put("json", "{\"a\": \"b\"}")
|
||||
.put("uri", "https://www.google.com")
|
||||
.put("nested.string", "a string")
|
||||
.put("nested.more.int", "123")
|
||||
@@ -81,11 +92,14 @@ public class InputsTest {
|
||||
.put("validatedTime", "11:27:49")
|
||||
.put("secret", "secret")
|
||||
.put("array", "[1, 2, 3]")
|
||||
.put("yaml", """
|
||||
.put("json1", "{\"a\": \"b\"}")
|
||||
.put("json2", object)
|
||||
.put("yaml1", """
|
||||
some: property
|
||||
alist:
|
||||
- of
|
||||
- values""")
|
||||
.put("yaml2", object)
|
||||
.build();
|
||||
|
||||
@Inject
|
||||
@@ -154,7 +168,6 @@ public class InputsTest {
|
||||
assertThat(typeds.get("duration")).isEqualTo(Duration.parse("PT5M6S"));
|
||||
assertThat((URI) typeds.get("file")).isEqualTo(new URI("kestra:///io/kestra/tests/inputs/executions/test/inputs/file/application-test.yml"));
|
||||
assertThat(CharStreams.toString(new InputStreamReader(storageInterface.get("tenant1", null, (URI) typeds.get("file"))))).isEqualTo(CharStreams.toString(new InputStreamReader(new FileInputStream((String) inputs.get("file")))));
|
||||
assertThat(typeds.get("json")).isEqualTo(Map.of("a", "b"));
|
||||
assertThat(typeds.get("uri")).isEqualTo("https://www.google.com");
|
||||
assertThat(((Map<String, Object>) typeds.get("nested")).get("string")).isEqualTo("a string");
|
||||
assertThat((Boolean) ((Map<String, Object>) typeds.get("nested")).get("bool")).isTrue();
|
||||
@@ -170,9 +183,12 @@ public class InputsTest {
|
||||
assertThat(typeds.get("array")).isInstanceOf(List.class);
|
||||
assertThat((List<Integer>) typeds.get("array")).hasSize(3);
|
||||
assertThat((List<Integer>) typeds.get("array")).isEqualTo(List.of(1, 2, 3));
|
||||
assertThat(typeds.get("yaml")).isEqualTo(Map.of(
|
||||
assertThat(typeds.get("json1")).isEqualTo(Map.of("a", "b"));
|
||||
assertThat(typeds.get("json2")).isEqualTo(object);
|
||||
assertThat(typeds.get("yaml1")).isEqualTo(Map.of(
|
||||
"some", "property",
|
||||
"alist", List.of("of", "values")));
|
||||
assertThat(typeds.get("yaml2")).isEqualTo(object);
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -201,7 +217,7 @@ public class InputsTest {
|
||||
(flow, execution1) -> flowIO.readExecutionInputs(flow, execution1, inputs)
|
||||
);
|
||||
|
||||
assertThat(execution.getTaskRunList()).hasSize(14);
|
||||
assertThat(execution.getTaskRunList()).hasSize(16);
|
||||
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
assertThat((String) execution.findTaskRunsByTaskId("file").getFirst().getOutputs().get("value")).matches("kestra:///io/kestra/tests/inputs/executions/.*/inputs/file/application-test.yml");
|
||||
// secret inputs are decrypted to be used as task properties
|
||||
@@ -354,19 +370,19 @@ public class InputsTest {
|
||||
@LoadFlows(value = {"flows/valids/inputs.yaml"}, tenantId = "tenant14")
|
||||
void inputEmptyJson() {
|
||||
HashMap<String, Object> map = new HashMap<>(inputs);
|
||||
map.put("json", "{}");
|
||||
map.put("json1", "{}");
|
||||
|
||||
Map<String, Object> typeds = typedInputs(map, "tenant14");
|
||||
|
||||
assertThat(typeds.get("json")).isInstanceOf(Map.class);
|
||||
assertThat(((Map<?, ?>) typeds.get("json")).size()).isZero();
|
||||
assertThat(typeds.get("json1")).isInstanceOf(Map.class);
|
||||
assertThat(((Map<?, ?>) typeds.get("json1")).size()).isZero();
|
||||
}
|
||||
|
||||
@Test
|
||||
@LoadFlows(value = {"flows/valids/inputs.yaml"}, tenantId = "tenant15")
|
||||
void inputEmptyJsonFlow() throws TimeoutException, QueueException {
|
||||
HashMap<String, Object> map = new HashMap<>(inputs);
|
||||
map.put("json", "{}");
|
||||
map.put("json1", "{}");
|
||||
|
||||
Execution execution = runnerUtils.runOne(
|
||||
"tenant15",
|
||||
@@ -376,11 +392,11 @@ public class InputsTest {
|
||||
(flow, execution1) -> flowIO.readExecutionInputs(flow, execution1, map)
|
||||
);
|
||||
|
||||
assertThat(execution.getTaskRunList()).hasSize(14);
|
||||
assertThat(execution.getTaskRunList()).hasSize(16);
|
||||
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
|
||||
assertThat(execution.getInputs().get("json")).isInstanceOf(Map.class);
|
||||
assertThat(((Map<?, ?>) execution.getInputs().get("json")).size()).isZero();
|
||||
assertThat(execution.getInputs().get("json1")).isInstanceOf(Map.class);
|
||||
assertThat(((Map<?, ?>) execution.getInputs().get("json1")).size()).isZero();
|
||||
assertThat((String) execution.findTaskRunsByTaskId("jsonOutput").getFirst().getOutputs().get("value")).isEqualTo("{}");
|
||||
}
|
||||
|
||||
|
||||
@@ -122,10 +122,10 @@ class YamlParserTest {
|
||||
void inputs() {
|
||||
Flow flow = this.parse("flows/valids/inputs.yaml");
|
||||
|
||||
assertThat(flow.getInputs().size()).isEqualTo(29);
|
||||
assertThat(flow.getInputs().stream().filter(Input::getRequired).count()).isEqualTo(11L);
|
||||
assertThat(flow.getInputs().stream().filter(r -> !r.getRequired()).count()).isEqualTo(18L);
|
||||
assertThat(flow.getInputs().stream().filter(r -> r.getDefaults() != null).count()).isEqualTo(3L);
|
||||
assertThat(flow.getInputs().size()).isEqualTo(31);
|
||||
assertThat(flow.getInputs().stream().filter(Input::getRequired).count()).isEqualTo(12L);
|
||||
assertThat(flow.getInputs().stream().filter(r -> !r.getRequired()).count()).isEqualTo(19L);
|
||||
assertThat(flow.getInputs().stream().filter(r -> r.getDefaults() != null).count()).isEqualTo(4L);
|
||||
assertThat(flow.getInputs().stream().filter(r -> r instanceof StringInput stringInput && stringInput.getValidator() != null).count()).isEqualTo(1L);
|
||||
}
|
||||
|
||||
|
||||
@@ -1,15 +1,24 @@
|
||||
package io.kestra.core.utils;
|
||||
|
||||
import io.kestra.core.junit.annotations.KestraTest;
|
||||
import io.kestra.core.models.Setting;
|
||||
import io.kestra.core.repositories.SettingRepositoryInterface;
|
||||
import io.micronaut.test.extensions.junit5.annotation.MicronautTest;
|
||||
import jakarta.inject.Inject;
|
||||
import org.junit.jupiter.api.Assertions;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
@KestraTest
|
||||
import java.util.Optional;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
@MicronautTest
|
||||
public class EditionProviderTest {
|
||||
@Inject
|
||||
private EditionProvider editionProvider;
|
||||
|
||||
@Inject
|
||||
private SettingRepositoryInterface settingRepository;
|
||||
|
||||
protected EditionProvider.Edition expectedEdition() {
|
||||
return EditionProvider.Edition.OSS;
|
||||
}
|
||||
@@ -17,5 +26,10 @@ public class EditionProviderTest {
|
||||
@Test
|
||||
void shouldReturnCurrentEdition() {
|
||||
Assertions.assertEquals(expectedEdition(), editionProvider.get());
|
||||
|
||||
// check that the edition is persisted in settings
|
||||
Optional<Setting> editionSettings = settingRepository.findByKey(Setting.INSTANCE_EDITION);
|
||||
assertThat(editionSettings).isPresent();
|
||||
assertThat(editionSettings.get().getValue()).isEqualTo(expectedEdition().name());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -48,8 +48,8 @@ class ListUtilsTest {
|
||||
void convertToListString(){
|
||||
assertThat(ListUtils.convertToListString(List.of("string1", "string2"))).isEqualTo(List.of("string1", "string2"));
|
||||
assertThat(ListUtils.convertToListString(List.of())).isEqualTo(List.of());
|
||||
assertThat(ListUtils.convertToListString(List.of(1, 2, 3))).isEqualTo(List.of("1", "2", "3"));
|
||||
|
||||
assertThrows(IllegalArgumentException.class, () -> ListUtils.convertToListString("not a list"));
|
||||
assertThrows(IllegalArgumentException.class, () -> ListUtils.convertToListString(List.of(1, 2, 3)));
|
||||
}
|
||||
}
|
||||
@@ -1,48 +1,107 @@
|
||||
package io.kestra.core.utils;
|
||||
|
||||
import ch.qos.logback.classic.Logger;
|
||||
import ch.qos.logback.classic.LoggerContext;
|
||||
import ch.qos.logback.classic.spi.ILoggingEvent;
|
||||
import ch.qos.logback.core.AppenderBase;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.executions.TaskRun;
|
||||
import io.kestra.core.models.flows.Flow;
|
||||
import io.kestra.core.models.triggers.TriggerContext;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.BeforeAll;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.slf4j.event.Level;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.concurrent.CopyOnWriteArrayList;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
@Slf4j
|
||||
class LogsTest {
|
||||
|
||||
|
||||
private static final InMemoryAppender MEMORY_APPENDER = new InMemoryAppender();
|
||||
|
||||
@BeforeAll
|
||||
static void setupLogger() {
|
||||
Logger logger = (Logger) LoggerFactory.getLogger(Logger.ROOT_LOGGER_NAME);
|
||||
MEMORY_APPENDER.setContext((LoggerContext) LoggerFactory.getILoggerFactory());
|
||||
MEMORY_APPENDER.start();
|
||||
logger.addAppender(MEMORY_APPENDER);
|
||||
}
|
||||
|
||||
@AfterEach
|
||||
void clearLogs() {
|
||||
MEMORY_APPENDER.clear();
|
||||
}
|
||||
|
||||
@Test
|
||||
void logFlow() {
|
||||
var flow = Flow.builder().namespace("namespace").id("flow").build();
|
||||
var flow = Flow.builder().tenantId("tenant").namespace("namespace").id("flow").build();
|
||||
Logs.logExecution(flow, log, Level.INFO, "Some log");
|
||||
Logs.logExecution(flow, log, Level.INFO, "Some log with an {}", "attribute");
|
||||
Logs.logExecution(flow, log, Level.ERROR, "Some log with an {} and an error", "attribute", new RuntimeException("Test Exception"));
|
||||
|
||||
List<ILoggingEvent> logs = MEMORY_APPENDER.getLogs();
|
||||
assertThat(logs).hasSize(3);
|
||||
}
|
||||
|
||||
@Test
|
||||
void logExecution() {
|
||||
var execution = Execution.builder().namespace("namespace").flowId("flow").id("execution").build();
|
||||
Logs.logExecution(execution, log, Level.INFO, "Some log");
|
||||
Logs.logExecution(execution, log, Level.INFO, "Some log with an {}", "attribute");
|
||||
var execution = Execution.builder().tenantId("tenant").namespace("namespace").flowId("flow").id("execution").build();
|
||||
Logs.logExecution(execution, Level.INFO, "Some log");
|
||||
Logs.logExecution(execution, Level.INFO, "Some log with an {}", "attribute");
|
||||
Logs.logExecution(execution, Level.INFO, "Some log");
|
||||
|
||||
List<ILoggingEvent> logs = MEMORY_APPENDER.getLogs();
|
||||
assertThat(logs).hasSize(3);
|
||||
assertThat(logs.getFirst().getLoggerName()).isEqualTo("executor.tenant.namespace.flow");
|
||||
}
|
||||
|
||||
@Test
|
||||
void logTrigger() {
|
||||
var trigger = TriggerContext.builder().namespace("namespace").flowId("flow").triggerId("trigger").build();
|
||||
Logs.logTrigger(trigger, log, Level.INFO, "Some log");
|
||||
Logs.logTrigger(trigger, log, Level.INFO, "Some log with an {}", "attribute");
|
||||
var trigger = TriggerContext.builder().tenantId("tenant").namespace("namespace").flowId("flow").triggerId("trigger").build();
|
||||
Logs.logTrigger(trigger, Level.INFO, "Some log");
|
||||
Logs.logTrigger(trigger, Level.INFO, "Some log with an {}", "attribute");
|
||||
Logs.logTrigger(trigger, Level.INFO, "Some log");
|
||||
|
||||
List<ILoggingEvent> logs = MEMORY_APPENDER.getLogs();
|
||||
assertThat(logs).hasSize(3);
|
||||
assertThat(logs.getFirst().getLoggerName()).isEqualTo("scheduler.tenant.namespace.flow.trigger");
|
||||
}
|
||||
|
||||
@Test
|
||||
void logTaskRun() {
|
||||
var taskRun = TaskRun.builder().namespace("namespace").flowId("flow").executionId("execution").taskId("task").id("taskRun").build();
|
||||
var taskRun = TaskRun.builder().tenantId("tenant").namespace("namespace").flowId("flow").executionId("execution").taskId("task").id("taskRun").build();
|
||||
Logs.logTaskRun(taskRun, Level.INFO, "Some log");
|
||||
Logs.logTaskRun(taskRun, Level.INFO, "Some log with an {}", "attribute");
|
||||
|
||||
taskRun = TaskRun.builder().namespace("namespace").flowId("flow").executionId("execution").taskId("task").id("taskRun").value("value").build();
|
||||
Logs.logTaskRun(taskRun, Level.INFO, "Some log");
|
||||
Logs.logTaskRun(taskRun, Level.INFO, "Some log with an {}", "attribute");
|
||||
|
||||
List<ILoggingEvent> logs = MEMORY_APPENDER.getLogs();
|
||||
assertThat(logs).hasSize(4);
|
||||
assertThat(logs.getFirst().getLoggerName()).isEqualTo("worker.tenant.namespace.flow.task");
|
||||
}
|
||||
|
||||
private static class InMemoryAppender extends AppenderBase<ILoggingEvent> {
|
||||
private final List<ILoggingEvent> logs = new CopyOnWriteArrayList<>();
|
||||
|
||||
@Override
|
||||
protected void append(ILoggingEvent event) {
|
||||
logs.add(event);
|
||||
}
|
||||
|
||||
public List<ILoggingEvent> getLogs() {
|
||||
return logs;
|
||||
}
|
||||
|
||||
public void clear() {
|
||||
logs.clear();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -216,4 +216,23 @@ class MapUtilsTest {
|
||||
"k1.k4", "v2"
|
||||
));
|
||||
}
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
void mergeShouldNotDuplicateListElements() {
|
||||
Map<String, Object> first = Map.of(
|
||||
"key1", "value1",
|
||||
"key2", List.of("something", "else")
|
||||
);
|
||||
Map<String, Object> second = Map.of(
|
||||
"key2", List.of("something", "other"),
|
||||
"key3", "value3"
|
||||
);
|
||||
|
||||
Map<String, Object> results = MapUtils.merge(first, second);
|
||||
|
||||
assertThat(results).hasSize(3);
|
||||
List<String> list = (List<String>) results.get("key2");
|
||||
assertThat(list).hasSize(3);
|
||||
}
|
||||
}
|
||||
@@ -20,7 +20,6 @@ import org.junit.jupiter.api.parallel.ExecutionMode;
|
||||
import reactor.core.publisher.Flux;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.net.URI;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.nio.file.Path;
|
||||
import java.time.Duration;
|
||||
@@ -45,9 +44,6 @@ class NamespaceFilesUtilsTest {
|
||||
@Named(QueueFactoryInterface.WORKERTASKLOG_NAMED)
|
||||
QueueInterface<LogEntry> workerTaskLogQueue;
|
||||
|
||||
@Inject
|
||||
NamespaceFilesUtils namespaceFilesUtils;
|
||||
|
||||
@Inject
|
||||
NamespaceFactory namespaceFactory;
|
||||
|
||||
@@ -66,7 +62,7 @@ class NamespaceFilesUtilsTest {
|
||||
namespaceStorage.putFile(Path.of("/" + i + ".txt"), data);
|
||||
}
|
||||
|
||||
namespaceFilesUtils.loadNamespaceFiles(runContext, NamespaceFiles.builder().build());
|
||||
NamespaceFilesUtils.loadNamespaceFiles(runContext, NamespaceFiles.builder().build());
|
||||
|
||||
List<LogEntry> logEntry = TestsUtils.awaitLogs(logs, 1);
|
||||
receive.blockLast();
|
||||
@@ -91,7 +87,7 @@ class NamespaceFilesUtilsTest {
|
||||
namespaceStorage.putFile(Path.of("/" + i + ".txt"), data);
|
||||
}
|
||||
|
||||
namespaceFilesUtils.loadNamespaceFiles(runContext, NamespaceFiles.builder().namespaces(Property.ofValue(List.of(namespace))).build());
|
||||
NamespaceFilesUtils.loadNamespaceFiles(runContext, NamespaceFiles.builder().namespaces(Property.ofValue(List.of(namespace))).build());
|
||||
|
||||
List<LogEntry> logEntry = TestsUtils.awaitLogs(logs, 1);
|
||||
receive.blockLast();
|
||||
@@ -116,7 +112,7 @@ class NamespaceFilesUtilsTest {
|
||||
namespaceStorage.putFile(Path.of("/folder2/test.txt"), data);
|
||||
namespaceStorage.putFile(Path.of("/test.txt"), data);
|
||||
|
||||
namespaceFilesUtils.loadNamespaceFiles(runContext, NamespaceFiles.builder().namespaces(Property.ofValue(List.of(namespace))).build());
|
||||
NamespaceFilesUtils.loadNamespaceFiles(runContext, NamespaceFiles.builder().namespaces(Property.ofValue(List.of(namespace))).build());
|
||||
|
||||
List<LogEntry> logEntry = TestsUtils.awaitLogs(logs, 1);
|
||||
receive.blockLast();
|
||||
@@ -141,7 +137,7 @@ class NamespaceFilesUtilsTest {
|
||||
namespaceFactory.of(MAIN_TENANT, ns1, storageInterface).putFile(Path.of("/test.txt"), data);
|
||||
namespaceFactory.of(MAIN_TENANT, ns2, storageInterface).putFile(Path.of("/test.txt"), data);
|
||||
|
||||
namespaceFilesUtils.loadNamespaceFiles(runContext, NamespaceFiles.builder()
|
||||
NamespaceFilesUtils.loadNamespaceFiles(runContext, NamespaceFiles.builder()
|
||||
.namespaces(Property.ofValue(List.of(ns1, ns2)))
|
||||
.folderPerNamespace(Property.ofValue(true))
|
||||
.build());
|
||||
|
||||
@@ -0,0 +1,30 @@
|
||||
package io.kestra.core.utils;
|
||||
|
||||
import io.kestra.core.models.Setting;
|
||||
import io.kestra.core.repositories.SettingRepositoryInterface;
|
||||
import io.micronaut.test.extensions.junit5.annotation.MicronautTest;
|
||||
import jakarta.inject.Inject;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.util.Optional;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
@MicronautTest
|
||||
class VersionProviderTest {
|
||||
@Inject
|
||||
private VersionProvider versionProvider;
|
||||
|
||||
@Inject
|
||||
private SettingRepositoryInterface settingRepository;
|
||||
|
||||
@Test
|
||||
void shouldResolveVersion() {
|
||||
assertThat(versionProvider.getVersion()).endsWith("-SNAPSHOT");
|
||||
|
||||
// check that the version is persisted in settings
|
||||
Optional<Setting> versionSettings = settingRepository.findByKey(Setting.INSTANCE_VERSION);
|
||||
assertThat(versionSettings).isPresent();
|
||||
assertThat(versionSettings.get().getValue()).isEqualTo(versionProvider.getVersion());
|
||||
}
|
||||
}
|
||||
@@ -9,9 +9,15 @@ import io.kestra.core.utils.TestsUtils;
|
||||
import io.kestra.core.junit.annotations.KestraTest;
|
||||
import jakarta.inject.Inject;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import io.kestra.core.models.validations.ValidateConstraintViolation;
|
||||
import io.kestra.core.services.FlowService;
|
||||
import jakarta.validation.ConstraintViolationException;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.core.JsonLocation;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
|
||||
import java.util.List;
|
||||
import java.io.File;
|
||||
import java.net.URL;
|
||||
import java.util.Optional;
|
||||
@@ -23,6 +29,107 @@ class FlowValidationTest {
|
||||
@Inject
|
||||
private ModelValidator modelValidator;
|
||||
|
||||
@Inject
|
||||
private FlowService flowService;
|
||||
|
||||
private static final ObjectMapper mapper = new ObjectMapper();
|
||||
|
||||
// Helper class to create JsonProcessingException with location
|
||||
private static class TestJsonProcessingException extends JsonProcessingException {
|
||||
public TestJsonProcessingException(String msg, JsonLocation location) {
|
||||
super(msg, location);
|
||||
}
|
||||
public TestJsonProcessingException(String msg) {
|
||||
super(msg);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
void testFormatYamlErrorMessage_WithExpectedFieldName() throws JsonProcessingException {
|
||||
JsonProcessingException e = new TestJsonProcessingException("Expected a field name", new JsonLocation(null, 100, 5, 10));
|
||||
Object dummyTarget = new Object(); // Dummy target for toConstraintViolationException
|
||||
|
||||
ConstraintViolationException result = YamlParser.toConstraintViolationException(dummyTarget, "test resource", e);
|
||||
|
||||
assertThat(result.getMessage()).contains("YAML syntax error: Invalid structure").contains("(at line 5)");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFormatYamlErrorMessage_WithMappingStartEvent() throws JsonProcessingException {
|
||||
JsonProcessingException e = new TestJsonProcessingException("MappingStartEvent", new JsonLocation(null, 200, 3, 5));
|
||||
Object dummyTarget = new Object();
|
||||
|
||||
ConstraintViolationException result = YamlParser.toConstraintViolationException(dummyTarget, "test resource", e);
|
||||
|
||||
assertThat(result.getMessage()).contains("YAML syntax error: Unexpected mapping start").contains("(at line 3)");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFormatYamlErrorMessage_WithScalarValue() throws JsonProcessingException {
|
||||
JsonProcessingException e = new TestJsonProcessingException("Scalar value", new JsonLocation(null, 150, 7, 12));
|
||||
Object dummyTarget = new Object();
|
||||
|
||||
ConstraintViolationException result = YamlParser.toConstraintViolationException(dummyTarget, "test resource", e);
|
||||
|
||||
assertThat(result.getMessage()).contains("YAML syntax error: Expected a simple value").contains("(at line 7)");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFormatYamlErrorMessage_GenericError() throws JsonProcessingException {
|
||||
JsonProcessingException e = new TestJsonProcessingException("Some other error", new JsonLocation(null, 50, 2, 8));
|
||||
Object dummyTarget = new Object();
|
||||
|
||||
ConstraintViolationException result = YamlParser.toConstraintViolationException(dummyTarget, "test resource", e);
|
||||
|
||||
assertThat(result.getMessage()).contains("YAML parsing error: Some other error").contains("(at line 2)");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFormatYamlErrorMessage_NoLocation() throws JsonProcessingException {
|
||||
JsonProcessingException e = new TestJsonProcessingException("Expected a field name");
|
||||
Object dummyTarget = new Object();
|
||||
|
||||
ConstraintViolationException result = YamlParser.toConstraintViolationException(dummyTarget, "test resource", e);
|
||||
|
||||
assertThat(result.getMessage()).contains("YAML syntax error: Invalid structure").doesNotContain("at line");
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
void testValidateFlowWithYamlSyntaxError() {
|
||||
String invalidYaml = """
|
||||
id: test-flow
|
||||
namespace: io.kestra.unittest
|
||||
tasks:
|
||||
- id:hello
|
||||
type: io.kestra.plugin.core.log.Log
|
||||
message: {{ abc }}
|
||||
|
||||
""";
|
||||
List<ValidateConstraintViolation> results = flowService.validate("my-tenant", invalidYaml);
|
||||
|
||||
assertThat(results).hasSize(1);
|
||||
assertThat(results.getFirst().getConstraints()).contains("YAML parsing error").contains("at line");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testValidateFlowWithUndefinedVariable() {
|
||||
String yamlWithUndefinedVar = """
|
||||
id: test-flow
|
||||
namespace: io.kestra.unittest
|
||||
tasks:
|
||||
- id: hello
|
||||
type: io.kestra.plugin.core.log.Log
|
||||
message: {{ undefinedVar }}
|
||||
""";
|
||||
|
||||
List<ValidateConstraintViolation> results = flowService.validate("my-tenant", yamlWithUndefinedVar);
|
||||
|
||||
assertThat(results).hasSize(1);
|
||||
assertThat(results.getFirst().getConstraints()).contains("Validation error");
|
||||
}
|
||||
|
||||
@Test
|
||||
void invalidRecursiveFlow() {
|
||||
Flow flow = this.parse("flows/invalids/recursive-flow.yaml");
|
||||
@@ -130,4 +237,4 @@ class FlowValidationTest {
|
||||
|
||||
return YamlParser.parse(file, Flow.class);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -8,6 +8,7 @@ import io.kestra.core.models.flows.Output;
|
||||
import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.models.flows.State.History;
|
||||
import io.kestra.core.runners.DefaultRunContext;
|
||||
import io.kestra.core.runners.InputAndOutput;
|
||||
import io.kestra.core.runners.SubflowExecutionResult;
|
||||
import io.kestra.core.services.VariablesService;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
@@ -46,11 +47,15 @@ class SubflowTest {
|
||||
@Mock
|
||||
private ApplicationContext applicationContext;
|
||||
|
||||
@Mock
|
||||
private InputAndOutput inputAndOutput;
|
||||
|
||||
@BeforeEach
|
||||
void beforeEach() {
|
||||
Mockito.when(applicationContext.getBean(VariablesService.class)).thenReturn(new VariablesService());
|
||||
Mockito.when(runContext.logger()).thenReturn(LOG);
|
||||
Mockito.when(runContext.getApplicationContext()).thenReturn(applicationContext);
|
||||
Mockito.when(runContext.inputAndOutput()).thenReturn(inputAndOutput);
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -118,7 +123,7 @@ class SubflowTest {
|
||||
|
||||
Map<String, Object> outputs = Map.of("key", "value");
|
||||
Mockito.when(runContext.render(Mockito.anyMap())).thenReturn(outputs);
|
||||
|
||||
Mockito.when(inputAndOutput.renderOutputs(Mockito.anyList())).thenReturn(Map.of("key", "value"));
|
||||
|
||||
Subflow subflow = Subflow.builder()
|
||||
.outputs(outputs)
|
||||
@@ -159,6 +164,7 @@ class SubflowTest {
|
||||
|
||||
Output output = Output.builder().id("key").value("value").build();
|
||||
Mockito.when(runContext.render(Mockito.anyMap())).thenReturn(Map.of(output.getId(), output.getValue()));
|
||||
Mockito.when(inputAndOutput.typedOutputs(Mockito.any(), Mockito.any(), Mockito.anyMap())).thenReturn(Map.of("key", "value"));
|
||||
Flow flow = Flow.builder()
|
||||
.outputs(List.of(output))
|
||||
.build();
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
package io.kestra.plugin.core.flow;
|
||||
|
||||
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
|
||||
import static org.assertj.core.api.Assertions.as;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.junit.annotations.ExecuteFlow;
|
||||
import io.kestra.core.junit.annotations.KestraTest;
|
||||
import io.kestra.core.junit.annotations.LoadFlows;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
@@ -100,4 +102,14 @@ class SwitchTest {
|
||||
|
||||
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.FAILED);
|
||||
}
|
||||
|
||||
@Test
|
||||
@ExecuteFlow("flows/valids/switch-in-concurrent-loop.yaml")
|
||||
void switchInConcurrentLoop(Execution execution) {
|
||||
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
assertThat(execution.getTaskRunList()).hasSize(5);
|
||||
// we check that OOMCRM_EB_DD_000 and OOMCRM_EB_DD_001 have been processed once
|
||||
assertThat(execution.getTaskRunList().stream().filter(t -> t.getTaskId().equals("OOMCRM_EB_DD_000")).count()).isEqualTo(1);
|
||||
assertThat(execution.getTaskRunList().stream().filter(t -> t.getTaskId().equals("OOMCRM_EB_DD_001")).count()).isEqualTo(1);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -57,7 +57,7 @@ class ScheduleOnDatesTest {
|
||||
}
|
||||
|
||||
@Test
|
||||
public void shouldReturnFirstDateWhenNextEvaluationDateAndNoExistingTriggerDate() throws Exception {
|
||||
public void shouldReturnFirstDateWhenNextEvaluationDateAndNoExistingTriggerDate() {
|
||||
// given
|
||||
var now = ZonedDateTime.now();
|
||||
var before = now.minusMinutes(1).truncatedTo(ChronoUnit.SECONDS);
|
||||
@@ -75,7 +75,7 @@ class ScheduleOnDatesTest {
|
||||
ZonedDateTime nextDate = scheduleOnDates.nextEvaluationDate(conditionContext, Optional.empty());
|
||||
|
||||
// then
|
||||
assertThat(nextDate).isEqualTo(before);
|
||||
assertThat(nextDate).isEqualTo(after);
|
||||
}
|
||||
|
||||
@Test
|
||||
|
||||
@@ -13,6 +13,7 @@ import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.flows.Flow;
|
||||
import io.kestra.core.models.flows.Type;
|
||||
import io.kestra.core.models.flows.input.StringInput;
|
||||
import io.kestra.core.models.flows.input.MultiselectInput;
|
||||
import io.kestra.core.models.triggers.AbstractTrigger;
|
||||
import io.kestra.core.models.triggers.TriggerContext;
|
||||
import io.kestra.core.runners.RunContextFactory;
|
||||
@@ -103,8 +104,9 @@ class ScheduleTest {
|
||||
);
|
||||
|
||||
assertThat(evaluate.isPresent()).isTrue();
|
||||
assertThat(evaluate.get().getLabels()).hasSize(3);
|
||||
assertThat(evaluate.get().getLabels()).hasSize(4);
|
||||
assertTrue(evaluate.get().getLabels().stream().anyMatch(label -> label.key().equals(Label.CORRELATION_ID)));
|
||||
assertTrue(evaluate.get().getLabels().stream().anyMatch(label -> label.equals(new Label(Label.FROM, "trigger"))));
|
||||
assertThat(evaluate.get().getVariables()).containsEntry("custom_var", "VARIABLE VALUE");
|
||||
var vars = evaluate.get().getTrigger().getVariables();
|
||||
var inputs = evaluate.get().getInputs();
|
||||
@@ -137,8 +139,9 @@ class ScheduleTest {
|
||||
);
|
||||
|
||||
assertThat(evaluate.isPresent()).isTrue();
|
||||
assertThat(evaluate.get().getLabels()).hasSize(3);
|
||||
assertThat(evaluate.get().getLabels()).hasSize(4);
|
||||
assertTrue(evaluate.get().getLabels().stream().anyMatch(label -> label.key().equals(Label.CORRELATION_ID)));
|
||||
assertTrue(evaluate.get().getLabels().stream().anyMatch(label -> label.equals(new Label(Label.FROM, "trigger"))));
|
||||
assertThat(evaluate.get().getVariables()).containsEntry("custom_var", "VARIABLE VALUE");
|
||||
var inputs = evaluate.get().getInputs();
|
||||
|
||||
@@ -475,6 +478,81 @@ class ScheduleTest {
|
||||
assertThat(result.get().getVariables()).containsEntry("custom_var", "VARIABLE VALUE");
|
||||
}
|
||||
|
||||
@Test
|
||||
void successWithMultiselectInputDefaults() throws Exception {
|
||||
Schedule trigger = Schedule.builder().id("schedule").type(Schedule.class.getName()).cron("0 0 1 * *").build();
|
||||
|
||||
ZonedDateTime date = ZonedDateTime.now()
|
||||
.withDayOfMonth(1)
|
||||
.withHour(0)
|
||||
.withMinute(0)
|
||||
.withSecond(0)
|
||||
.truncatedTo(ChronoUnit.SECONDS)
|
||||
.minusMonths(1);
|
||||
|
||||
Optional<Execution> evaluate = trigger.evaluate(
|
||||
conditionContextWithMultiselectInput(trigger),
|
||||
triggerContext(date, trigger));
|
||||
|
||||
assertThat(evaluate.isPresent()).isTrue();
|
||||
var inputs = evaluate.get().getInputs();
|
||||
|
||||
// Verify MULTISELECT input with explicit defaults works correctly
|
||||
assertThat(inputs.get("multiselectInput")).isEqualTo(List.of("option1", "option2"));
|
||||
}
|
||||
|
||||
@Test
|
||||
void successWithMultiselectInputAutoSelectFirst() throws Exception {
|
||||
Schedule trigger = Schedule.builder().id("schedule").type(Schedule.class.getName()).cron("0 0 1 * *").build();
|
||||
|
||||
ZonedDateTime date = ZonedDateTime.now()
|
||||
.withDayOfMonth(1)
|
||||
.withHour(0)
|
||||
.withMinute(0)
|
||||
.withSecond(0)
|
||||
.truncatedTo(ChronoUnit.SECONDS)
|
||||
.minusMonths(1);
|
||||
|
||||
Optional<Execution> evaluate = trigger.evaluate(
|
||||
conditionContextWithMultiselectAutoSelectFirst(trigger),
|
||||
triggerContext(date, trigger));
|
||||
|
||||
assertThat(evaluate.isPresent()).isTrue();
|
||||
var inputs = evaluate.get().getInputs();
|
||||
|
||||
// Verify MULTISELECT input with autoSelectFirst defaults to first option
|
||||
assertThat(inputs.get("multiselectAutoSelect")).isEqualTo(List.of("first"));
|
||||
}
|
||||
|
||||
@Test
|
||||
void successWithMultiselectInputProvidedValue() throws Exception {
|
||||
// Test that provided values override defaults for MULTISELECT
|
||||
Schedule trigger = Schedule.builder()
|
||||
.id("schedule")
|
||||
.type(Schedule.class.getName())
|
||||
.cron("0 0 1 * *")
|
||||
.inputs(Map.of("multiselectInput", List.of("option3")))
|
||||
.build();
|
||||
|
||||
ZonedDateTime date = ZonedDateTime.now()
|
||||
.withDayOfMonth(1)
|
||||
.withHour(0)
|
||||
.withMinute(0)
|
||||
.withSecond(0)
|
||||
.truncatedTo(ChronoUnit.SECONDS)
|
||||
.minusMonths(1);
|
||||
|
||||
Optional<Execution> evaluate = trigger.evaluate(
|
||||
conditionContextWithMultiselectInput(trigger),
|
||||
triggerContext(date, trigger));
|
||||
|
||||
assertThat(evaluate.isPresent()).isTrue();
|
||||
var inputs = evaluate.get().getInputs();
|
||||
|
||||
// Verify provided value overrides defaults
|
||||
assertThat(inputs.get("multiselectInput")).isEqualTo(List.of("option3"));
|
||||
}
|
||||
|
||||
private ConditionContext conditionContext(AbstractTrigger trigger) {
|
||||
Flow flow = Flow.builder()
|
||||
.id(IdUtils.create())
|
||||
@@ -504,17 +582,79 @@ class ScheduleTest {
|
||||
.build();
|
||||
}
|
||||
|
||||
private ConditionContext conditionContextWithMultiselectInput(AbstractTrigger trigger) {
|
||||
Flow flow = Flow.builder()
|
||||
.id(IdUtils.create())
|
||||
.namespace("io.kestra.tests")
|
||||
.labels(
|
||||
List.of(
|
||||
new Label("flow-label-1", "flow-label-1"),
|
||||
new Label("flow-label-2", "flow-label-2")))
|
||||
.variables(Map.of("custom_var", "VARIABLE VALUE"))
|
||||
.inputs(List.of(
|
||||
MultiselectInput.builder()
|
||||
.id("multiselectInput")
|
||||
.type(Type.MULTISELECT)
|
||||
.values(List.of("option1", "option2", "option3"))
|
||||
.defaults(Property.ofValue(List.of("option1", "option2")))
|
||||
.build()))
|
||||
.build();
|
||||
|
||||
TriggerContext triggerContext = TriggerContext.builder()
|
||||
.namespace(flow.getNamespace())
|
||||
.flowId(flow.getId())
|
||||
.triggerId(trigger.getId())
|
||||
.build();
|
||||
|
||||
return ConditionContext.builder()
|
||||
.runContext(runContextInitializer.forScheduler((DefaultRunContext) runContextFactory.of(),
|
||||
triggerContext, trigger))
|
||||
.flow(flow)
|
||||
.build();
|
||||
}
|
||||
|
||||
private ConditionContext conditionContextWithMultiselectAutoSelectFirst(AbstractTrigger trigger) {
|
||||
Flow flow = Flow.builder()
|
||||
.id(IdUtils.create())
|
||||
.namespace("io.kestra.tests")
|
||||
.labels(
|
||||
List.of(
|
||||
new Label("flow-label-1", "flow-label-1"),
|
||||
new Label("flow-label-2", "flow-label-2")))
|
||||
.variables(Map.of("custom_var", "VARIABLE VALUE"))
|
||||
.inputs(List.of(
|
||||
MultiselectInput.builder()
|
||||
.id("multiselectAutoSelect")
|
||||
.type(Type.MULTISELECT)
|
||||
.values(List.of("first", "second", "third"))
|
||||
.autoSelectFirst(true)
|
||||
.build()))
|
||||
.build();
|
||||
|
||||
TriggerContext triggerContext = TriggerContext.builder()
|
||||
.namespace(flow.getNamespace())
|
||||
.flowId(flow.getId())
|
||||
.triggerId(trigger.getId())
|
||||
.build();
|
||||
|
||||
return ConditionContext.builder()
|
||||
.runContext(runContextInitializer.forScheduler((DefaultRunContext) runContextFactory.of(),
|
||||
triggerContext, trigger))
|
||||
.flow(flow)
|
||||
.build();
|
||||
}
|
||||
|
||||
private ZonedDateTime dateFromVars(String date, ZonedDateTime expexted) {
|
||||
return ZonedDateTime.parse(date).withZoneSameInstant(expexted.getZone());
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
void shouldGetNextExecutionDateWithConditionMatchingFutureDate() throws InternalException {
|
||||
|
||||
|
||||
ZonedDateTime now = ZonedDateTime.now().withZoneSameLocal(ZoneId.of("Europe/Paris"));
|
||||
OffsetTime before = now.minusHours(1).toOffsetDateTime().toOffsetTime().withMinute(0).withSecond(0).withNano(0);
|
||||
OffsetTime after = now.minusHours(4).toOffsetDateTime().toOffsetTime().withMinute(0).withSecond(0).withNano(0);
|
||||
|
||||
|
||||
Schedule trigger = Schedule.builder()
|
||||
.id("schedule").type(Schedule.class.getName())
|
||||
.cron("0 * * * *") // every hour
|
||||
@@ -527,25 +667,25 @@ class ScheduleTest {
|
||||
.build()
|
||||
))
|
||||
.build();
|
||||
|
||||
|
||||
TriggerContext triggerContext = triggerContext(now, trigger).toBuilder().build();
|
||||
|
||||
|
||||
ConditionContext conditionContext = ConditionContext.builder()
|
||||
.runContext(runContextInitializer.forScheduler((DefaultRunContext) runContextFactory.of(), triggerContext, trigger))
|
||||
.build();
|
||||
|
||||
|
||||
Optional<ZonedDateTime> result = trigger.truePreviousNextDateWithCondition(trigger.executionTime(), conditionContext, now, true);
|
||||
assertThat(result).isNotEmpty();
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
void shouldGetNextExecutionDateWithConditionMatchingCurrentDate() throws InternalException {
|
||||
|
||||
|
||||
ZonedDateTime now = ZonedDateTime.now().withZoneSameLocal(ZoneId.of("Europe/Paris"));
|
||||
|
||||
OffsetTime before = now.plusHours(2).toOffsetDateTime().toOffsetTime().withMinute(0).withSecond(0).withNano(0);
|
||||
OffsetTime after = now.minusHours(2).toOffsetDateTime().toOffsetTime().withMinute(0).withSecond(0).withNano(0);
|
||||
|
||||
|
||||
Schedule trigger = Schedule.builder()
|
||||
.id("schedule").type(Schedule.class.getName())
|
||||
.cron("*/30 * * * * *")
|
||||
@@ -558,13 +698,13 @@ class ScheduleTest {
|
||||
.build()
|
||||
))
|
||||
.build();
|
||||
|
||||
|
||||
TriggerContext triggerContext = triggerContext(now, trigger).toBuilder().build();
|
||||
|
||||
|
||||
ConditionContext conditionContext = ConditionContext.builder()
|
||||
.runContext(runContextInitializer.forScheduler((DefaultRunContext) runContextFactory.of(), triggerContext, trigger))
|
||||
.build();
|
||||
|
||||
|
||||
Optional<ZonedDateTime> result = trigger.truePreviousNextDateWithCondition(trigger.executionTime(), conditionContext, now, true);
|
||||
assertThat(result).isNotEmpty();
|
||||
}
|
||||
|
||||
@@ -8,4 +8,4 @@ concurrency:
|
||||
tasks:
|
||||
- id: sleep
|
||||
type: io.kestra.plugin.core.flow.Sleep
|
||||
duration: PT10S
|
||||
duration: PT2S
|
||||
|
||||
@@ -0,0 +1,11 @@
|
||||
id: flow-concurrency-queue-killed
|
||||
namespace: io.kestra.tests
|
||||
|
||||
concurrency:
|
||||
behavior: QUEUE
|
||||
limit: 1
|
||||
|
||||
tasks:
|
||||
- id: sleep
|
||||
type: io.kestra.plugin.core.flow.Sleep
|
||||
duration: PT1M
|
||||
@@ -41,7 +41,10 @@ inputs:
|
||||
- id: instantDefaults
|
||||
type: DATETIME
|
||||
defaults: "2013-08-09T14:19:00Z"
|
||||
- id: json
|
||||
- id: json1
|
||||
type: JSON
|
||||
required: false
|
||||
- id: json2
|
||||
type: JSON
|
||||
required: false
|
||||
- id: uri
|
||||
@@ -95,7 +98,7 @@ inputs:
|
||||
- name: array
|
||||
type: ARRAY
|
||||
itemType: INT
|
||||
- name: yaml
|
||||
- name: yaml1
|
||||
type: YAML
|
||||
defaults:
|
||||
property: something
|
||||
@@ -104,6 +107,15 @@ inputs:
|
||||
value: value1
|
||||
- key: key2
|
||||
value: value2
|
||||
- name: yaml2
|
||||
type: YAML
|
||||
defaults:
|
||||
property: something
|
||||
list:
|
||||
- key: key1
|
||||
value: value1
|
||||
- key: key2
|
||||
value: value2
|
||||
# required true and an empty default value will only work if we correctly serialize default values which is what this input is about to test.
|
||||
- name: empty
|
||||
type: STRING
|
||||
@@ -140,12 +152,18 @@ tasks:
|
||||
type: io.kestra.plugin.core.debug.Return
|
||||
format: "{{taskrun.value}}"
|
||||
|
||||
- id: json
|
||||
- id: json1
|
||||
type: io.kestra.plugin.core.debug.Return
|
||||
format: "{{inputs.json}}"
|
||||
format: "{{inputs.json1}}"
|
||||
- id: json2
|
||||
type: io.kestra.plugin.core.debug.Return
|
||||
format: "{{inputs.json2}}"
|
||||
- id: jsonOutput
|
||||
type: io.kestra.plugin.core.debug.Return
|
||||
format: "{{outputs.json.value}}"
|
||||
- id: yamlOutput
|
||||
format: "{{outputs.json1.value}}"
|
||||
- id: yamlOutput1
|
||||
type: io.kestra.plugin.core.debug.Return
|
||||
format: "{{inputs.yaml}}"
|
||||
format: "{{inputs.yaml1}}"
|
||||
- id: yamlOutput2
|
||||
type: io.kestra.plugin.core.debug.Return
|
||||
format: "{{inputs.yaml2}}"
|
||||
@@ -0,0 +1,23 @@
|
||||
id: switch-in-concurrent-loop
|
||||
namespace: io.kestra.tests
|
||||
|
||||
tasks:
|
||||
- id: iterate_and_check_name
|
||||
type: io.kestra.plugin.core.flow.ForEach
|
||||
tasks:
|
||||
- id: switch
|
||||
type: io.kestra.plugin.core.flow.Switch
|
||||
value: "{{ taskrun.value }}"
|
||||
cases:
|
||||
"Alice":
|
||||
- id: OOMCRM_EB_DD_000
|
||||
type: io.kestra.plugin.core.log.Log
|
||||
message: Alice
|
||||
"Bob":
|
||||
- id: OOMCRM_EB_DD_001
|
||||
type: io.kestra.plugin.core.log.Log
|
||||
message: Bob
|
||||
|
||||
values: ["Alice", "Bob"]
|
||||
|
||||
concurrencyLimit: 0
|
||||
@@ -13,18 +13,19 @@ tasks:
|
||||
- io.test.second
|
||||
- io.test.third
|
||||
enabled: true
|
||||
folderPerNamespace: true
|
||||
exclude:
|
||||
- /ignore/**
|
||||
tasks:
|
||||
- id: t1
|
||||
type: io.kestra.core.tasks.test.Read
|
||||
path: "/test/a/b/c/1.txt"
|
||||
path: "/io.test.third/test/a/b/c/1.txt"
|
||||
- id: t2
|
||||
type: io.kestra.core.tasks.test.Read
|
||||
path: "/a/b/c/2.txt"
|
||||
path: "/io.test.second/a/b/c/2.txt"
|
||||
- id: t3
|
||||
type: io.kestra.core.tasks.test.Read
|
||||
path: "/a/b/3.txt"
|
||||
path: "/io.test.first/a/b/3.txt"
|
||||
- id: t4
|
||||
type: io.kestra.core.tasks.test.Read
|
||||
path: "/ignore/4.txt"
|
||||
|
||||
@@ -402,10 +402,11 @@ public class ExecutorService {
|
||||
|
||||
if (flow.getOutputs() != null) {
|
||||
RunContext runContext = runContextFactory.of(executor.getFlow(), executor.getExecution());
|
||||
var inputAndOutput = runContext.inputAndOutput();
|
||||
|
||||
try {
|
||||
Map<String, Object> outputs = FlowInputOutput.renderFlowOutputs(flow.getOutputs(), runContext);
|
||||
outputs = flowInputOutput.typedOutputs(flow, executor.getExecution(), outputs);
|
||||
Map<String, Object> outputs = inputAndOutput.renderOutputs(flow.getOutputs());
|
||||
outputs = inputAndOutput.typedOutputs(flow, executor.getExecution(), outputs);
|
||||
newExecution = newExecution.withOutputs(outputs);
|
||||
} catch (Exception e) {
|
||||
Logs.logExecution(
|
||||
|
||||
@@ -16,7 +16,7 @@ public final class H2RepositoryUtils {
|
||||
case MONTH:
|
||||
return DSL.field("FORMATDATETIME(\"" + dateField + "\", 'yyyy-MM')", Date.class);
|
||||
case WEEK:
|
||||
return DSL.field("FORMATDATETIME(\"" + dateField + "\", 'YYYY-ww')", Date.class);
|
||||
return DSL.field("DATE_TRUNC('WEEK', \"" + dateField + "\")", Date.class);
|
||||
case DAY:
|
||||
return DSL.field("FORMATDATETIME(\"" + dateField + "\", 'yyyy-MM-dd')", Date.class);
|
||||
case HOUR:
|
||||
|
||||
@@ -3,5 +3,5 @@ package io.kestra.repository.h2;
|
||||
import io.kestra.jdbc.repository.AbstractJdbcFlowRepositoryTest;
|
||||
|
||||
public class H2FlowRepositoryTest extends AbstractJdbcFlowRepositoryTest {
|
||||
|
||||
|
||||
}
|
||||
|
||||
@@ -0,0 +1,6 @@
|
||||
package io.kestra.runner.h2;
|
||||
|
||||
import io.kestra.jdbc.runner.JdbcConcurrencyRunnerTest;
|
||||
|
||||
public class H2RunnerConcurrencyTest extends JdbcConcurrencyRunnerTest {
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user