mirror of https://github.com/kestra-io/kestra.git
synced 2025-12-26 14:00:23 -05:00

Compare commits (48 Commits)
fix/trigge...feat/entit
| Author | SHA1 | Date |
|---|---|---|
| | 63e11c7d94 | |
| | 11e199da33 | |
| | 5d5165b7b9 | |
| | 44d0c10713 | |
| | 167734e32a | |
| | 24e61c81c0 | |
| | 379764a033 | |
| | d55dd275c3 | |
| | f409657e8a | |
| | 22f0b3ffdf | |
| | 0d99dc6862 | |
| | fd3adc48b8 | |
| | 1a8a47c8cd | |
| | 7ea95f393e | |
| | 6935900699 | |
| | 0bc8e8d74a | |
| | 7f77b24ae0 | |
| | ec6820dc25 | |
| | d94193c143 | |
| | c9628047fa | |
| | 4cbc069af4 | |
| | eabe573fe6 | |
| | ecd64617c3 | |
| | a5650bca0f | |
| | ed59e262d4 | |
| | a5f9d54f7d | |
| | 47f4f43198 | |
| | 5d31c97f7f | |
| | f8107285c4 | |
| | 8dc8dc1796 | |
| | 834dfd2947 | |
| | 6edb88841f | |
| | 5653531628 | |
| | ee61276106 | |
| | abcf76f7b4 | |
| | 67ada7f61b | |
| | 0c13633f77 | |
| | a6cf2015ff | |
| | 2f9216c70b | |
| | 1903e6fac5 | |
| | 2d2cb00cab | |
| | 01b5441d16 | |
| | efc778e294 | |
| | 60235a4e73 | |
| | b167c52e76 | |
| | 216b124294 | |
| | b6e4df8de2 | |
| | 429e7c7945 | |
2 .github/workflows/vulnerabilities-check.yml (vendored)
@@ -43,7 +43,7 @@ jobs:
       # Upload dependency check report
       - name: Upload dependency check report
-        uses: actions/upload-artifact@v5
+        uses: actions/upload-artifact@v6
         if: ${{ always() }}
         with:
           name: dependency-check-report
@@ -21,7 +21,7 @@ plugins {

    // test
    id "com.adarshr.test-logger" version "4.0.0"
-   id "org.sonarqube" version "7.2.0.6526"
+   id "org.sonarqube" version "7.2.1.6560"
    id 'jacoco-report-aggregation'

    // helper
@@ -331,7 +331,7 @@ subprojects {
    }

    dependencies {
-       agent "org.aspectj:aspectjweaver:1.9.25"
+       agent "org.aspectj:aspectjweaver:1.9.25.1"
    }

    test {
@@ -137,6 +137,11 @@ flyway:
      # We must ignore missing migrations as we delete some wrong or not used anymore migrations
      ignore-migration-patterns: "*:missing,*:future"
      out-of-order: true
+     properties:
+       flyway:
+         postgresql:
+           transactional:
+             lock: false
    mysql:
      enabled: true
      locations:
@@ -82,8 +82,8 @@ dependencies {
    testImplementation "io.micronaut:micronaut-http-server-netty"
    testImplementation "io.micronaut:micronaut-management"

-   testImplementation "org.testcontainers:testcontainers:1.21.3"
-   testImplementation "org.testcontainers:junit-jupiter:1.21.3"
+   testImplementation "org.testcontainers:testcontainers:1.21.4"
+   testImplementation "org.testcontainers:junit-jupiter:1.21.4"
    testImplementation "org.bouncycastle:bcpkix-jdk18on"

    testImplementation "org.wiremock:wiremock-jetty12"
@@ -0,0 +1,37 @@
package io.kestra.core.exceptions;

import io.kestra.core.models.flows.Data;
import io.kestra.core.models.flows.Input;
import io.kestra.core.models.flows.Output;

import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

/**
 * Exception that can be thrown when Inputs/Outputs have validation problems.
 */
public class InputOutputValidationException extends KestraRuntimeException {
    public InputOutputValidationException(String message) {
        super(message);
    }

    public static InputOutputValidationException of(String message, Input<?> input) {
        String inputMessage = "Invalid value for input" + " `" + input.getId() + "`. Cause: " + message;
        return new InputOutputValidationException(inputMessage);
    }

    public static InputOutputValidationException of(String message, Output output) {
        String outputMessage = "Invalid value for output" + " `" + output.getId() + "`. Cause: " + message;
        return new InputOutputValidationException(outputMessage);
    }

    public static InputOutputValidationException of(String message) {
        return new InputOutputValidationException(message);
    }

    public static InputOutputValidationException merge(Set<InputOutputValidationException> exceptions) {
        String combinedMessage = exceptions.stream()
            .map(InputOutputValidationException::getMessage)
            .collect(Collectors.joining(System.lineSeparator()));
        throw new InputOutputValidationException(combinedMessage);
    }
}
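A minimal usage sketch (not part of the diff) for the new exception type; the messages below are hypothetical, and `merge` is the factory shown above that joins every message and throws the combined exception:

```java
import io.kestra.core.exceptions.InputOutputValidationException;

import java.util.Set;

public class ValidationErrorSketch {
    public static void main(String[] args) {
        // Hypothetical messages; in FlowInputOutput they come from bean-validation violations.
        Set<InputOutputValidationException> errors = Set.of(
            InputOutputValidationException.of("Missing required input:userId"),
            InputOutputValidationException.of("it must be an ISO-8601 date")
        );

        if (!errors.isEmpty()) {
            // merge() joins every message with a line separator and throws the combined exception.
            InputOutputValidationException.merge(errors);
        }
    }
}
```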
@@ -1,6 +1,8 @@
package io.kestra.core.exceptions;

import java.io.Serial;
+import java.util.List;
+import java.util.stream.Collectors;

/**
 * The top-level {@link KestraRuntimeException} for non-recoverable errors.
@@ -1,5 +0,0 @@
-package io.kestra.core.models;
-
-public interface DeletedInterface {
-    boolean isDeleted();
-}
@@ -26,6 +26,7 @@ public record Label(
    public static final String REPLAYED = SYSTEM_PREFIX + "replayed";
    public static final String SIMULATED_EXECUTION = SYSTEM_PREFIX + "simulatedExecution";
    public static final String TEST = SYSTEM_PREFIX + "test";
+   public static final String FROM = SYSTEM_PREFIX + "from";

    /**
     * Static helper method for converting a list of labels to a nested map.
@@ -16,6 +16,7 @@ import jakarta.validation.constraints.NotNull;
public class Setting {
    public static final String INSTANCE_UUID = "instance.uuid";
    public static final String INSTANCE_VERSION = "instance.version";
+   public static final String INSTANCE_EDITION = "instance.edition";

    @NotNull
    private String key;
18 core/src/main/java/io/kestra/core/models/SoftDeletable.java (Normal file)
@@ -0,0 +1,18 @@
package io.kestra.core.models;

/**
 * This interface marks entities that implement a soft deletion mechanism.
 * Soft deletion is based on a <code>deleted</code> field that is set to <code>true</code> when the entity is deleted.
 * Physical deletion either never occurs or occurs in a dedicated purge mechanism.
 */
public interface SoftDeletable<T> {
    /**
     * Whether an entity is deleted or not.
     */
    boolean isDeleted();

    /**
     * Delete the current entity: set its <code>deleted</code> field to <code>true</code>.
     */
    T toDeleted();
}
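A short illustration (not from this changeset) of the contract the new interface expects; the `Comment` record is hypothetical, while the actual changes below switch existing models such as `Dashboard` and `Execution` from `DeletedInterface` to `SoftDeletable`:

```java
import io.kestra.core.models.SoftDeletable;

// Hypothetical entity used only to illustrate the contract.
public record Comment(String id, String body, boolean deleted) implements SoftDeletable<Comment> {
    @Override
    public boolean isDeleted() {
        return deleted;
    }

    @Override
    public Comment toDeleted() {
        // Soft delete: keep the data, flip the flag.
        return new Comment(id, body, true);
    }
}
```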
@@ -1,7 +1,7 @@
package io.kestra.core.models.dashboards;

import com.fasterxml.jackson.annotation.JsonIgnore;
-import io.kestra.core.models.DeletedInterface;
+import io.kestra.core.models.SoftDeletable;
import io.kestra.core.models.HasUID;
import io.kestra.core.models.dashboards.charts.Chart;
import io.kestra.core.utils.IdUtils;
@@ -26,7 +26,7 @@ import java.util.Objects;
@NoArgsConstructor
@Introspected
@ToString
-public class Dashboard implements HasUID, DeletedInterface {
+public class Dashboard implements HasUID, SoftDeletable<Dashboard> {
    @Hidden
    @Pattern(regexp = "^[a-z0-9][a-z0-9_-]*")
    private String tenantId;
@@ -71,6 +71,7 @@ public class Dashboard implements HasUID, DeletedInterface {
        );
    }

+   @Override
    public Dashboard toDeleted() {
        return this.toBuilder()
            .deleted(true)
@@ -11,7 +11,7 @@ import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Streams;
import io.kestra.core.debug.Breakpoint;
import io.kestra.core.exceptions.InternalException;
-import io.kestra.core.models.DeletedInterface;
+import io.kestra.core.models.SoftDeletable;
import io.kestra.core.models.Label;
import io.kestra.core.models.TenantInterface;
import io.kestra.core.models.flows.Flow;
@@ -53,7 +53,7 @@ import java.util.zip.CRC32;
@AllArgsConstructor
@ToString
@EqualsAndHashCode
-public class Execution implements DeletedInterface, TenantInterface {
+public class Execution implements SoftDeletable<Execution>, TenantInterface {

    @With
    @Hidden
@@ -1111,7 +1111,7 @@ public class Execution implements DeletedInterface, TenantInterface {
        .toList();
    }

+   @Override
    public Execution toDeleted() {
        return this.toBuilder()
            .deleted(true)
@@ -1,7 +1,6 @@
package io.kestra.core.models.executions;

import com.fasterxml.jackson.annotation.JsonInclude;
-import io.kestra.core.models.DeletedInterface;
import io.kestra.core.models.TenantInterface;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.triggers.AbstractTrigger;
@@ -22,7 +21,7 @@ import java.util.stream.Stream;

@Value
@Builder(toBuilder = true)
-public class LogEntry implements DeletedInterface, TenantInterface {
+public class LogEntry implements TenantInterface {
    @Hidden
    @Pattern(regexp = "^[a-z0-9][a-z0-9_-]*")
    String tenantId;
@@ -57,10 +56,6 @@ public class LogEntry implements DeletedInterface, TenantInterface {

    String message;

-   @NotNull
-   @Builder.Default
-   boolean deleted = false;

    @Nullable
    ExecutionKind executionKind;
@@ -1,7 +1,6 @@
package io.kestra.core.models.executions;

import com.fasterxml.jackson.annotation.JsonInclude;
-import io.kestra.core.models.DeletedInterface;
import io.kestra.core.models.TenantInterface;
import io.kestra.core.models.executions.metrics.Counter;
import io.kestra.core.models.executions.metrics.Gauge;
@@ -18,7 +17,7 @@ import jakarta.validation.constraints.Pattern;

@Value
@Builder(toBuilder = true)
-public class MetricEntry implements DeletedInterface, TenantInterface {
+public class MetricEntry implements TenantInterface {
    @Hidden
    @Pattern(regexp = "^[a-z0-9][a-z0-9_-]*")
    String tenantId;
@@ -54,10 +53,6 @@ public class MetricEntry implements DeletedInterface, TenantInterface {
    @Nullable
    Map<String, String> tags;

-   @NotNull
-   @Builder.Default
-   boolean deleted = false;

    @Nullable
    ExecutionKind executionKind;
@@ -3,9 +3,7 @@ package io.kestra.core.models.executions;
import com.fasterxml.jackson.annotation.JsonInclude;
import io.kestra.core.models.TenantInterface;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.tasks.FlowableTask;
import io.kestra.core.models.tasks.ResolvedTask;
import io.kestra.core.models.tasks.Task;
import io.kestra.core.models.tasks.retrys.AbstractRetry;
import io.kestra.core.utils.IdUtils;
import io.swagger.v3.oas.annotations.Hidden;
@@ -95,8 +93,16 @@ public class TaskRun implements TenantInterface {
            this.forceExecution
        );
    }
+   public TaskRun withStateAndAttempt(State.Type state) {
+       List<TaskRunAttempt> newAttempts = new ArrayList<>(this.attempts != null ? this.attempts : List.of());
+
+       if (newAttempts.isEmpty()) {
+           newAttempts.add(TaskRunAttempt.builder().state(new State(state)).build());
+       } else {
+           TaskRunAttempt updatedLast = newAttempts.getLast().withState(state);
+           newAttempts.set(newAttempts.size() - 1, updatedLast);
+       }

    public TaskRun replaceState(State newState) {
        return new TaskRun(
            this.tenantId,
            this.id,
@@ -106,9 +112,9 @@ public class TaskRun implements TenantInterface {
            this.taskId,
            this.parentTaskRunId,
            this.value,
-           this.attempts,
+           newAttempts,
            this.outputs,
-           newState,
+           this.state.withState(state),
            this.iteration,
            this.dynamic,
            this.forceExecution
@@ -1,7 +1,5 @@
package io.kestra.core.models.flows;

-import io.kestra.core.models.validations.ManualConstraintViolation;
-import jakarta.validation.ConstraintViolationException;

/**
 * Interface for defining an identifiable and typed data.
@@ -29,16 +27,4 @@ public interface Data {
     */
    String getDisplayName();

-   @SuppressWarnings("unchecked")
-   default ConstraintViolationException toConstraintViolationException(String message, Object value) {
-       Class<Data> cls = (Class<Data>) this.getClass();
-
-       return ManualConstraintViolation.toConstraintViolationException(
-           "Invalid " + (this instanceof Output ? "output" : "input") + " for `" + getId() + "`, " + message + ", but received `" + value + "`",
-           this,
-           cls,
-           this.getId(),
-           value
-       );
-   }
}
@@ -1,6 +1,5 @@
package io.kestra.core.models.flows;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
@@ -130,7 +129,7 @@ public class Flow extends AbstractFlow implements HasUID {
    @Valid
    @PluginProperty
    List<SLA> sla;

    @Schema(
        title = "Conditions evaluated before the flow is executed.",
        description = "A list of conditions that are evaluated before the flow is executed. If no checks are defined, the flow executes normally."
@@ -343,6 +342,7 @@ public class Flow extends AbstractFlow implements HasUID {
        }
    }

+   @Override
    public Flow toDeleted() {
        return this.toBuilder()
            .revision(this.revision + 1)
@@ -355,7 +355,7 @@ public class Flow extends AbstractFlow implements HasUID {
     * To be conservative a flow MUST not return any source.
     */
    @Override
    @JsonIgnore
    @Schema(hidden = true)
    public String getSource() {
        return null;
    }
@@ -58,4 +58,9 @@ public class FlowForExecution extends AbstractFlow {
    public String getSource() {
        return null;
    }
+
+   @Override
+   public FlowForExecution toDeleted() {
+       throw new UnsupportedOperationException("Can't delete a FlowForExecution");
+   }
}
@@ -5,7 +5,7 @@ import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
-import io.kestra.core.models.DeletedInterface;
+import io.kestra.core.models.SoftDeletable;
import io.kestra.core.models.HasSource;
import io.kestra.core.models.HasUID;
import io.kestra.core.models.Label;
@@ -27,7 +27,7 @@ import java.util.stream.Collectors;
 * The base interface for FLow.
 */
@JsonDeserialize(as = GenericFlow.class)
-public interface FlowInterface extends FlowId, DeletedInterface, TenantInterface, HasUID, HasSource {
+public interface FlowInterface extends FlowId, SoftDeletable<FlowInterface>, TenantInterface, HasUID, HasSource {

    Pattern YAML_REVISION_MATCHER = Pattern.compile("(?m)^revision: \\d+\n?");
@@ -1,14 +1,12 @@
package io.kestra.core.models.flows;

import com.fasterxml.jackson.annotation.JsonIgnore;
import io.micronaut.core.annotation.Introspected;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.ToString;
import lombok.experimental.SuperBuilder;
import io.swagger.v3.oas.annotations.media.Schema;

import java.util.Objects;
import java.util.regex.Pattern;

@SuperBuilder(toBuilder = true)
@Getter
@@ -48,7 +46,7 @@ public class FlowWithSource extends Flow {
    }

    @Override
    @JsonIgnore(value = false)
    @Schema(hidden = false)
    public String getSource() {
        return this.source;
    }
@@ -96,4 +96,9 @@ public class GenericFlow extends AbstractFlow implements HasUID {
    public List<GenericTrigger> getTriggers() {
        return Optional.ofNullable(triggers).orElse(List.of());
    }
+
+   @Override
+   public FlowInterface toDeleted() {
+       throw new UnsupportedOperationException("Can't delete a GenericFlow");
+   }
}
@@ -1,10 +1,12 @@
package io.kestra.core.models.flows.input;

+import io.kestra.core.exceptions.InputOutputValidationException;
import io.kestra.core.models.flows.Input;
import jakarta.annotation.Nullable;
import jakarta.validation.ConstraintViolationException;
import jakarta.validation.constraints.NotNull;

+import java.util.Set;

/**
 * Represents an input along with its associated value and validation state.
 *
@@ -12,15 +14,15 @@ import jakarta.validation.constraints.NotNull;
 * @param value The provided value for the input.
 * @param enabled {@code true} if the input is enabled; {@code false} otherwise.
 * @param isDefault {@code true} if the provided value is the default; {@code false} otherwise.
- * @param exception The validation exception, if the input value is invalid; {@code null} otherwise.
+ * @param exceptions The validation exceptions, if the input value is invalid; {@code null} otherwise.
 */
public record InputAndValue(
    Input<?> input,
    Object value,
    boolean enabled,
    boolean isDefault,
-   ConstraintViolationException exception) {
+   Set<InputOutputValidationException> exceptions) {

    /**
     * Creates a new {@link InputAndValue} instance.
     *
@@ -7,6 +7,7 @@ import io.kestra.core.models.property.Property;
import io.kestra.core.models.validations.ManualConstraintViolation;
import io.kestra.core.validations.Regex;
import io.swagger.v3.oas.annotations.media.Schema;
+import jakarta.validation.ConstraintViolation;
import jakarta.validation.ConstraintViolationException;
import jakarta.validation.constraints.NotNull;
import lombok.Builder;
@@ -14,10 +15,7 @@ import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.experimental.SuperBuilder;

-import java.util.Collection;
-import java.util.List;
-import java.util.Objects;
-import java.util.Optional;
+import java.util.*;
import java.util.function.Function;

@SuperBuilder
@@ -77,30 +75,35 @@ public class MultiselectInput extends Input<List<String>> implements ItemTypeInt

    @Override
    public void validate(List<String> inputs) throws ConstraintViolationException {
+       Set<ConstraintViolation<?>> violations = new HashSet<>();

        if (values != null && options != null) {
-           throw ManualConstraintViolation.toConstraintViolationException(
+           violations.add(ManualConstraintViolation.of(
                "you can't define both `values` and `options`",
                this,
                MultiselectInput.class,
                getId(),
                ""
-           );
+           ));
        }

        if (!this.getAllowCustomValue()) {
            for (String input : inputs) {
                List<@Regex String> finalValues = this.values != null ? this.values : this.options;
                if (!finalValues.contains(input)) {
-                   throw ManualConstraintViolation.toConstraintViolationException(
-                       "it must match the values `" + finalValues + "`",
+                   violations.add(ManualConstraintViolation.of(
+                       "value `" + input + "` doesn't match the values `" + finalValues + "`",
                        this,
                        MultiselectInput.class,
                        getId(),
                        input
-                   );
+                   ));
                }
            }
        }
+       if (!violations.isEmpty()) {
+           throw ManualConstraintViolation.toConstraintViolationException(violations);
+       }
    }

    /** {@inheritDoc} **/
@@ -145,7 +148,7 @@ public class MultiselectInput extends Input<List<String>> implements ItemTypeInt

        String type = Optional.ofNullable(result).map(Object::getClass).map(Class::getSimpleName).orElse("<null>");
        throw ManualConstraintViolation.toConstraintViolationException(
-           "Invalid expression result. Expected a list of strings, but received " + type,
+           "Invalid expression result. Expected a list of strings",
            this,
            MultiselectInput.class,
            getId(),
@@ -125,7 +125,7 @@ public class SelectInput extends Input<String> implements RenderableInput {

        String type = Optional.ofNullable(result).map(Object::getClass).map(Class::getSimpleName).orElse("<null>");
        throw ManualConstraintViolation.toConstraintViolationException(
-           "Invalid expression result. Expected a list of strings, but received " + type,
+           "Invalid expression result. Expected a list of strings",
            this,
            SelectInput.class,
            getId(),
@@ -1,6 +1,6 @@
package io.kestra.core.models.kv;

-import io.kestra.core.models.DeletedInterface;
+import io.kestra.core.models.SoftDeletable;
import io.kestra.core.models.HasUID;
import io.kestra.core.models.TenantInterface;
import io.kestra.core.storages.kv.KVEntry;
@@ -22,7 +22,7 @@ import java.util.Optional;
@FieldDefaults(makeFinal = true, level = AccessLevel.PRIVATE)
@ToString
@EqualsAndHashCode
-public class PersistedKvMetadata implements DeletedInterface, TenantInterface, HasUID {
+public class PersistedKvMetadata implements SoftDeletable<PersistedKvMetadata>, TenantInterface, HasUID {
    @With
    @Hidden
    @Pattern(regexp = "^[a-z0-9][a-z0-9_-]*")
@@ -83,6 +83,7 @@ public class PersistedKvMetadata implements DeletedInterface, TenantInterface, H
        return this.toBuilder().updated(Instant.now()).last(true).build();
    }

+   @Override
    public PersistedKvMetadata toDeleted() {
        return this.toBuilder().updated(Instant.now()).deleted(true).build();
    }
@@ -17,8 +17,4 @@ public class Namespace implements NamespaceInterface {
    @NotNull
    @Pattern(regexp="^[a-z0-9][a-z0-9._-]*")
    protected String id;
-
-   @NotNull
-   @Builder.Default
-   boolean deleted = false;
}
@@ -1,9 +1,8 @@
package io.kestra.core.models.namespaces;

-import io.kestra.core.models.DeletedInterface;
import io.kestra.core.models.HasUID;

-public interface NamespaceInterface extends DeletedInterface, HasUID {
+public interface NamespaceInterface extends HasUID {
    String getId();
@@ -2,8 +2,8 @@ package io.kestra.core.models.namespaces.files;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
-import io.kestra.core.models.DeletedInterface;
import io.kestra.core.models.HasUID;
+import io.kestra.core.models.SoftDeletable;
import io.kestra.core.models.TenantInterface;
import io.kestra.core.storages.FileAttributes;
import io.kestra.core.storages.NamespaceFile;
@@ -24,7 +24,7 @@ import java.time.Instant;
@FieldDefaults(makeFinal = true, level = AccessLevel.PRIVATE)
@ToString
@EqualsAndHashCode
-public class NamespaceFileMetadata implements DeletedInterface, TenantInterface, HasUID {
+public class NamespaceFileMetadata implements SoftDeletable<NamespaceFileMetadata>, TenantInterface, HasUID {
    @With
    @Hidden
    @Pattern(regexp = "^[a-z0-9][a-z0-9_-]*")
@@ -116,6 +116,7 @@ public class NamespaceFileMetadata implements DeletedInterface, TenantInterface,
        return this.toBuilder().updated(saveDate).last(true).build();
    }

+   @Override
    public NamespaceFileMetadata toDeleted() {
        return this.toBuilder().deleted(true).updated(Instant.now()).build();
    }
@@ -11,6 +11,7 @@ import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import com.google.common.annotations.VisibleForTesting;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.runners.RunContext;
+import io.kestra.core.runners.RunContextProperty;
import io.kestra.core.serializers.JacksonMapper;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.constraints.NotNull;
@@ -156,9 +157,9 @@ public class Property<T> {
    /**
     * Render a property, then convert it to its target type.<br>
     * <p>
-    * This method is designed to be used only by the {@link io.kestra.core.runners.RunContextProperty}.
+    * This method is designed to be used only by the {@link RunContextProperty}.
     *
-    * @see io.kestra.core.runners.RunContextProperty#as(Class)
+    * @see RunContextProperty#as(Class)
     */
    public static <T> T as(Property<T> property, PropertyContext context, Class<T> clazz) throws IllegalVariableEvaluationException {
        return as(property, context, clazz, Map.of());
@@ -167,25 +168,57 @@ public class Property<T> {
    /**
     * Render a property with additional variables, then convert it to its target type.<br>
     * <p>
-    * This method is designed to be used only by the {@link io.kestra.core.runners.RunContextProperty}.
+    * This method is designed to be used only by the {@link RunContextProperty}.
     *
-    * @see io.kestra.core.runners.RunContextProperty#as(Class, Map)
+    * @see RunContextProperty#as(Class, Map)
     */
    public static <T> T as(Property<T> property, PropertyContext context, Class<T> clazz, Map<String, Object> variables) throws IllegalVariableEvaluationException {
        if (property.skipCache || property.value == null) {
            String rendered = context.render(property.expression, variables);
-           property.value = MAPPER.convertValue(rendered, clazz);
+           property.value = deserialize(rendered, clazz);
        }

        return property.value;
    }

+   private static <T> T deserialize(Object rendered, Class<T> clazz) throws IllegalVariableEvaluationException {
+       try {
+           return MAPPER.convertValue(rendered, clazz);
+       } catch (IllegalArgumentException e) {
+           if (rendered instanceof String str) {
+               try {
+                   return MAPPER.readValue(str, clazz);
+               } catch (JsonProcessingException ex) {
+                   throw new IllegalVariableEvaluationException(ex);
+               }
+           }
+
+           throw new IllegalVariableEvaluationException(e);
+       }
+   }
+
+   private static <T> T deserialize(Object rendered, JavaType type) throws IllegalVariableEvaluationException {
+       try {
+           return MAPPER.convertValue(rendered, type);
+       } catch (IllegalArgumentException e) {
+           if (rendered instanceof String str) {
+               try {
+                   return MAPPER.readValue(str, type);
+               } catch (JsonProcessingException ex) {
+                   throw new IllegalVariableEvaluationException(ex);
+               }
+           }
+
+           throw new IllegalVariableEvaluationException(e);
+       }
+   }

    /**
     * Render a property then convert it as a list of target type.<br>
     * <p>
-    * This method is designed to be used only by the {@link io.kestra.core.runners.RunContextProperty}.
+    * This method is designed to be used only by the {@link RunContextProperty}.
     *
-    * @see io.kestra.core.runners.RunContextProperty#asList(Class)
+    * @see RunContextProperty#asList(Class)
     */
    public static <T, I> T asList(Property<T> property, PropertyContext context, Class<I> itemClazz) throws IllegalVariableEvaluationException {
        return asList(property, context, itemClazz, Map.of());
@@ -194,37 +227,39 @@ public class Property<T> {
    /**
     * Render a property with additional variables, then convert it as a list of target type.<br>
     * <p>
-    * This method is designed to be used only by the {@link io.kestra.core.runners.RunContextProperty}.
+    * This method is designed to be used only by the {@link RunContextProperty}.
     *
-    * @see io.kestra.core.runners.RunContextProperty#asList(Class, Map)
+    * @see RunContextProperty#asList(Class, Map)
     */
    @SuppressWarnings("unchecked")
    public static <T, I> T asList(Property<T> property, PropertyContext context, Class<I> itemClazz, Map<String, Object> variables) throws IllegalVariableEvaluationException {
        if (property.skipCache || property.value == null) {
            JavaType type = MAPPER.getTypeFactory().constructCollectionLikeType(List.class, itemClazz);
-           try {
-               String trimmedExpression = property.expression.trim();
-               // We need to detect if the expression is already a list or if it's a pebble expression (for eg. referencing a variable containing a list).
-               // Doing that allows us to, if it's an expression, first render then read it as a list.
-               if (trimmedExpression.startsWith("{{") && trimmedExpression.endsWith("}}")) {
-                   property.value = MAPPER.readValue(context.render(property.expression, variables), type);
-               }
-               // Otherwise, if it's already a list, we read it as a list first then render it from run context which handle list rendering by rendering each item of the list
-               else {
-                   List<?> asRawList = MAPPER.readValue(property.expression, List.class);
-                   property.value = (T) asRawList.stream()
-                       .map(throwFunction(item -> {
-                           if (item instanceof String str) {
-                               return MAPPER.convertValue(context.render(str, variables), itemClazz);
-                           } else if (item instanceof Map map) {
-                               return MAPPER.convertValue(context.render(map, variables), itemClazz);
-                           }
-                           return item;
-                       }))
-                       .toList();
-               }
-           } catch (JsonProcessingException e) {
-               throw new IllegalVariableEvaluationException(e);
+           String trimmedExpression = property.expression.trim();
+           // We need to detect if the expression is already a list or if it's a pebble expression (for eg. referencing a variable containing a list).
+           // Doing that allows us to, if it's an expression, first render then read it as a list.
+           if (trimmedExpression.startsWith("{{") && trimmedExpression.endsWith("}}")) {
+               property.value = deserialize(context.render(property.expression, variables), type);
+           }
+           // Otherwise, if it's already a list, we read it as a list first then render it from run context which handle list rendering by rendering each item of the list
+           else {
+               List<?> asRawList = deserialize(property.expression, List.class);
+               property.value = (T) asRawList.stream()
+                   .map(throwFunction(item -> {
+                       Object rendered = null;
+                       if (item instanceof String str) {
+                           rendered = context.render(str, variables);
+                       } else if (item instanceof Map map) {
+                           rendered = context.render(map, variables);
+                       }
+
+                       if (rendered != null) {
+                           return deserialize(rendered, itemClazz);
+                       }
+
+                       return item;
+                   }))
+                   .toList();
+           }
        }
@@ -234,9 +269,9 @@ public class Property<T> {
    /**
     * Render a property then convert it as a map of target types.<br>
     * <p>
-    * This method is designed to be used only by the {@link io.kestra.core.runners.RunContextProperty}.
+    * This method is designed to be used only by the {@link RunContextProperty}.
     *
-    * @see io.kestra.core.runners.RunContextProperty#asMap(Class, Class)
+    * @see RunContextProperty#asMap(Class, Class)
     */
    public static <T, K, V> T asMap(Property<T> property, RunContext runContext, Class<K> keyClass, Class<V> valueClass) throws IllegalVariableEvaluationException {
        return asMap(property, runContext, keyClass, valueClass, Map.of());
@@ -248,7 +283,7 @@ public class Property<T> {
     * This method is safe to be used as many times as you want as the rendering and conversion will be cached.
     * Warning, due to the caching mechanism, this method is not thread-safe.
     *
-    * @see io.kestra.core.runners.RunContextProperty#asMap(Class, Class, Map)
+    * @see RunContextProperty#asMap(Class, Class, Map)
     */
    @SuppressWarnings({"rawtypes", "unchecked"})
    public static <T, K, V> T asMap(Property<T> property, RunContext runContext, Class<K> keyClass, Class<V> valueClass, Map<String, Object> variables) throws IllegalVariableEvaluationException {
@@ -260,12 +295,12 @@ public class Property<T> {
        // We need to detect if the expression is already a map or if it's a pebble expression (for eg. referencing a variable containing a map).
        // Doing that allows us to, if it's an expression, first render then read it as a map.
        if (trimmedExpression.startsWith("{{") && trimmedExpression.endsWith("}}")) {
-           property.value = MAPPER.readValue(runContext.render(property.expression, variables), targetMapType);
+           property.value = deserialize(runContext.render(property.expression, variables), targetMapType);
        }
        // Otherwise if it's already a map we read it as a map first then render it from run context which handle map rendering by rendering each entry of the map (otherwise it will fail with nested expressions in values for eg.)
        else {
            Map asRawMap = MAPPER.readValue(property.expression, Map.class);
-           property.value = MAPPER.convertValue(runContext.render(asRawMap, variables), targetMapType);
+           property.value = deserialize(runContext.render(asRawMap, variables), targetMapType);
        }
    } catch (JsonProcessingException e) {
        throw new IllegalVariableEvaluationException(e);
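The `deserialize` helpers added above fall back from Jackson's `convertValue` to `readValue` when the rendered expression turns out to be a JSON string. A standalone sketch of that behaviour (plain Jackson, not kestra code; the sample value is hypothetical):

```java
import com.fasterxml.jackson.databind.ObjectMapper;

import java.util.List;

public class DeserializeFallbackSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // A rendered expression that produced JSON text rather than a structured value.
        String rendered = "[\"a\", \"b\", \"c\"]";

        Object value;
        try {
            // convertValue() treats the String as a scalar and cannot coerce it into a List.
            value = mapper.convertValue(rendered, List.class);
        } catch (IllegalArgumentException e) {
            // readValue() parses the JSON text instead, which is the fallback the diff introduces.
            value = mapper.readValue(rendered, List.class);
        }
        System.out.println(value); // [a, b, c]
    }
}
```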
@@ -7,8 +7,8 @@ import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.introspect.AnnotatedMember;
import com.fasterxml.jackson.databind.introspect.JacksonAnnotationIntrospector;
-import io.kestra.core.models.DeletedInterface;
import io.kestra.core.models.HasUID;
+import io.kestra.core.models.SoftDeletable;
import io.kestra.core.models.TenantInterface;
import io.kestra.core.models.tasks.Task;
import io.kestra.core.models.validations.ManualConstraintViolation;
@@ -35,7 +35,7 @@ import jakarta.validation.constraints.Pattern;
@Introspected
@ToString
@EqualsAndHashCode
-public class Template implements DeletedInterface, TenantInterface, HasUID {
+public class Template implements SoftDeletable<Template>, TenantInterface, HasUID {
    private static final ObjectMapper YAML_MAPPER = JacksonMapper.ofYaml().copy()
        .setAnnotationIntrospector(new JacksonAnnotationIntrospector() {
            @Override
@@ -141,6 +141,7 @@ public class Template implements DeletedInterface, TenantInterface, HasUID {
        }
    }

+   @Override
    public Template toDeleted() {
        return new Template(
            this.tenantId,
@@ -82,6 +82,12 @@ abstract public class AbstractTrigger implements TriggerInterface {
    @PluginProperty(hidden = true, group = PluginProperty.CORE_GROUP)
    private boolean failOnTriggerError = false;

+   @PluginProperty(group = PluginProperty.CORE_GROUP)
+   @Schema(
+       title = "Specifies whether a trigger is allowed to start a new execution even if a previous run is still in progress."
+   )
+   private boolean allowConcurrent = false;

    /**
     * For backward compatibility: we rename minLogLevel to logLevel.
     * @deprecated use {@link #logLevel} instead
@@ -1,22 +1,37 @@
package io.kestra.core.models.triggers;

import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.models.annotations.PluginProperty;
import io.kestra.core.models.conditions.ConditionContext;
import io.kestra.core.runners.RunContext;
import io.swagger.v3.oas.annotations.media.Schema;

import java.time.ZonedDateTime;
import java.util.Map;

public interface Schedulable extends PollingTriggerInterface {
    String PLUGIN_PROPERTY_RECOVER_MISSED_SCHEDULES = "recoverMissedSchedules";

+   @Schema(
+       title = "The inputs to pass to the scheduled flow"
+   )
+   @PluginProperty(dynamic = true)
+   Map<String, Object> getInputs();
+
+   @Schema(
+       title = "Action to take in the case of missed schedules",
+       description = "`ALL` will recover all missed schedules, `LAST` will only recovered the last missing one, `NONE` will not recover any missing schedule.\n" +
+           "The default is `ALL` unless a different value is configured using the global plugin configuration."
+   )
+   @PluginProperty
+   RecoverMissedSchedules getRecoverMissedSchedules();

    /**
     * Compute the previous evaluation of a trigger.
     * This is used when a trigger misses some schedule to compute the next date to evaluate in the past.
     */
    ZonedDateTime previousEvaluationDate(ConditionContext conditionContext) throws IllegalVariableEvaluationException;

-   RecoverMissedSchedules getRecoverMissedSchedules();

    /**
     * Load the default RecoverMissedSchedules from plugin property, or else ALL.
     */
@@ -172,7 +172,7 @@ public class Trigger extends TriggerContext implements HasUID {

    if (abstractTrigger instanceof PollingTriggerInterface pollingTriggerInterface) {
        try {
-           nextDate = pollingTriggerInterface.nextEvaluationDate(conditionContext, Optional.empty());
+           nextDate = pollingTriggerInterface.nextEvaluationDate(conditionContext, lastTrigger);
        } catch (InvalidTriggerConfigurationException e) {
            disabled = true;
        }
@@ -6,12 +6,9 @@ import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.ExecutionTrigger;
import io.kestra.core.models.tasks.Output;
import io.kestra.core.models.flows.State;
import io.kestra.core.runners.DefaultRunContext;
import io.kestra.core.runners.FlowInputOutput;
import io.kestra.core.runners.RunContext;
import io.kestra.core.utils.IdUtils;
import io.kestra.core.utils.ListUtils;
import java.time.ZonedDateTime;
import java.util.*;

public abstract class TriggerService {
@@ -51,49 +48,6 @@ public abstract class TriggerService {
        return generateExecution(IdUtils.create(), trigger, context, executionTrigger, conditionContext);
    }

-   public static Execution generateScheduledExecution(
-       AbstractTrigger trigger,
-       ConditionContext conditionContext,
-       TriggerContext context,
-       List<Label> labels,
-       Map<String, Object> inputs,
-       Map<String, Object> variables,
-       Optional<ZonedDateTime> scheduleDate
-   ) {
-       RunContext runContext = conditionContext.getRunContext();
-       ExecutionTrigger executionTrigger = ExecutionTrigger.of(trigger, variables);
-
-       List<Label> executionLabels = new ArrayList<>(ListUtils.emptyOnNull(labels));
-       if (executionLabels.stream().noneMatch(label -> Label.CORRELATION_ID.equals(label.key()))) {
-           // add a correlation ID if none exist
-           executionLabels.add(new Label(Label.CORRELATION_ID, runContext.getTriggerExecutionId()));
-       }
-       Execution execution = Execution.builder()
-           .id(runContext.getTriggerExecutionId())
-           .tenantId(context.getTenantId())
-           .namespace(context.getNamespace())
-           .flowId(context.getFlowId())
-           .flowRevision(conditionContext.getFlow().getRevision())
-           .variables(conditionContext.getFlow().getVariables())
-           .labels(executionLabels)
-           .state(new State())
-           .trigger(executionTrigger)
-           .scheduleDate(scheduleDate.map(date -> date.toInstant()).orElse(null))
-           .build();
-
-       Map<String, Object> allInputs = new HashMap<>();
-
-       if (inputs != null) {
-           allInputs.putAll(inputs);
-       }
-
-       // add inputs and inject defaults (FlowInputOutput handles defaults internally)
-       FlowInputOutput flowInputOutput = ((DefaultRunContext)runContext).getApplicationContext().getBean(FlowInputOutput.class);
-       execution = execution.withInputs(flowInputOutput.readExecutionInputs(conditionContext.getFlow(), execution, allInputs));
-
-       return execution;
-   }

    private static Execution generateExecution(
        String id,
        AbstractTrigger trigger,
@@ -102,6 +56,7 @@ public abstract class TriggerService {
        ConditionContext conditionContext
    ) {
        List<Label> executionLabels = new ArrayList<>(ListUtils.emptyOnNull(trigger.getLabels()));
+       executionLabels.add(new Label(Label.FROM, "trigger"));
        if (executionLabels.stream().noneMatch(label -> Label.CORRELATION_ID.equals(label.key()))) {
            // add a correlation ID if none exist
            executionLabels.add(new Label(Label.CORRELATION_ID, id));
@@ -67,6 +67,11 @@ public class ManualConstraintViolation<T> implements ConstraintViolation<T> {
            invalidValue
        )));
    }
+   public static <T> ConstraintViolationException toConstraintViolationException(
+       Set<? extends ConstraintViolation<?>> constraintViolations
+   ) {
+       return new ConstraintViolationException(constraintViolations);
+   }

    public String getMessageTemplate() {
        return "{messageTemplate}";
@@ -36,7 +36,7 @@ public interface KvMetadataRepositoryInterface extends SaveRepositoryInterface<P
    );

    default PersistedKvMetadata delete(PersistedKvMetadata persistedKvMetadata) throws IOException {
-       return this.save(persistedKvMetadata.toBuilder().deleted(true).build());
+       return this.save(persistedKvMetadata.toDeleted());
    }

    /**
@@ -1,10 +1,10 @@
package io.kestra.core.repositories;

import io.kestra.core.models.Setting;
-import jakarta.validation.ConstraintViolationException;

import java.util.List;
import java.util.Optional;
+import jakarta.validation.ConstraintViolationException;

public interface SettingRepositoryInterface {
    Optional<Setting> findByKey(String key);
@@ -13,5 +13,7 @@ public interface SettingRepositoryInterface {

    Setting save(Setting setting) throws ConstraintViolationException;

+   Setting internalSave(Setting setting) throws ConstraintViolationException;
+
    Setting delete(Setting setting);
}
@@ -16,8 +16,8 @@ import java.util.function.Function;
public interface TriggerRepositoryInterface extends QueryBuilderInterface<Triggers.Fields> {
    Optional<Trigger> findLast(TriggerContext trigger);

    Optional<Trigger> findByExecution(Execution execution);

    Optional<Trigger> findByUid(String uid);

    List<Trigger> findAll(String tenantId);

    List<Trigger> findAllForAllTenants();
@@ -599,6 +599,11 @@ public class DefaultRunContext extends RunContext {
        return localPath;
    }

+   @Override
+   public InputAndOutput inputAndOutput() {
+       return new InputAndOutputImpl(this.applicationContext, this);
+   }

    /**
     * Builder class for constructing new {@link DefaultRunContext} objects.
     */
@@ -189,12 +189,11 @@ public final class ExecutableUtils {
        variables.put("taskRunIteration", currentTaskRun.getIteration());
    }

-   FlowInputOutput flowInputOutput = ((DefaultRunContext)runContext).getApplicationContext().getBean(FlowInputOutput.class);
    Instant scheduleOnDate = runContext.render(scheduleDate).as(ZonedDateTime.class).map(date -> date.toInstant()).orElse(null);
    Execution execution = Execution
        .newExecution(
            flow,
-           (f, e) -> flowInputOutput.readExecutionInputs(f, e, inputs),
+           (f, e) -> runContext.inputAndOutput().readInputs(f, e, inputs),
            newLabels,
            Optional.empty())
        .withTrigger(ExecutionTrigger.builder()
@@ -3,13 +3,13 @@ package io.kestra.core.runners;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.kestra.core.encryption.EncryptionService;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
-import io.kestra.core.exceptions.KestraRuntimeException;
+import io.kestra.core.exceptions.InputOutputValidationException;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Data;
import io.kestra.core.models.flows.DependsOn;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.Input;
import io.kestra.core.models.flows.Output;
import io.kestra.core.models.flows.RenderableInput;
import io.kestra.core.models.flows.Type;
import io.kestra.core.models.flows.input.FileInput;
@@ -19,7 +19,6 @@ import io.kestra.core.models.property.Property;
import io.kestra.core.models.property.PropertyContext;
import io.kestra.core.models.property.URIFetcher;
import io.kestra.core.models.tasks.common.EncryptedString;
-import io.kestra.core.models.validations.ManualConstraintViolation;
import io.kestra.core.serializers.JacksonMapper;
import io.kestra.core.storages.StorageContext;
import io.kestra.core.storages.StorageInterface;
@@ -209,8 +208,8 @@ public class FlowInputOutput {
    .filter(InputAndValue::enabled)
    .map(it -> {
        //TODO check to return all exception at-once.
-       if (it.exception() != null) {
-           throw it.exception();
+       if (it.exceptions() != null && !it.exceptions().isEmpty()) {
+           throw InputOutputValidationException.merge(it.exceptions());
        }
        return new AbstractMap.SimpleEntry<>(it.input().getId(), it.value());
    })
@@ -294,13 +293,9 @@ public class FlowInputOutput {
    try {
        isInputEnabled = Boolean.TRUE.equals(runContext.renderTyped(dependsOnCondition.get()));
    } catch (IllegalVariableEvaluationException e) {
-       resolvable.resolveWithError(ManualConstraintViolation.toConstraintViolationException(
-           "Invalid condition: " + e.getMessage(),
-           input,
-           (Class<Input>)input.getClass(),
-           input.getId(),
-           this
-       ));
+       resolvable.resolveWithError(
+           InputOutputValidationException.of("Invalid condition: " + e.getMessage())
+       );
        isInputEnabled = false;
    }
}
@@ -333,7 +328,7 @@ public class FlowInputOutput {
    // validate and parse input value
    if (value == null) {
        if (input.getRequired()) {
-           resolvable.resolveWithError(input.toConstraintViolationException("missing required input", null));
+           resolvable.resolveWithError(InputOutputValidationException.of("Missing required input:" + input.getId()));
        } else {
            resolvable.resolveWithValue(null);
        }
@@ -343,17 +338,18 @@ public class FlowInputOutput {
            parsedInput.ifPresent(parsed -> ((Input) resolvable.get().input()).validate(parsed.getValue()));
            parsedInput.ifPresent(typed -> resolvable.resolveWithValue(typed.getValue()));
        } catch (ConstraintViolationException e) {
-           ConstraintViolationException exception = e.getConstraintViolations().size() == 1 ?
-               input.toConstraintViolationException(List.copyOf(e.getConstraintViolations()).getFirst().getMessage(), value) :
-               input.toConstraintViolationException(e.getMessage(), value);
-           resolvable.resolveWithError(exception);
+           Input<?> finalInput = input;
+           Set<InputOutputValidationException> exceptions = e.getConstraintViolations().stream()
+               .map(c -> InputOutputValidationException.of(c.getMessage(), finalInput))
+               .collect(Collectors.toSet());
+           resolvable.resolveWithError(exceptions);
        }
    }
} catch (ConstraintViolationException e) {
    resolvable.resolveWithError(e);
-} catch (Exception e) {
-   ConstraintViolationException exception = input.toConstraintViolationException(e instanceof IllegalArgumentException ? e.getMessage() : e.toString(), resolvable.get().value());
-   resolvable.resolveWithError(exception);
+} catch (IllegalArgumentException e) {
+   resolvable.resolveWithError(InputOutputValidationException.of(e.getMessage(), input));
+}
+catch (Exception e) {
+   resolvable.resolveWithError(InputOutputValidationException.of(e.getMessage()));
+}

    return resolvable.get();
@@ -441,8 +437,12 @@ public class FlowInputOutput {
        }
        return entry;
    });
-} catch (Exception e) {
-   throw output.toConstraintViolationException(e.getMessage(), current);
-}
+} catch (IllegalArgumentException e) {
+   throw InputOutputValidationException.of(e.getMessage(), output);
+}
+catch (Exception e) {
+   throw InputOutputValidationException.of(e.getMessage());
+}
})
.filter(Optional::isPresent)
@@ -505,7 +505,7 @@ public class FlowInputOutput {
    if (matcher.matches()) {
        yield current.toString();
    } else {
-       throw new IllegalArgumentException("Expected `URI` but received `" + current + "`");
+       throw new IllegalArgumentException("Invalid URI format.");
    }
}
case ARRAY, MULTISELECT -> {
@@ -535,34 +535,10 @@ public class FlowInputOutput {
    } catch (IllegalArgumentException e) {
        throw e;
    } catch (Throwable e) {
-       throw new Exception("Expected `" + type + "` but received `" + current + "` with errors:\n```\n" + e.getMessage() + "\n```");
+       throw new Exception(" errors:\n```\n" + e.getMessage() + "\n```");
    }
}

-public static Map<String, Object> renderFlowOutputs(List<Output> outputs, RunContext runContext) throws IllegalVariableEvaluationException {
-   if (outputs == null) return Map.of();
-
-   // render required outputs
-   Map<String, Object> outputsById = outputs
-       .stream()
-       .filter(output -> output.getRequired() == null || output.getRequired())
-       .collect(HashMap::new, (map, entry) -> map.put(entry.getId(), entry.getValue()), Map::putAll);
-   outputsById = runContext.render(outputsById);
-
-   // render optional outputs one by one to catch, log, and skip any error.
-   for (io.kestra.core.models.flows.Output output : outputs) {
-       if (Boolean.FALSE.equals(output.getRequired())) {
-           try {
-               outputsById.putAll(runContext.render(Map.of(output.getId(), output.getValue())));
-           } catch (Exception e) {
-               runContext.logger().warn("Failed to render optional flow output '{}'. Output is ignored.", output.getId(), e);
-               outputsById.put(output.getId(), null);
-           }
-       }
-   }
-   return outputsById;
-}

/**
 * Mutable wrapper to hold a flow's input, and its resolved value.
 */
@@ -591,27 +567,30 @@ public class FlowInputOutput {
    }

    public void isDefault(boolean isDefault) {
-       this.input = new InputAndValue(this.input.input(), this.input.value(), this.input.enabled(), isDefault, this.input.exception());
+       this.input = new InputAndValue(this.input.input(), this.input.value(), this.input.enabled(), isDefault, this.input.exceptions());
    }

    public void setInput(final Input<?> input) {
-       this.input = new InputAndValue(input, this.input.value(), this.input.enabled(), this.input.isDefault(), this.input.exception());
+       this.input = new InputAndValue(input, this.input.value(), this.input.enabled(), this.input.isDefault(), this.input.exceptions());
    }

    public void resolveWithEnabled(boolean enabled) {
-       this.input = new InputAndValue(this.input.input(), input.value(), enabled, this.input.isDefault(), this.input.exception());
+       this.input = new InputAndValue(this.input.input(), input.value(), enabled, this.input.isDefault(), this.input.exceptions());
        markAsResolved();
    }

    public void resolveWithValue(@Nullable Object value) {
-       this.input = new InputAndValue(this.input.input(), value, this.input.enabled(), this.input.isDefault(), this.input.exception());
+       this.input = new InputAndValue(this.input.input(), value, this.input.enabled(), this.input.isDefault(), this.input.exceptions());
        markAsResolved();
    }

-   public void resolveWithError(@Nullable ConstraintViolationException exception) {
+   public void resolveWithError(@Nullable Set<InputOutputValidationException> exception) {
        this.input = new InputAndValue(this.input.input(), this.input.value(), this.input.enabled(), this.input.isDefault(), exception);
        markAsResolved();
    }
+   private void resolveWithError(@Nullable InputOutputValidationException exception) {
+       resolveWithError(Collections.singleton(exception));
+   }

    private void markAsResolved() {
        this.isResolved = true;
@@ -0,0 +1,29 @@
package io.kestra.core.runners;

import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.Output;

import java.util.List;
import java.util.Map;

/**
 * InputAndOutput could be used to work with flow execution inputs and outputs.
 */
public interface InputAndOutput {
    /**
     * Reads the inputs of a flow execution.
     */
    Map<String, Object> readInputs(FlowInterface flow, Execution execution, Map<String, Object> inputs);

    /**
     * Processes the outputs of a flow execution (parse them based on their types).
     */
    Map<String, Object> typedOutputs(FlowInterface flow, Execution execution, Map<String, Object> rOutputs);

    /**
     * Render flow execution outputs.
     */
    Map<String, Object> renderOutputs(List<Output> outputs) throws IllegalVariableEvaluationException;
}
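A sketch (not part of the diff) of how a caller can reach the new API through `RunContext#inputAndOutput()`, as the `ExecutableUtils` change in this compare does; the wrapper class and method names other than `inputAndOutput()` and `readInputs(...)` are illustrative:

```java
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.runners.InputAndOutput;
import io.kestra.core.runners.RunContext;

import java.util.Map;

public class InputAndOutputUsageSketch {
    public static Map<String, Object> resolveInputs(RunContext runContext,
                                                    FlowInterface flow,
                                                    Execution execution,
                                                    Map<String, Object> rawInputs) {
        // The accessor replaces looking up FlowInputOutput from the application context.
        InputAndOutput io = runContext.inputAndOutput();
        // Validates and types the raw values against the flow's declared inputs.
        return io.readInputs(flow, execution, rawInputs);
    }
}
```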
@@ -0,0 +1,56 @@
package io.kestra.core.runners;

import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.Output;
import io.micronaut.context.ApplicationContext;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

class InputAndOutputImpl implements InputAndOutput {
    private final FlowInputOutput flowInputOutput;
    private final RunContext runContext;

    InputAndOutputImpl(ApplicationContext applicationContext, RunContext runContext) {
        this.flowInputOutput = applicationContext.getBean(FlowInputOutput.class);
        this.runContext = runContext;
    }

    @Override
    public Map<String, Object> readInputs(FlowInterface flow, Execution execution, Map<String, Object> inputs) {
        return flowInputOutput.readExecutionInputs(flow, execution, inputs);
    }

    @Override
    public Map<String, Object> typedOutputs(FlowInterface flow, Execution execution, Map<String, Object> rOutputs) {
        return flowInputOutput.typedOutputs(flow, execution, rOutputs);
    }

    @Override
    public Map<String, Object> renderOutputs(List<Output> outputs) throws IllegalVariableEvaluationException {
        if (outputs == null) return Map.of();

        // render required outputs
        Map<String, Object> outputsById = outputs
            .stream()
            .filter(output -> output.getRequired() == null || output.getRequired())
            .collect(HashMap::new, (map, entry) -> map.put(entry.getId(), entry.getValue()), Map::putAll);
        outputsById = runContext.render(outputsById);

        // render optional outputs one by one to catch, log, and skip any error.
        for (io.kestra.core.models.flows.Output output : outputs) {
            if (Boolean.FALSE.equals(output.getRequired())) {
                try {
                    outputsById.putAll(runContext.render(Map.of(output.getId(), output.getValue())));
                } catch (Exception e) {
                    runContext.logger().warn("Failed to render optional flow output '{}'. Output is ignored.", output.getId(), e);
                    outputsById.put(output.getId(), null);
                }
            }
        }
        return outputsById;
    }
}
@@ -211,4 +211,9 @@ public abstract class RunContext implements PropertyContext {
* @return a new run context with the plugin configuration of the given plugin.
*/
public abstract RunContext cloneForPlugin(Plugin plugin);

/**
* @return an InputAndOutput that can be used to work with inputs and outputs.
*/
public abstract InputAndOutput inputAndOutput();
}

@@ -8,6 +8,7 @@ import io.micronaut.core.annotation.Nullable;
|
||||
import io.pebbletemplates.pebble.PebbleEngine;
|
||||
import io.pebbletemplates.pebble.extension.Extension;
|
||||
import io.pebbletemplates.pebble.extension.Function;
|
||||
import io.pebbletemplates.pebble.lexer.Syntax;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.inject.Singleton;
|
||||
|
||||
@@ -37,6 +38,13 @@ public class PebbleEngineFactory {
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
public PebbleEngine createWithCustomSyntax(Syntax syntax, Class<? extends Extension> extension) {
|
||||
PebbleEngine.Builder builder = newPebbleEngineBuilder()
|
||||
.syntax(syntax);
|
||||
this.applicationContext.getBeansOfType(extension).forEach(builder::extension);
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
public PebbleEngine createWithMaskedFunctions(VariableRenderer renderer, final List<String> functionsToMask) {
|
||||
|
||||
PebbleEngine.Builder builder = newPebbleEngineBuilder();
|
||||
|
||||
@@ -35,6 +35,10 @@ public final class YamlParser {
return read(input, cls, type(cls));
}

public static <T> T parse(String input, Class<T> cls, Boolean strict) {
return strict ? read(input, cls, type(cls)) : readNonStrict(input, cls, type(cls));
}

public static <T> T parse(Map<String, Object> input, Class<T> cls, Boolean strict) {
ObjectMapper currentMapper = strict ? STRICT_MAPPER : NON_STRICT_MAPPER;

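For illustration, the new `strict` flag could be used as below; `source` is a placeholder YAML string, `Flow` is only an example target class, and the exact leniency of the non-strict mapper is an assumption, not confirmed by this diff:

    // Both overloads exist in this change; the behavioural difference between the strict and
    // non-strict mappers is assumed (typically: fail on vs. tolerate unknown properties).
    Flow strict = YamlParser.parse(source, Flow.class, true);
    Flow lenient = YamlParser.parse(source, Flow.class, false);
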
@@ -81,7 +85,31 @@ public final class YamlParser {
|
||||
throw toConstraintViolationException(input, resource, e);
|
||||
}
|
||||
}
|
||||
|
||||
private static <T> T readNonStrict(String input, Class<T> objectClass, String resource) {
|
||||
try {
|
||||
return NON_STRICT_MAPPER.readValue(input, objectClass);
|
||||
} catch (JsonProcessingException e) {
|
||||
throw toConstraintViolationException(input, resource, e);
|
||||
}
|
||||
}
|
||||
private static String formatYamlErrorMessage(String originalMessage, JsonProcessingException e) {
|
||||
StringBuilder friendlyMessage = new StringBuilder();
|
||||
if (originalMessage.contains("Expected a field name")) {
|
||||
friendlyMessage.append("YAML syntax error: Invalid structure. Check indentation and ensure all fields are properly formatted.");
|
||||
} else if (originalMessage.contains("MappingStartEvent")) {
|
||||
friendlyMessage.append("YAML syntax error: Unexpected mapping start. Verify that scalar values are properly quoted if needed.");
|
||||
} else if (originalMessage.contains("Scalar value")) {
|
||||
friendlyMessage.append("YAML syntax error: Expected a simple value but found complex structure. Check for unquoted special characters.");
|
||||
} else {
|
||||
friendlyMessage.append("YAML parsing error: ").append(originalMessage.replaceAll("org\\.yaml\\.snakeyaml.*", "").trim());
|
||||
}
|
||||
if (e.getLocation() != null) {
|
||||
int line = e.getLocation().getLineNr();
|
||||
friendlyMessage.append(String.format(" (at line %d)", line));
|
||||
}
|
||||
// Return a generic but cleaner message for other YAML errors
|
||||
return friendlyMessage.toString();
|
||||
}
|
||||
@SuppressWarnings("unchecked")
|
||||
public static <T> ConstraintViolationException toConstraintViolationException(T target, String resource, JsonProcessingException e) {
|
||||
if (e.getCause() instanceof ConstraintViolationException constraintViolationException) {
|
||||
@@ -121,11 +149,12 @@ public final class YamlParser {
|
||||
)
|
||||
));
|
||||
} else {
|
||||
String userFriendlyMessage = formatYamlErrorMessage(e.getMessage(), e);
|
||||
return new ConstraintViolationException(
|
||||
"Illegal " + resource + " source: " + e.getMessage(),
|
||||
"Illegal " + resource + " source: " + userFriendlyMessage,
|
||||
Collections.singleton(
|
||||
ManualConstraintViolation.of(
|
||||
e.getCause() == null ? e.getMessage() : e.getMessage() + "\nCaused by: " + e.getCause().getMessage(),
|
||||
userFriendlyMessage,
|
||||
target,
|
||||
(Class<T>) target.getClass(),
|
||||
"yaml",
|
||||
@@ -136,4 +165,3 @@ public final class YamlParser {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -4,7 +4,6 @@ import com.cronutils.utils.VisibleForTesting;
|
||||
import io.kestra.core.exceptions.InternalException;
|
||||
import io.kestra.core.models.conditions.Condition;
|
||||
import io.kestra.core.models.conditions.ConditionContext;
|
||||
import io.kestra.core.models.conditions.ScheduleCondition;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.flows.Flow;
|
||||
import io.kestra.core.models.flows.FlowInterface;
|
||||
@@ -65,16 +64,6 @@ public class ConditionService {
|
||||
return this.valid(flow, conditions, conditionContext);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check that all conditions are valid.
|
||||
* Warning, this method throws if a condition cannot be evaluated.
|
||||
*/
|
||||
public boolean isValid(List<ScheduleCondition> conditions, ConditionContext conditionContext) throws InternalException {
|
||||
return conditions
|
||||
.stream()
|
||||
.allMatch(throwPredicate(condition -> condition.test(conditionContext)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Check that all conditions are valid.
|
||||
* Warning, this method throws if a condition cannot be evaluated.
|
||||
|
||||
@@ -754,7 +754,7 @@ public class ExecutionService {
var parentTaskRun = execution.findTaskRunByTaskRunId(taskRun.getParentTaskRunId());
Execution newExecution = execution;
if (parentTaskRun.getState().getCurrent() != State.Type.KILLED) {
newExecution = newExecution.withTaskRun(parentTaskRun.withState(State.Type.KILLED));
newExecution = newExecution.withTaskRun(parentTaskRun.withStateAndAttempt(State.Type.KILLED));
}
if (parentTaskRun.getParentTaskRunId() != null) {
return killParentTaskruns(parentTaskRun, newExecution);

@@ -92,7 +92,14 @@ public class FlowService {
|
||||
return flowRepository
|
||||
.orElseThrow(() -> new IllegalStateException("Cannot perform operation on flow. Cause: No FlowRepository"));
|
||||
}
|
||||
|
||||
private static String formatValidationError(String message) {
|
||||
if (message.startsWith("Illegal flow source:")) {
|
||||
// Already formatted by YamlParser, return as-is
|
||||
return message;
|
||||
}
|
||||
// For other validation errors, provide context
|
||||
return "Validation error: " + message;
|
||||
}
|
||||
/**
|
||||
* Evaluates all checks defined in the given flow using the provided inputs.
|
||||
* <p>
|
||||
@@ -174,10 +181,12 @@ public class FlowService {
|
||||
modelValidator.validate(pluginDefaultService.injectAllDefaults(flow, false));
|
||||
|
||||
} catch (ConstraintViolationException e) {
|
||||
validateConstraintViolationBuilder.constraints(e.getMessage());
|
||||
String friendlyMessage = formatValidationError(e.getMessage());
|
||||
validateConstraintViolationBuilder.constraints(friendlyMessage);
|
||||
} catch (FlowProcessingException e) {
|
||||
if (e.getCause() instanceof ConstraintViolationException) {
|
||||
validateConstraintViolationBuilder.constraints(e.getMessage());
|
||||
if (e.getCause() instanceof ConstraintViolationException cve) {
|
||||
String friendlyMessage = formatValidationError(cve.getMessage());
|
||||
validateConstraintViolationBuilder.constraints(friendlyMessage);
|
||||
} else {
|
||||
Throwable cause = e.getCause() != null ? e.getCause() : e;
|
||||
validateConstraintViolationBuilder.constraints("Unable to validate the flow: " + cause.getMessage());
|
||||
@@ -579,4 +588,4 @@ public class FlowService {
|
||||
private IllegalStateException noRepositoryException() {
|
||||
return new IllegalStateException("No repository found. Make sure the `kestra.repository.type` property is set.");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,5 @@
|
||||
package io.kestra.core.storages;
|
||||
|
||||
import io.kestra.core.repositories.NamespaceFileMetadataRepositoryInterface;
|
||||
import io.kestra.core.services.NamespaceService;
|
||||
import jakarta.annotation.Nullable;
|
||||
import org.slf4j.Logger;
|
||||
@@ -272,7 +271,13 @@ public class InternalStorage implements Storage {
|
||||
return this.storage.put(context.getTenantId(), context.getNamespace(), resolve, new BufferedInputStream(inputStream));
|
||||
}
|
||||
|
||||
@Override
|
||||
public Optional<StorageContext.Task> getTaskStorageContext() {
|
||||
return Optional.ofNullable((context instanceof StorageContext.Task task) ? task : null);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<FileAttributes> list(URI uri) throws IOException {
|
||||
return this.storage.list(context.getTenantId(), context.getNamespace(), uri);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -173,4 +173,6 @@ public interface Storage {
|
||||
* @return the task storage context
|
||||
*/
|
||||
Optional<StorageContext.Task> getTaskStorageContext();
|
||||
|
||||
List<FileAttributes> list(URI uri) throws IOException;
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
package io.kestra.core.test;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore;
|
||||
import io.kestra.core.models.DeletedInterface;
|
||||
import io.kestra.core.models.SoftDeletable;
|
||||
import io.kestra.core.models.HasSource;
|
||||
import io.kestra.core.models.HasUID;
|
||||
import io.kestra.core.models.TenantInterface;
|
||||
@@ -25,7 +25,7 @@ import java.util.List;
|
||||
@ToString
|
||||
@EqualsAndHashCode
|
||||
@TestSuiteValidation
|
||||
public class TestSuite implements HasUID, TenantInterface, DeletedInterface, HasSource {
|
||||
public class TestSuite implements HasUID, TenantInterface, SoftDeletable<TestSuite>, HasSource {
|
||||
|
||||
@NotNull
|
||||
@NotBlank
|
||||
@@ -85,10 +85,6 @@ public class TestSuite implements HasUID, TenantInterface, DeletedInterface, Has
|
||||
);
|
||||
}
|
||||
|
||||
public TestSuite delete() {
|
||||
return this.toBuilder().deleted(true).build();
|
||||
}
|
||||
|
||||
public TestSuite disable() {
|
||||
var disabled = true;
|
||||
return this.toBuilder()
|
||||
@@ -120,4 +116,9 @@ public class TestSuite implements HasUID, TenantInterface, DeletedInterface, Has
|
||||
|
||||
return yamlSource + String.format("\ndisabled: %s", disabled);
|
||||
}
|
||||
|
||||
@Override
|
||||
public TestSuite toDeleted() {
|
||||
return toBuilder().deleted(true).build();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
package io.kestra.core.test;
|
||||
|
||||
import io.kestra.core.models.DeletedInterface;
|
||||
import io.kestra.core.models.SoftDeletable;
|
||||
import io.kestra.core.models.HasUID;
|
||||
import io.kestra.core.models.TenantInterface;
|
||||
import io.kestra.core.test.flow.UnitTestResult;
|
||||
@@ -24,7 +24,7 @@ public record TestSuiteRunEntity(
|
||||
String flowId,
|
||||
TestState state,
|
||||
List<UnitTestResult> results
|
||||
) implements DeletedInterface, TenantInterface, HasUID {
|
||||
) implements SoftDeletable<TestSuiteRunEntity>, TenantInterface, HasUID {
|
||||
|
||||
public static TestSuiteRunEntity create(String tenantId, TestSuiteUid testSuiteUid, TestSuiteRunResult testSuiteRunResult) {
|
||||
return new TestSuiteRunEntity(
|
||||
@@ -43,23 +43,6 @@ public record TestSuiteRunEntity(
|
||||
);
|
||||
}
|
||||
|
||||
public TestSuiteRunEntity delete() {
|
||||
return new TestSuiteRunEntity(
|
||||
this.uid,
|
||||
this.id,
|
||||
this.tenantId,
|
||||
true,
|
||||
this.startDate,
|
||||
this.endDate,
|
||||
this.testSuiteId,
|
||||
this.testSuiteUid,
|
||||
this.namespace,
|
||||
this.flowId,
|
||||
this.state,
|
||||
this.results
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* only used for backup
|
||||
* @param newTenantId the tenant to migrate to
|
||||
@@ -86,6 +69,24 @@ public record TestSuiteRunEntity(
|
||||
return this.deleted;
|
||||
}
|
||||
|
||||
@Override
|
||||
public TestSuiteRunEntity toDeleted() {
|
||||
return new TestSuiteRunEntity(
|
||||
this.uid,
|
||||
this.id,
|
||||
this.tenantId,
|
||||
true,
|
||||
this.startDate,
|
||||
this.endDate,
|
||||
this.testSuiteId,
|
||||
this.testSuiteUid,
|
||||
this.namespace,
|
||||
this.flowId,
|
||||
this.state,
|
||||
this.results
|
||||
);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getTenantId() {
|
||||
return this.tenantId;
|
||||
|
||||
@@ -1,13 +1,39 @@
package io.kestra.core.utils;

import io.kestra.core.models.Setting;
import io.kestra.core.repositories.SettingRepositoryInterface;
import jakarta.annotation.PostConstruct;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;

import java.util.Optional;

@Singleton
public class EditionProvider {
public Edition get() {
return Edition.OSS;
}

@Inject
private Optional<SettingRepositoryInterface> settingRepository; // repositories are not always there on unit tests

@PostConstruct
void start() {
// check the edition in the settings and update it if needed; we don't use it yet, but it would allow us to detect an incompatible update later if needed
settingRepository.ifPresent(settingRepositoryInterface -> persistEdition(settingRepositoryInterface, get()));
}

private void persistEdition(SettingRepositoryInterface settingRepositoryInterface, Edition edition) {
Optional<Setting> versionSetting = settingRepositoryInterface.findByKey(Setting.INSTANCE_EDITION);
if (versionSetting.isEmpty() || !versionSetting.get().getValue().equals(edition)) {
settingRepositoryInterface.save(Setting.builder()
.key(Setting.INSTANCE_EDITION)
.value(edition)
.build()
);
}
}

public enum Edition {
OSS,
EE

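The comment in start() hints at a later compatibility check. A hypothetical sketch of what such a check could look like, not part of this change; `settings` (a SettingRepositoryInterface), an injected `editionProvider` and a `logger` are assumed to be in scope:

    // Hypothetical follow-up: warn when an instance that previously ran EE is restarted on OSS.
    Optional<Setting> stored = settings.findByKey(Setting.INSTANCE_EDITION);
    if (stored.isPresent()
        && EditionProvider.Edition.EE.name().equals(String.valueOf(stored.get().getValue()))
        && editionProvider.get() == EditionProvider.Edition.OSS) {
        logger.warn("This instance previously ran the Enterprise Edition; stored data may not be fully compatible.");
    }
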
@@ -23,7 +23,6 @@ import io.kestra.core.serializers.ListOrMapOfLabelSerializer;
import io.kestra.core.services.StorageService;
import io.kestra.core.storages.FileAttributes;
import io.kestra.core.storages.StorageContext;
import io.kestra.core.storages.StorageInterface;
import io.kestra.core.storages.StorageSplitInterface;
import io.kestra.core.utils.GraphUtils;
import io.kestra.core.validations.NoSystemLabelValidation;
@@ -540,7 +539,7 @@ public class ForEachItem extends Task implements FlowableTask<VoidOutput>, Child
.numberOfBatches((Integer) taskRun.getOutputs().get(ExecutableUtils.TASK_VARIABLE_NUMBER_OF_BATCHES));

try (ByteArrayOutputStream bos = new ByteArrayOutputStream()) {
FileSerde.write(bos, FlowInputOutput.renderFlowOutputs(flow.getOutputs(), runContext));
FileSerde.write(bos, runContext.inputAndOutput().renderOutputs(flow.getOutputs()));
URI uri = runContext.storage().putFile(
new ByteArrayInputStream(bos.toByteArray()),
URI.create((String) taskRun.getOutputs().get("uri"))
@@ -602,9 +601,8 @@ public class ForEachItem extends Task implements FlowableTask<VoidOutput>, Child
String subflowOutputsBase = (String) taskOutput.get(ExecutableUtils.TASK_VARIABLE_SUBFLOW_OUTPUTS_BASE_URI);
URI subflowOutputsBaseUri = URI.create(StorageContext.KESTRA_PROTOCOL + subflowOutputsBase + "/");

StorageInterface storage = ((DefaultRunContext) runContext).getApplicationContext().getBean(StorageInterface.class);
if (storage.exists(runContext.flowInfo().tenantId(), runContext.flowInfo().namespace(), subflowOutputsBaseUri)) {
List<FileAttributes> list = storage.list(runContext.flowInfo().tenantId(), runContext.flowInfo().namespace(), subflowOutputsBaseUri);
if (runContext.storage().isFileExist(subflowOutputsBaseUri)) {
List<FileAttributes> list = runContext.storage().list(subflowOutputsBaseUri);

if (!list.isEmpty()) {
// Merge outputs from each sub-flow into a single file stored in the internal storage.

@@ -63,7 +63,8 @@ import java.util.*;

  - id: run_post_approval
    type: io.kestra.plugin.scripts.shell.Commands
    runner: PROCESS
    taskRunner:
      type: io.kestra.plugin.core.runner.Process
    commands:
      - echo "Manual approval received! Continuing the execution..."

@@ -18,7 +18,6 @@ import io.kestra.core.models.tasks.ExecutableTask;
|
||||
import io.kestra.core.models.tasks.Task;
|
||||
import io.kestra.core.runners.DefaultRunContext;
|
||||
import io.kestra.core.runners.ExecutableUtils;
|
||||
import io.kestra.core.runners.FlowInputOutput;
|
||||
import io.kestra.core.runners.FlowMetaStoreInterface;
|
||||
import io.kestra.core.runners.RunContext;
|
||||
import io.kestra.core.runners.SubflowExecution;
|
||||
@@ -38,7 +37,6 @@ import lombok.Getter;
|
||||
import lombok.NoArgsConstructor;
|
||||
import lombok.ToString;
|
||||
import lombok.experimental.SuperBuilder;
|
||||
import org.slf4j.event.Level;
|
||||
|
||||
import java.time.ZonedDateTime;
|
||||
import java.util.Collections;
|
||||
@@ -246,11 +244,11 @@ public class Subflow extends Task implements ExecutableTask<Subflow.Output>, Chi
|
||||
|
||||
if (subflowOutputs != null && !subflowOutputs.isEmpty()) {
|
||||
try {
|
||||
Map<String, Object> rOutputs = FlowInputOutput.renderFlowOutputs(subflowOutputs, runContext);
|
||||
var inputAndOutput = runContext.inputAndOutput();
|
||||
Map<String, Object> rOutputs = inputAndOutput.renderOutputs(subflowOutputs);
|
||||
|
||||
FlowInputOutput flowInputOutput = ((DefaultRunContext)runContext).getApplicationContext().getBean(FlowInputOutput.class); // this is hacking
|
||||
if (flow.getOutputs() != null && flowInputOutput != null) {
|
||||
rOutputs = flowInputOutput.typedOutputs(flow, execution, rOutputs);
|
||||
if (flow.getOutputs() != null) {
|
||||
rOutputs = inputAndOutput.typedOutputs(flow, execution, rOutputs);
|
||||
}
|
||||
builder.outputs(rOutputs);
|
||||
} catch (Exception e) {
|
||||
|
||||
@@ -2,10 +2,8 @@ package io.kestra.plugin.core.namespace;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonSubTypes;
|
||||
import com.fasterxml.jackson.annotation.JsonTypeInfo;
|
||||
import io.kestra.core.repositories.NamespaceFileMetadataRepositoryInterface;
|
||||
import io.kestra.core.storages.Namespace;
|
||||
import io.kestra.core.storages.NamespaceFile;
|
||||
import io.kestra.plugin.core.kv.Version;
|
||||
import io.micronaut.core.annotation.Introspected;
|
||||
import lombok.Getter;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
@@ -0,0 +1,107 @@
|
||||
package io.kestra.plugin.core.trigger;
|
||||
|
||||
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
|
||||
import io.kestra.core.models.Label;
|
||||
import io.kestra.core.models.conditions.ConditionContext;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.executions.ExecutionTrigger;
|
||||
import io.kestra.core.models.flows.FlowInterface;
|
||||
import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.models.triggers.AbstractTrigger;
|
||||
import io.kestra.core.models.triggers.Backfill;
|
||||
import io.kestra.core.models.triggers.Schedulable;
|
||||
import io.kestra.core.models.triggers.TriggerContext;
|
||||
import io.kestra.core.runners.RunContext;
|
||||
import io.kestra.core.services.LabelService;
|
||||
import io.kestra.core.utils.ListUtils;
|
||||
|
||||
import java.time.ZonedDateTime;
|
||||
import java.time.chrono.ChronoZonedDateTime;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
|
||||
/**
|
||||
* Factory class for constructing a new {@link Execution} from a {@link Schedulable} trigger.
|
||||
*
|
||||
* @see io.kestra.plugin.core.trigger.Schedule
|
||||
* @see io.kestra.plugin.core.trigger.ScheduleOnDates
|
||||
*/
|
||||
final class SchedulableExecutionFactory {
|
||||
|
||||
static Execution createFailedExecution(Schedulable trigger, ConditionContext conditionContext, TriggerContext triggerContext) throws IllegalVariableEvaluationException {
|
||||
return Execution.builder()
|
||||
.id(conditionContext.getRunContext().getTriggerExecutionId())
|
||||
.tenantId(triggerContext.getTenantId())
|
||||
.namespace(triggerContext.getNamespace())
|
||||
.flowId(triggerContext.getFlowId())
|
||||
.flowRevision(conditionContext.getFlow().getRevision())
|
||||
.labels(SchedulableExecutionFactory.getLabels(trigger, conditionContext.getRunContext(), triggerContext.getBackfill(), conditionContext.getFlow()))
|
||||
.state(new State().withState(State.Type.FAILED))
|
||||
.build();
|
||||
}
|
||||
|
||||
static Execution createExecution(Schedulable trigger, ConditionContext conditionContext, TriggerContext triggerContext, Map<String, Object> variables, ZonedDateTime scheduleDate) throws IllegalVariableEvaluationException {
|
||||
RunContext runContext = conditionContext.getRunContext();
|
||||
ExecutionTrigger executionTrigger = ExecutionTrigger.of((AbstractTrigger) trigger, variables);
|
||||
|
||||
List<Label> labels = getLabels(trigger, runContext, triggerContext.getBackfill(), conditionContext.getFlow());
|
||||
|
||||
List<Label> executionLabels = new ArrayList<>(ListUtils.emptyOnNull(labels));
|
||||
executionLabels.add(new Label(Label.FROM, "trigger"));
|
||||
if (executionLabels.stream().noneMatch(label -> Label.CORRELATION_ID.equals(label.key()))) {
|
||||
// add a correlation ID if none exist
|
||||
executionLabels.add(new Label(Label.CORRELATION_ID, runContext.getTriggerExecutionId()));
|
||||
}
|
||||
|
||||
Execution execution = Execution.builder()
|
||||
.id(runContext.getTriggerExecutionId())
|
||||
.tenantId(triggerContext.getTenantId())
|
||||
.namespace(triggerContext.getNamespace())
|
||||
.flowId(triggerContext.getFlowId())
|
||||
.flowRevision(conditionContext.getFlow().getRevision())
|
||||
.variables(conditionContext.getFlow().getVariables())
|
||||
.labels(executionLabels)
|
||||
.state(new State())
|
||||
.trigger(executionTrigger)
|
||||
.scheduleDate(Optional.ofNullable(scheduleDate).map(ChronoZonedDateTime::toInstant).orElse(null))
|
||||
.build();
|
||||
|
||||
Map<String, Object> allInputs = getInputs(trigger, runContext, triggerContext.getBackfill());
|
||||
|
||||
// add inputs and inject defaults (FlowInputOutput handles defaults internally)
|
||||
execution = execution.withInputs(runContext.inputAndOutput().readInputs(conditionContext.getFlow(), execution, allInputs));
|
||||
|
||||
return execution;
|
||||
}
|
||||
|
||||
private static Map<String, Object> getInputs(Schedulable trigger, RunContext runContext, Backfill backfill) throws IllegalVariableEvaluationException {
|
||||
Map<String, Object> inputs = new HashMap<>();
|
||||
|
||||
if (trigger.getInputs() != null) {
|
||||
inputs.putAll(runContext.render(trigger.getInputs()));
|
||||
}
|
||||
|
||||
if (backfill != null && backfill.getInputs() != null) {
|
||||
inputs.putAll(runContext.render(backfill.getInputs()));
|
||||
}
|
||||
|
||||
return inputs;
|
||||
}
|
||||
|
||||
private static List<Label> getLabels(Schedulable trigger, RunContext runContext, Backfill backfill, FlowInterface flow) throws IllegalVariableEvaluationException {
|
||||
List<Label> labels = LabelService.fromTrigger(runContext, flow, (AbstractTrigger) trigger);
|
||||
|
||||
if (backfill != null && backfill.getLabels() != null) {
|
||||
for (Label label : backfill.getLabels()) {
|
||||
final var value = runContext.render(label.value());
|
||||
if (value != null) {
|
||||
labels.add(new Label(label.key(), value));
|
||||
}
|
||||
}
|
||||
}
|
||||
return labels;
|
||||
}
|
||||
}
|
||||
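The Schedule and ScheduleOnDates changes below delegate execution creation to this factory. For orientation, the call shape is roughly as follows; `variables` and `scheduleDate` are whatever the trigger computed, and `scheduleDate` may be null:

    Execution execution = SchedulableExecutionFactory.createExecution(
        this,              // the Schedulable trigger itself
        conditionContext,
        triggerContext,
        variables,         // e.g. the computed schedule dates as a map
        scheduleDate       // null when there is no explicit schedule date
    );
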
@@ -6,9 +6,7 @@ import com.cronutils.model.time.ExecutionTime;
|
||||
import com.cronutils.parser.CronParser;
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
|
||||
import io.kestra.core.exceptions.InternalException;
|
||||
import io.kestra.core.models.Label;
|
||||
import io.kestra.core.models.annotations.Example;
|
||||
import io.kestra.core.models.annotations.Plugin;
|
||||
import io.kestra.core.models.annotations.PluginProperty;
|
||||
@@ -16,12 +14,8 @@ import io.kestra.core.models.conditions.Condition;
|
||||
import io.kestra.core.models.conditions.ConditionContext;
|
||||
import io.kestra.core.models.conditions.ScheduleCondition;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.models.triggers.*;
|
||||
import io.kestra.core.runners.DefaultRunContext;
|
||||
import io.kestra.core.runners.RunContext;
|
||||
import io.kestra.core.services.ConditionService;
|
||||
import io.kestra.core.services.LabelService;
|
||||
import io.kestra.core.utils.ListUtils;
|
||||
import io.kestra.core.validations.ScheduleValidation;
|
||||
import io.kestra.core.validations.TimezoneId;
|
||||
@@ -29,6 +23,7 @@ import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import jakarta.validation.Valid;
|
||||
import jakarta.validation.constraints.NotNull;
|
||||
import jakarta.validation.constraints.Null;
|
||||
import lombok.AccessLevel;
|
||||
import lombok.*;
|
||||
import lombok.experimental.SuperBuilder;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
@@ -40,6 +35,8 @@ import java.time.temporal.ChronoUnit;
|
||||
import java.util.*;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import static io.kestra.core.utils.Rethrow.throwPredicate;
|
||||
|
||||
@Slf4j
|
||||
@SuperBuilder
|
||||
@ToString
|
||||
@@ -224,11 +221,7 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
|
||||
@PluginProperty
|
||||
@Deprecated
|
||||
private List<ScheduleCondition> scheduleConditions;
|
||||
|
||||
@Schema(
|
||||
title = "The inputs to pass to the scheduled flow"
|
||||
)
|
||||
@PluginProperty(dynamic = true)
|
||||
|
||||
private Map<String, Object> inputs;
|
||||
|
||||
@Schema(
|
||||
@@ -248,13 +241,7 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
|
||||
@PluginProperty
|
||||
@Deprecated
|
||||
private Map<String, Object> backfill;
|
||||
|
||||
@Schema(
|
||||
title = "Action to take in the case of missed schedules",
|
||||
description = "`ALL` will recover all missed schedules, `LAST` will only recovered the last missing one, `NONE` will not recover any missing schedule.\n" +
|
||||
"The default is `ALL` unless a different value is configured using the global plugin configuration."
|
||||
)
|
||||
@PluginProperty
|
||||
|
||||
private RecoverMissedSchedules recoverMissedSchedules;
|
||||
|
||||
@Override
|
||||
@@ -403,20 +390,11 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
|
||||
if (!conditionResults) {
|
||||
return Optional.empty();
|
||||
}
|
||||
} catch(InternalException ie) {
|
||||
} catch (InternalException ie) {
|
||||
// validate schedule condition can fail to render variables
|
||||
// in this case, we return a failed execution so the trigger is not evaluated each second
|
||||
runContext.logger().error("Unable to evaluate the Schedule trigger '{}'", this.getId(), ie);
|
||||
Execution execution = Execution.builder()
|
||||
.id(runContext.getTriggerExecutionId())
|
||||
.tenantId(triggerContext.getTenantId())
|
||||
.namespace(triggerContext.getNamespace())
|
||||
.flowId(triggerContext.getFlowId())
|
||||
.flowRevision(conditionContext.getFlow().getRevision())
|
||||
.labels(generateLabels(runContext, conditionContext, backfill))
|
||||
.state(new State().withState(State.Type.FAILED))
|
||||
.build();
|
||||
return Optional.of(execution);
|
||||
return Optional.of(SchedulableExecutionFactory.createFailedExecution(this, conditionContext, triggerContext));
|
||||
}
|
||||
|
||||
// recalculate true output for previous and next based on conditions
|
||||
@@ -430,14 +408,12 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
|
||||
variables = scheduleDates.toMap();
|
||||
}
|
||||
|
||||
Execution execution = TriggerService.generateScheduledExecution(
|
||||
Execution execution = SchedulableExecutionFactory.createExecution(
|
||||
this,
|
||||
conditionContext,
|
||||
triggerContext,
|
||||
generateLabels(runContext, conditionContext, backfill),
|
||||
generateInputs(runContext, backfill),
|
||||
variables,
|
||||
Optional.empty()
|
||||
null
|
||||
);
|
||||
|
||||
return Optional.of(execution);
|
||||
@@ -448,34 +424,6 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
|
||||
return parser.parse(this.cron);
|
||||
}
|
||||
|
||||
private List<Label> generateLabels(RunContext runContext, ConditionContext conditionContext, Backfill backfill) throws IllegalVariableEvaluationException {
|
||||
List<Label> labels = LabelService.fromTrigger(runContext, conditionContext.getFlow(), this);
|
||||
|
||||
if (backfill != null && backfill.getLabels() != null) {
|
||||
for (Label label : backfill.getLabels()) {
|
||||
final var value = runContext.render(label.value());
|
||||
if (value != null) {
|
||||
labels.add(new Label(label.key(), value));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return labels;
|
||||
}
|
||||
|
||||
private Map<String, Object> generateInputs(RunContext runContext, Backfill backfill) throws IllegalVariableEvaluationException {
|
||||
Map<String, Object> inputs = new HashMap<>();
|
||||
|
||||
if (this.inputs != null) {
|
||||
inputs.putAll(runContext.render(this.inputs));
|
||||
}
|
||||
|
||||
if (backfill != null && backfill.getInputs() != null) {
|
||||
inputs.putAll(runContext.render(backfill.getInputs()));
|
||||
}
|
||||
|
||||
return inputs;
|
||||
}
|
||||
private Optional<Output> scheduleDates(ExecutionTime executionTime, ZonedDateTime date) {
|
||||
Optional<ZonedDateTime> next = executionTime.nextExecution(date.minus(Duration.ofSeconds(1)));
|
||||
|
||||
@@ -549,9 +497,9 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
|
||||
Optional<ZonedDateTime> truePreviousNextDateWithCondition(ExecutionTime executionTime, ConditionContext conditionContext, ZonedDateTime toTestDate, boolean next) throws InternalException {
|
||||
int upperYearBound = ZonedDateTime.now().getYear() + 10;
|
||||
int lowerYearBound = ZonedDateTime.now().getYear() - 10;
|
||||
|
||||
|
||||
while ((next && toTestDate.getYear() < upperYearBound) || (!next && toTestDate.getYear() > lowerYearBound)) {
|
||||
|
||||
|
||||
Optional<ZonedDateTime> currentDate = next ?
|
||||
executionTime.nextExecution(toTestDate) :
|
||||
executionTime.lastExecution(toTestDate);
|
||||
@@ -607,11 +555,10 @@ public class Schedule extends AbstractTrigger implements Schedulable, TriggerOut
|
||||
|
||||
private boolean validateScheduleCondition(ConditionContext conditionContext) throws InternalException {
|
||||
if (conditions != null) {
|
||||
ConditionService conditionService = ((DefaultRunContext)conditionContext.getRunContext()).getApplicationContext().getBean(ConditionService.class);
|
||||
return conditionService.isValid(
|
||||
conditions.stream().filter(c -> c instanceof ScheduleCondition).map(c -> (ScheduleCondition) c).toList(),
|
||||
conditionContext
|
||||
);
|
||||
return conditions.stream()
|
||||
.filter(c -> c instanceof ScheduleCondition)
|
||||
.map(c -> (ScheduleCondition) c)
|
||||
.allMatch(throwPredicate(condition -> condition.test(conditionContext)));
|
||||
}
|
||||
|
||||
return true;
|
||||
|
||||
@@ -10,7 +10,6 @@ import io.kestra.core.models.property.Property;
|
||||
import io.kestra.core.models.tasks.VoidOutput;
|
||||
import io.kestra.core.models.triggers.*;
|
||||
import io.kestra.core.runners.RunContext;
|
||||
import io.kestra.core.services.LabelService;
|
||||
import io.kestra.core.validations.TimezoneId;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import jakarta.validation.constraints.NotNull;
|
||||
@@ -23,7 +22,10 @@ import java.time.Duration;
|
||||
import java.time.ZoneId;
|
||||
import java.time.ZonedDateTime;
|
||||
import java.time.temporal.ChronoUnit;
|
||||
import java.util.*;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.function.Predicate;
|
||||
|
||||
import static io.kestra.core.utils.Rethrow.throwFunction;
|
||||
@@ -45,11 +47,7 @@ public class ScheduleOnDates extends AbstractTrigger implements Schedulable, Tri
|
||||
@Builder.Default
|
||||
@Null
|
||||
private final Duration interval = null;
|
||||
|
||||
@Schema(
|
||||
title = "The inputs to pass to the scheduled flow"
|
||||
)
|
||||
@PluginProperty(dynamic = true)
|
||||
|
||||
private Map<String, Object> inputs;
|
||||
|
||||
@TimezoneId
|
||||
@@ -63,31 +61,24 @@ public class ScheduleOnDates extends AbstractTrigger implements Schedulable, Tri
|
||||
@NotNull
|
||||
private Property<List<ZonedDateTime>> dates;
|
||||
|
||||
@Schema(
|
||||
title = "Action to take in the case of missed schedules",
|
||||
description = "`ALL` will recover all missed schedules, `LAST` will only recovered the last missing one, `NONE` will not recover any missing schedule.\n" +
|
||||
"The default is `ALL` unless a different value is configured using the global plugin configuration."
|
||||
)
|
||||
@PluginProperty
|
||||
private RecoverMissedSchedules recoverMissedSchedules;
|
||||
|
||||
@Override
|
||||
public Optional<Execution> evaluate(ConditionContext conditionContext, TriggerContext triggerContext) throws Exception {
|
||||
RunContext runContext = conditionContext.getRunContext();
|
||||
|
||||
ZonedDateTime lastEvaluation = triggerContext.getDate();
|
||||
Optional<ZonedDateTime> nextDate = nextDate(runContext, date -> date.isEqual(lastEvaluation) || date.isAfter(lastEvaluation));
|
||||
|
||||
if (nextDate.isPresent()) {
|
||||
log.info("Schedule execution on {}", nextDate.get());
|
||||
|
||||
Execution execution = TriggerService.generateScheduledExecution(
|
||||
Execution execution = SchedulableExecutionFactory.createExecution(
|
||||
this,
|
||||
conditionContext,
|
||||
triggerContext,
|
||||
LabelService.fromTrigger(runContext, conditionContext.getFlow(), this),
|
||||
this.inputs != null ? runContext.render(this.inputs) : Collections.emptyMap(),
|
||||
Collections.emptyMap(),
|
||||
nextDate
|
||||
nextDate.orElse(null)
|
||||
);
|
||||
|
||||
return Optional.of(execution);
|
||||
@@ -97,29 +88,21 @@ public class ScheduleOnDates extends AbstractTrigger implements Schedulable, Tri
|
||||
}
|
||||
|
||||
@Override
|
||||
public ZonedDateTime nextEvaluationDate(ConditionContext conditionContext, Optional<? extends TriggerContext> last) {
|
||||
try {
|
||||
return last
|
||||
.map(throwFunction(context ->
|
||||
nextDate(conditionContext.getRunContext(), date -> date.isAfter(context.getDate()))
|
||||
.orElse(ZonedDateTime.now().plusYears(1))
|
||||
))
|
||||
.orElse(conditionContext.getRunContext()
|
||||
.render(dates)
|
||||
.asList(ZonedDateTime.class)
|
||||
.stream()
|
||||
.sorted()
|
||||
.findFirst()
|
||||
.orElse(ZonedDateTime.now()))
|
||||
.truncatedTo(ChronoUnit.SECONDS);
|
||||
} catch (IllegalVariableEvaluationException e) {
|
||||
log.warn("Failed to evaluate schedule dates for trigger '{}': {}", this.getId(), e.getMessage());
|
||||
return ZonedDateTime.now().plusYears(1);
|
||||
}
|
||||
public ZonedDateTime nextEvaluationDate(ConditionContext conditionContext, Optional<? extends TriggerContext> triggerContext) {
|
||||
return triggerContext
|
||||
.map(ctx -> ctx.getBackfill() != null ? ctx.getBackfill().getCurrentDate() : ctx.getDate())
|
||||
.map(this::withTimeZone)
|
||||
.or(() -> Optional.of(ZonedDateTime.now()))
|
||||
.flatMap(dt -> {
|
||||
try {
|
||||
return nextDate(conditionContext.getRunContext(), date -> date.isAfter(dt));
|
||||
} catch (IllegalVariableEvaluationException e) {
|
||||
log.warn("Failed to evaluate schedule dates for trigger '{}': {}", this.getId(), e.getMessage());
|
||||
throw new InvalidTriggerConfigurationException("Failed to evaluate schedule 'dates'. Cause: " + e.getMessage());
|
||||
}
|
||||
}).orElseGet(() -> ZonedDateTime.now().plusYears(1));
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Override
|
||||
public ZonedDateTime nextEvaluationDate() {
|
||||
// TODO this may be the next date from now?
|
||||
@@ -139,9 +122,17 @@ public class ScheduleOnDates extends AbstractTrigger implements Schedulable, Tri
|
||||
return previousDates.isEmpty() ? ZonedDateTime.now() : previousDates.getFirst();
|
||||
}
|
||||
|
||||
private Optional<ZonedDateTime> nextDate(RunContext runContext, Predicate<ZonedDateTime> filter) throws IllegalVariableEvaluationException {
|
||||
return runContext.render(dates).asList(ZonedDateTime.class).stream().sorted()
|
||||
.filter(date -> filter.test(date))
|
||||
private ZonedDateTime withTimeZone(ZonedDateTime date) {
|
||||
if (this.timezone == null) {
|
||||
return date;
|
||||
}
|
||||
return date.withZoneSameInstant(ZoneId.of(this.timezone));
|
||||
}
|
||||
|
||||
private Optional<ZonedDateTime> nextDate(RunContext runContext, Predicate<ZonedDateTime> predicate) throws IllegalVariableEvaluationException {
|
||||
return runContext.render(dates)
|
||||
.asList(ZonedDateTime.class).stream().sorted()
|
||||
.filter(predicate)
|
||||
.map(throwFunction(date -> timezone == null ? date : date.withZoneSameInstant(ZoneId.of(runContext.render(timezone)))))
|
||||
.findFirst()
|
||||
.map(date -> date.truncatedTo(ChronoUnit.SECONDS));
|
||||
|
||||
@@ -170,10 +170,11 @@ class JsonSchemaGeneratorTest {

Map<String, Object> jsonSchema = jsonSchemaGenerator.generate(AbstractTrigger.class, AbstractTrigger.class);
assertThat((Map<String, Object>) jsonSchema.get("properties"), allOf(
Matchers.aMapWithSize(3),
Matchers.aMapWithSize(4),
hasKey("conditions"),
hasKey("stopAfter"),
hasKey("type")
hasKey("type"),
hasKey("allowConcurrent")
));
});
}

@@ -1,24 +1,36 @@
|
||||
package io.kestra.core.models.property;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonSubTypes;
|
||||
import com.fasterxml.jackson.annotation.JsonTypeInfo;
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import io.kestra.core.context.TestRunContextFactory;
|
||||
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
|
||||
import io.kestra.core.junit.annotations.KestraTest;
|
||||
import io.kestra.core.serializers.FileSerde;
|
||||
import io.kestra.core.serializers.JacksonMapper;
|
||||
import io.kestra.core.storages.Namespace;
|
||||
import io.kestra.core.storages.NamespaceFile;
|
||||
import io.kestra.core.storages.StorageInterface;
|
||||
import io.kestra.plugin.core.namespace.Version;
|
||||
import io.micronaut.core.annotation.Introspected;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.validation.ConstraintViolationException;
|
||||
import lombok.Builder;
|
||||
import lombok.Getter;
|
||||
import lombok.NoArgsConstructor;
|
||||
import lombok.experimental.SuperBuilder;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.slf4j.event.Level;
|
||||
import reactor.core.publisher.Flux;
|
||||
|
||||
import java.io.FileInputStream;
|
||||
import java.io.IOException;
|
||||
import java.net.URI;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
|
||||
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
|
||||
import static java.util.Map.entry;
|
||||
@@ -362,10 +374,43 @@ class PropertyTest {
|
||||
assertThat(output.getMessages().getFirst().getValue()).isEqualTo("value1");
|
||||
}
|
||||
|
||||
@Test
|
||||
void jsonSubtype() throws JsonProcessingException, IllegalVariableEvaluationException {
|
||||
Optional<WithSubtype> rendered = runContextFactory.of().render(
|
||||
Property.<WithSubtype>ofExpression(JacksonMapper.ofJson().writeValueAsString(new MySubtype()))
|
||||
).as(WithSubtype.class);
|
||||
|
||||
assertThat(rendered).isPresent();
|
||||
assertThat(rendered.get()).isInstanceOf(MySubtype.class);
|
||||
|
||||
List<WithSubtype> renderedList = runContextFactory.of().render(
|
||||
Property.<List<WithSubtype>>ofExpression(JacksonMapper.ofJson().writeValueAsString(List.of(new MySubtype())))
|
||||
).asList(WithSubtype.class);
|
||||
assertThat(renderedList).hasSize(1);
|
||||
assertThat(renderedList.get(0)).isInstanceOf(MySubtype.class);
|
||||
}
|
||||
|
||||
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", visible = true, include = JsonTypeInfo.As.EXISTING_PROPERTY)
|
||||
@JsonSubTypes({
|
||||
@JsonSubTypes.Type(value = MySubtype.class, name = "mySubtype")
|
||||
})
|
||||
@Getter
|
||||
@NoArgsConstructor
|
||||
@Introspected
|
||||
public abstract static class WithSubtype {
|
||||
abstract public String getType();
|
||||
}
|
||||
|
||||
@Getter
|
||||
public static class MySubtype extends WithSubtype {
|
||||
private final String type = "mySubtype";
|
||||
}
|
||||
|
||||
|
||||
@Builder
|
||||
@Getter
|
||||
private static class TestObj {
|
||||
private String key;
|
||||
private String value;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -60,6 +60,15 @@ class SystemInformationReportTest {
|
||||
return setting;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Setting internalSave(Setting setting) throws ConstraintViolationException {
|
||||
if (setting.getKey().equals(Setting.INSTANCE_UUID)) {
|
||||
UUID = setting.getValue();
|
||||
}
|
||||
|
||||
return setting;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Setting delete(Setting setting) {
|
||||
return setting;
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
package io.kestra.core.repositories;
|
||||
|
||||
import com.devskiller.friendly_id.FriendlyId;
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.exceptions.InvalidQueryFiltersException;
|
||||
import io.kestra.core.junit.annotations.FlakyTest;
|
||||
import io.kestra.core.junit.annotations.KestraTest;
|
||||
import io.kestra.core.models.Label;
|
||||
import io.kestra.core.models.QueryFilter;
|
||||
@@ -24,7 +24,6 @@ import io.kestra.core.models.flows.State.Type;
|
||||
import io.kestra.core.models.property.Property;
|
||||
import io.kestra.core.models.tasks.ResolvedTask;
|
||||
import io.kestra.core.repositories.ExecutionRepositoryInterface.ChildFilter;
|
||||
import io.kestra.core.serializers.JacksonMapper;
|
||||
import io.kestra.core.utils.IdUtils;
|
||||
import io.kestra.core.utils.NamespaceUtils;
|
||||
import io.kestra.core.utils.TestsUtils;
|
||||
@@ -42,10 +41,9 @@ import org.junit.jupiter.params.provider.MethodSource;
|
||||
import org.slf4j.event.Level;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.sql.Timestamp;
|
||||
import java.time.*;
|
||||
import java.time.format.DateTimeFormatter;
|
||||
import java.time.temporal.ChronoUnit;
|
||||
import java.time.Duration;
|
||||
import java.time.Instant;
|
||||
import java.time.ZonedDateTime;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.function.Function;
|
||||
@@ -185,6 +183,7 @@ public abstract class AbstractExecutionRepositoryTest {
|
||||
|
||||
@ParameterizedTest
|
||||
@MethodSource("filterCombinations")
|
||||
@FlakyTest(description = "Filtering tests are sometimes returning 0")
|
||||
void should_find_all(QueryFilter filter, int expectedSize){
|
||||
var tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
inject(tenant, "executionTriggerId");
|
||||
|
||||
@@ -78,6 +78,7 @@ public abstract class AbstractRunnerConcurrencyTest {
|
||||
}
|
||||
|
||||
@Test
|
||||
@FlakyTest(description = "Only flaky in CI")
|
||||
@LoadFlows(value = {"flows/valids/flow-concurrency-queue-killed.yml"}, tenantId = "flow-concurrency-killed")
|
||||
void flowConcurrencyKilled() throws Exception {
|
||||
flowConcurrencyCaseTest.flowConcurrencyKilled("flow-concurrency-killed");
|
||||
|
||||
@@ -6,6 +6,7 @@ import io.kestra.core.junit.annotations.KestraTest;
|
||||
import io.kestra.core.junit.annotations.LoadFlows;
|
||||
import io.kestra.core.models.Label;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.executions.TaskRun;
|
||||
import io.kestra.core.models.flows.Flow;
|
||||
import io.kestra.core.models.flows.FlowWithSource;
|
||||
import io.kestra.core.models.flows.GenericFlow;
|
||||
@@ -466,4 +467,20 @@ class ExecutionServiceTest {
|
||||
assertThat(restart.getTaskRunList()).hasSize(2);
|
||||
assertThat(restart.findTaskRunsByTaskId("make_error").getFirst().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
}
|
||||
|
||||
@Test
|
||||
@LoadFlows({"flows/valids/each-pause.yaml"})
|
||||
void killExecutionWithFlowableTask() throws Exception {
|
||||
Execution execution = runnerUtils.runOneUntilPaused(MAIN_TENANT, "io.kestra.tests", "each-pause");
|
||||
|
||||
TaskRun childTaskRun = execution.getTaskRunList().stream().filter(tr -> tr.getTaskId().equals("pause")).toList().getFirst();
|
||||
|
||||
Execution killed = executionService.killParentTaskruns(childTaskRun,execution);
|
||||
|
||||
TaskRun parentTaskRun = killed.getTaskRunList().stream().filter(tr -> tr.getTaskId().equals("each_task")).toList().getFirst();
|
||||
|
||||
assertThat(parentTaskRun.getState().getCurrent()).isEqualTo(State.Type.KILLED);
|
||||
assertThat(parentTaskRun.getAttempts().getLast().getState().getCurrent()).isEqualTo(State.Type.KILLED);
|
||||
|
||||
}
|
||||
}
|
||||
@@ -178,7 +178,10 @@ public class FlowConcurrencyCaseTest {
|
||||
// we restart the first one, it should be queued then fail again.
|
||||
Execution failedExecution = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.FAILED), execution1);
|
||||
Execution restarted = executionService.restart(failedExecution, null);
|
||||
Execution executionResult1 = runnerUtils.restartExecution(e -> e.getState().getCurrent().equals(Type.FAILED), restarted);
|
||||
Execution executionResult1 = runnerUtils.restartExecution(
|
||||
e -> e.getState().getHistories().stream().anyMatch(history -> history.getState() == Type.RESTARTED) && e.getState().getCurrent().equals(Type.FAILED),
|
||||
restarted
|
||||
);
|
||||
Execution executionResult2 = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.FAILED), execution2);
|
||||
|
||||
assertThat(executionResult1.getState().getCurrent()).isEqualTo(Type.FAILED);
|
||||
@@ -278,7 +281,6 @@ public class FlowConcurrencyCaseTest {
|
||||
assertThat(queued.getState().getCurrent()).isEqualTo(Type.QUEUED);
|
||||
} finally {
|
||||
// kill everything to avoid dangling executions
|
||||
runnerUtils.killExecution(execution1);
|
||||
runnerUtils.killExecution(execution2);
|
||||
runnerUtils.killExecution(execution3);
|
||||
|
||||
@@ -321,7 +323,6 @@ public class FlowConcurrencyCaseTest {
|
||||
} finally {
|
||||
// kill everything to avoid dangling executions
|
||||
runnerUtils.killExecution(execution1);
|
||||
runnerUtils.killExecution(execution2);
|
||||
runnerUtils.killExecution(execution3);
|
||||
|
||||
// await that they are all terminated, note that as KILLED is received twice, some messages would still be pending, but this is the best we can do
|
||||
|
||||
@@ -239,7 +239,7 @@ class FlowInputOutputTest {
|
||||
// Then
|
||||
Assertions.assertEquals(2, values.size());
|
||||
Assertions.assertFalse(values.get(1).enabled());
|
||||
Assertions.assertNotNull(values.get(1).exception());
|
||||
Assertions.assertNotNull(values.get(1).exceptions());
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -257,7 +257,7 @@ class FlowInputOutputTest {
|
||||
List<InputAndValue> values = flowInputOutput.validateExecutionInputs(List.of(input), null, DEFAULT_TEST_EXECUTION, data).block();
|
||||
|
||||
// Then
|
||||
Assertions.assertNull(values.getFirst().exception());
|
||||
Assertions.assertNull(values.getFirst().exceptions());
|
||||
Assertions.assertFalse(storageInterface.exists(MAIN_TENANT, null, URI.create(values.getFirst().value().toString())));
|
||||
}
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@ package io.kestra.core.runners;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import com.google.common.io.CharStreams;
|
||||
import io.kestra.core.exceptions.InputOutputValidationException;
|
||||
import io.kestra.core.junit.annotations.KestraTest;
|
||||
import io.kestra.core.junit.annotations.LoadFlows;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
@@ -137,8 +138,8 @@ public class InputsTest {
|
||||
void missingRequired() {
|
||||
HashMap<String, Object> inputs = new HashMap<>(InputsTest.inputs);
|
||||
inputs.put("string", null);
|
||||
ConstraintViolationException e = assertThrows(ConstraintViolationException.class, () -> typedInputs(inputs, MAIN_TENANT));
|
||||
assertThat(e.getMessage()).contains("Invalid input for `string`, missing required input, but received `null`");
|
||||
InputOutputValidationException e = assertThrows(InputOutputValidationException.class, () -> typedInputs(inputs, MAIN_TENANT));
|
||||
assertThat(e.getMessage()).contains("Missing required input:string");
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -232,9 +233,9 @@ public class InputsTest {
|
||||
HashMap<String, Object> map = new HashMap<>(inputs);
|
||||
map.put("validatedString", "foo");
|
||||
|
||||
ConstraintViolationException e = assertThrows(ConstraintViolationException.class, () -> typedInputs(map, "tenant4"));
|
||||
InputOutputValidationException e = assertThrows(InputOutputValidationException.class, () -> typedInputs(map, "tenant4"));
|
||||
|
||||
assertThat(e.getMessage()).contains("Invalid input for `validatedString`, it must match the pattern");
|
||||
assertThat(e.getMessage()).contains( "Invalid value for input `validatedString`. Cause: it must match the pattern");
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -242,15 +243,15 @@ public class InputsTest {
|
||||
void inputValidatedIntegerBadValue() {
|
||||
HashMap<String, Object> mapMin = new HashMap<>(inputs);
|
||||
mapMin.put("validatedInt", "9");
|
||||
ConstraintViolationException e = assertThrows(ConstraintViolationException.class, () -> typedInputs(mapMin, "tenant5"));
|
||||
assertThat(e.getMessage()).contains("Invalid input for `validatedInt`, it must be more than `10`, but received `9`");
|
||||
InputOutputValidationException e = assertThrows(InputOutputValidationException.class, () -> typedInputs(mapMin, "tenant5"));
|
||||
assertThat(e.getMessage()).contains("Invalid value for input `validatedInt`. Cause: it must be more than `10`");
|
||||
|
||||
HashMap<String, Object> mapMax = new HashMap<>(inputs);
|
||||
mapMax.put("validatedInt", "21");
|
||||
|
||||
e = assertThrows(ConstraintViolationException.class, () -> typedInputs(mapMax, "tenant5"));
|
||||
e = assertThrows(InputOutputValidationException.class, () -> typedInputs(mapMax, "tenant5"));
|
||||
|
||||
assertThat(e.getMessage()).contains("Invalid input for `validatedInt`, it must be less than `20`, but received `21`");
|
||||
assertThat(e.getMessage()).contains("Invalid value for input `validatedInt`. Cause: it must be less than `20`");
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -258,15 +259,15 @@ public class InputsTest {
|
||||
void inputValidatedDateBadValue() {
|
||||
HashMap<String, Object> mapMin = new HashMap<>(inputs);
|
||||
mapMin.put("validatedDate", "2022-01-01");
|
||||
ConstraintViolationException e = assertThrows(ConstraintViolationException.class, () -> typedInputs(mapMin, "tenant6"));
|
||||
assertThat(e.getMessage()).contains("Invalid input for `validatedDate`, it must be after `2023-01-01`, but received `2022-01-01`");
|
||||
InputOutputValidationException e = assertThrows(InputOutputValidationException.class, () -> typedInputs(mapMin, "tenant6"));
|
||||
assertThat(e.getMessage()).contains("Invalid value for input `validatedDate`. Cause: it must be after `2023-01-01`");
|
||||
|
||||
HashMap<String, Object> mapMax = new HashMap<>(inputs);
|
||||
mapMax.put("validatedDate", "2024-01-01");

e = assertThrows(ConstraintViolationException.class, () -> typedInputs(mapMax, "tenant6"));
e = assertThrows(InputOutputValidationException.class, () -> typedInputs(mapMax, "tenant6"));

assertThat(e.getMessage()).contains("Invalid input for `validatedDate`, it must be before `2023-12-31`, but received `2024-01-01`");
assertThat(e.getMessage()).contains("Invalid value for input `validatedDate`. Cause: it must be before `2023-12-31`");
}

@Test
@@ -274,15 +275,15 @@ public class InputsTest {
void inputValidatedDateTimeBadValue() {
HashMap<String, Object> mapMin = new HashMap<>(inputs);
mapMin.put("validatedDateTime", "2022-01-01T00:00:00Z");
ConstraintViolationException e = assertThrows(ConstraintViolationException.class, () -> typedInputs(mapMin, "tenant7"));
assertThat(e.getMessage()).contains("Invalid input for `validatedDateTime`, it must be after `2023-01-01T00:00:00Z`, but received `2022-01-01T00:00:00Z`");
InputOutputValidationException e = assertThrows(InputOutputValidationException.class, () -> typedInputs(mapMin, "tenant7"));
assertThat(e.getMessage()).contains("Invalid value for input `validatedDateTime`. Cause: it must be after `2023-01-01T00:00:00Z`");

HashMap<String, Object> mapMax = new HashMap<>(inputs);
mapMax.put("validatedDateTime", "2024-01-01T00:00:00Z");

e = assertThrows(ConstraintViolationException.class, () -> typedInputs(mapMax, "tenant7"));
e = assertThrows(InputOutputValidationException.class, () -> typedInputs(mapMax, "tenant7"));

assertThat(e.getMessage()).contains("Invalid input for `validatedDateTime`, it must be before `2023-12-31T23:59:59Z`");
assertThat(e.getMessage()).contains("Invalid value for input `validatedDateTime`. Cause: it must be before `2023-12-31T23:59:59Z`");
}

@Test
@@ -290,15 +291,15 @@ public class InputsTest {
void inputValidatedDurationBadValue() {
HashMap<String, Object> mapMin = new HashMap<>(inputs);
mapMin.put("validatedDuration", "PT1S");
ConstraintViolationException e = assertThrows(ConstraintViolationException.class, () -> typedInputs(mapMin, "tenant8"));
assertThat(e.getMessage()).contains("Invalid input for `validatedDuration`, It must be more than `PT10S`, but received `PT1S`");
InputOutputValidationException e = assertThrows(InputOutputValidationException.class, () -> typedInputs(mapMin, "tenant8"));
assertThat(e.getMessage()).contains("Invalid value for input `validatedDuration`. Cause: It must be more than `PT10S`");

HashMap<String, Object> mapMax = new HashMap<>(inputs);
mapMax.put("validatedDuration", "PT30S");

e = assertThrows(ConstraintViolationException.class, () -> typedInputs(mapMax, "tenant8"));
e = assertThrows(InputOutputValidationException.class, () -> typedInputs(mapMax, "tenant8"));

assertThat(e.getMessage()).contains("Invalid input for `validatedDuration`, It must be less than `PT20S`, but received `PT30S`");
assertThat(e.getMessage()).contains("Invalid value for input `validatedDuration`. Cause: It must be less than `PT20S`");
}

@Test
@@ -306,15 +307,15 @@ public class InputsTest {
void inputValidatedFloatBadValue() {
HashMap<String, Object> mapMin = new HashMap<>(inputs);
mapMin.put("validatedFloat", "0.01");
ConstraintViolationException e = assertThrows(ConstraintViolationException.class, () -> typedInputs(mapMin, "tenant9"));
assertThat(e.getMessage()).contains("Invalid input for `validatedFloat`, it must be more than `0.1`, but received `0.01`");
InputOutputValidationException e = assertThrows(InputOutputValidationException.class, () -> typedInputs(mapMin, "tenant9"));
assertThat(e.getMessage()).contains("Invalid value for input `validatedFloat`. Cause: it must be more than `0.1`");

HashMap<String, Object> mapMax = new HashMap<>(inputs);
mapMax.put("validatedFloat", "1.01");

e = assertThrows(ConstraintViolationException.class, () -> typedInputs(mapMax, "tenant9"));
e = assertThrows(InputOutputValidationException.class, () -> typedInputs(mapMax, "tenant9"));

assertThat(e.getMessage()).contains("Invalid input for `validatedFloat`, it must be less than `0.5`, but received `1.01`");
assertThat(e.getMessage()).contains("Invalid value for input `validatedFloat`. Cause: it must be less than `0.5`");
}

@Test
@@ -322,15 +323,15 @@ public class InputsTest {
void inputValidatedTimeBadValue() {
HashMap<String, Object> mapMin = new HashMap<>(inputs);
mapMin.put("validatedTime", "00:00:01");
ConstraintViolationException e = assertThrows(ConstraintViolationException.class, () -> typedInputs(mapMin, "tenant10"));
assertThat(e.getMessage()).contains("Invalid input for `validatedTime`, it must be after `01:00`, but received `00:00:01`");
InputOutputValidationException e = assertThrows(InputOutputValidationException.class, () -> typedInputs(mapMin, "tenant10"));
assertThat(e.getMessage()).contains( "Invalid value for input `validatedTime`. Cause: it must be after `01:00`");

HashMap<String, Object> mapMax = new HashMap<>(inputs);
mapMax.put("validatedTime", "14:00:00");

e = assertThrows(ConstraintViolationException.class, () -> typedInputs(mapMax, "tenant10"));
e = assertThrows(InputOutputValidationException.class, () -> typedInputs(mapMax, "tenant10"));

assertThat(e.getMessage()).contains("Invalid input for `validatedTime`, it must be before `11:59:59`, but received `14:00:00`");
assertThat(e.getMessage()).contains("Invalid value for input `validatedTime`. Cause: it must be before `11:59:59`");
}

@Test
@@ -339,9 +340,9 @@ public class InputsTest {
HashMap<String, Object> map = new HashMap<>(inputs);
map.put("uri", "http:/bla");

ConstraintViolationException e = assertThrows(ConstraintViolationException.class, () -> typedInputs(map, "tenant11"));
InputOutputValidationException e = assertThrows(InputOutputValidationException.class, () -> typedInputs(map, "tenant11"));

assertThat(e.getMessage()).contains("Invalid input for `uri`, Expected `URI` but received `http:/bla`, but received `http:/bla`");
assertThat(e.getMessage()).contains( "Invalid value for input `uri`. Cause: Invalid URI format." );
}

@Test
@@ -350,9 +351,9 @@ public class InputsTest {
HashMap<String, Object> map = new HashMap<>(inputs);
map.put("enum", "INVALID");

ConstraintViolationException e = assertThrows(ConstraintViolationException.class, () -> typedInputs(map, "tenant12"));
InputOutputValidationException e = assertThrows(InputOutputValidationException.class, () -> typedInputs(map, "tenant12"));

assertThat(e.getMessage()).isEqualTo("enum: Invalid input for `enum`, it must match the values `[ENUM_VALUE, OTHER_ONE]`, but received `INVALID`");
assertThat(e.getMessage()).isEqualTo("Invalid value for input `enum`. Cause: it must match the values `[ENUM_VALUE, OTHER_ONE]`");
}

@Test
@@ -361,9 +362,9 @@ public class InputsTest {
HashMap<String, Object> map = new HashMap<>(inputs);
map.put("array", "[\"s1\", \"s2\"]");

ConstraintViolationException e = assertThrows(ConstraintViolationException.class, () -> typedInputs(map, "tenant13"));
InputOutputValidationException e = assertThrows(InputOutputValidationException.class, () -> typedInputs(map, "tenant13"));

assertThat(e.getMessage()).contains("Invalid input for `array`, Unable to parse array element as `INT` on `s1`, but received `[\"s1\", \"s2\"]`");
assertThat(e.getMessage()).contains( "Invalid value for input `array`. Cause: Unable to parse array element as `INT` on `s1`");
}

@Test
@@ -467,7 +468,20 @@ public class InputsTest {
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat((String) execution.findTaskRunsByTaskId("file").getFirst().getOutputs().get("value")).isEqualTo(file.toString());
}

@Test
@LoadFlows(value = "flows/invalids/inputs-with-multiple-constraint-violations.yaml")
void multipleConstraintViolations() {
InputOutputValidationException ex = assertThrows(InputOutputValidationException.class, ()-> runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "inputs-with-multiple-constraint-violations", null,
(f, e) ->flowIO.readExecutionInputs(f, e , Map.of("multi", List.of("F", "H")) )));

List<String> messages = Arrays.asList(ex.getMessage().split(System.lineSeparator()));

assertThat(messages).containsExactlyInAnyOrder(
"Invalid value for input `multi`. Cause: you can't define both `values` and `options`",
"Invalid value for input `multi`. Cause: value `F` doesn't match the values `[A, B, C]`",
"Invalid value for input `multi`. Cause: value `H` doesn't match the values `[A, B, C]`"
);
}

private URI createFile() throws IOException {
File tempFile = File.createTempFile("file", ".txt");
Files.write(tempFile.toPath(), "Hello World".getBytes());
@@ -1,5 +1,6 @@
package io.kestra.core.runners;

import io.kestra.core.exceptions.InputOutputValidationException;
import io.kestra.core.junit.annotations.ExecuteFlow;
import io.kestra.core.junit.annotations.LoadFlows;
import io.kestra.core.models.executions.Execution;
@@ -71,6 +72,6 @@ public class NoEncryptionConfiguredTest implements TestPropertyProvider {
.flowId(flow.getId())
.build();

assertThrows(ConstraintViolationException.class, () -> flowIO.readExecutionInputs(flow, execution, InputsTest.inputs));
assertThrows(InputOutputValidationException.class, () -> flowIO.readExecutionInputs(flow, execution, InputsTest.inputs));
}
}

@@ -1,15 +1,24 @@
package io.kestra.core.utils;

import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.models.Setting;
import io.kestra.core.repositories.SettingRepositoryInterface;
import io.micronaut.test.extensions.junit5.annotation.MicronautTest;
import jakarta.inject.Inject;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

@KestraTest
import java.util.Optional;

import static org.assertj.core.api.Assertions.assertThat;

@MicronautTest
public class EditionProviderTest {
@Inject
private EditionProvider editionProvider;

@Inject
private SettingRepositoryInterface settingRepository;

protected EditionProvider.Edition expectedEdition() {
return EditionProvider.Edition.OSS;
}
@@ -17,5 +26,10 @@ public class EditionProviderTest {
@Test
void shouldReturnCurrentEdition() {
Assertions.assertEquals(expectedEdition(), editionProvider.get());

// check that the edition is persisted in settings
Optional<Setting> editionSettings = settingRepository.findByKey(Setting.INSTANCE_EDITION);
assertThat(editionSettings).isPresent();
assertThat(editionSettings.get().getValue()).isEqualTo(expectedEdition().name());
}
}

@@ -0,0 +1,30 @@
package io.kestra.core.utils;

import io.kestra.core.models.Setting;
import io.kestra.core.repositories.SettingRepositoryInterface;
import io.micronaut.test.extensions.junit5.annotation.MicronautTest;
import jakarta.inject.Inject;
import org.junit.jupiter.api.Test;

import java.util.Optional;

import static org.assertj.core.api.Assertions.assertThat;

@MicronautTest
class VersionProviderTest {
@Inject
private VersionProvider versionProvider;

@Inject
private SettingRepositoryInterface settingRepository;

@Test
void shouldResolveVersion() {
assertThat(versionProvider.getVersion()).endsWith("-SNAPSHOT");

// check that the version is persisted in settings
Optional<Setting> versionSettings = settingRepository.findByKey(Setting.INSTANCE_VERSION);
assertThat(versionSettings).isPresent();
assertThat(versionSettings.get().getValue()).isEqualTo(versionProvider.getVersion());
}
}
@@ -9,9 +9,15 @@ import io.kestra.core.utils.TestsUtils;
import io.kestra.core.junit.annotations.KestraTest;
import jakarta.inject.Inject;
import org.junit.jupiter.api.Test;

import io.kestra.core.models.validations.ValidateConstraintViolation;
import io.kestra.core.services.FlowService;
import jakarta.validation.ConstraintViolationException;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.JsonLocation;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.util.List;
import java.io.File;
import java.net.URL;
import java.util.Optional;
@@ -23,6 +29,107 @@ class FlowValidationTest {
@Inject
private ModelValidator modelValidator;

@Inject
private FlowService flowService;

private static final ObjectMapper mapper = new ObjectMapper();

// Helper class to create JsonProcessingException with location
private static class TestJsonProcessingException extends JsonProcessingException {
public TestJsonProcessingException(String msg, JsonLocation location) {
super(msg, location);
}
public TestJsonProcessingException(String msg) {
super(msg);
}
}

@Test
void testFormatYamlErrorMessage_WithExpectedFieldName() throws JsonProcessingException {
JsonProcessingException e = new TestJsonProcessingException("Expected a field name", new JsonLocation(null, 100, 5, 10));
Object dummyTarget = new Object(); // Dummy target for toConstraintViolationException

ConstraintViolationException result = YamlParser.toConstraintViolationException(dummyTarget, "test resource", e);

assertThat(result.getMessage()).contains("YAML syntax error: Invalid structure").contains("(at line 5)");
}

@Test
void testFormatYamlErrorMessage_WithMappingStartEvent() throws JsonProcessingException {
JsonProcessingException e = new TestJsonProcessingException("MappingStartEvent", new JsonLocation(null, 200, 3, 5));
Object dummyTarget = new Object();

ConstraintViolationException result = YamlParser.toConstraintViolationException(dummyTarget, "test resource", e);

assertThat(result.getMessage()).contains("YAML syntax error: Unexpected mapping start").contains("(at line 3)");
}

@Test
void testFormatYamlErrorMessage_WithScalarValue() throws JsonProcessingException {
JsonProcessingException e = new TestJsonProcessingException("Scalar value", new JsonLocation(null, 150, 7, 12));
Object dummyTarget = new Object();

ConstraintViolationException result = YamlParser.toConstraintViolationException(dummyTarget, "test resource", e);

assertThat(result.getMessage()).contains("YAML syntax error: Expected a simple value").contains("(at line 7)");
}

@Test
void testFormatYamlErrorMessage_GenericError() throws JsonProcessingException {
JsonProcessingException e = new TestJsonProcessingException("Some other error", new JsonLocation(null, 50, 2, 8));
Object dummyTarget = new Object();

ConstraintViolationException result = YamlParser.toConstraintViolationException(dummyTarget, "test resource", e);

assertThat(result.getMessage()).contains("YAML parsing error: Some other error").contains("(at line 2)");
}

@Test
void testFormatYamlErrorMessage_NoLocation() throws JsonProcessingException {
JsonProcessingException e = new TestJsonProcessingException("Expected a field name");
Object dummyTarget = new Object();

ConstraintViolationException result = YamlParser.toConstraintViolationException(dummyTarget, "test resource", e);

assertThat(result.getMessage()).contains("YAML syntax error: Invalid structure").doesNotContain("at line");
}

@Test
void testValidateFlowWithYamlSyntaxError() {
String invalidYaml = """
id: test-flow
namespace: io.kestra.unittest
tasks:
  - id:hello
    type: io.kestra.plugin.core.log.Log
    message: {{ abc }}

""";
List<ValidateConstraintViolation> results = flowService.validate("my-tenant", invalidYaml);

assertThat(results).hasSize(1);
assertThat(results.getFirst().getConstraints()).contains("YAML parsing error").contains("at line");
}

@Test
void testValidateFlowWithUndefinedVariable() {
String yamlWithUndefinedVar = """
id: test-flow
namespace: io.kestra.unittest
tasks:
  - id: hello
    type: io.kestra.plugin.core.log.Log
    message: {{ undefinedVar }}
""";

List<ValidateConstraintViolation> results = flowService.validate("my-tenant", yamlWithUndefinedVar);

assertThat(results).hasSize(1);
assertThat(results.getFirst().getConstraints()).contains("Validation error");
}

@Test
void invalidRecursiveFlow() {
Flow flow = this.parse("flows/invalids/recursive-flow.yaml");
@@ -130,4 +237,4 @@ class FlowValidationTest {

return YamlParser.parse(file, Flow.class);
}
}
}
@@ -1,6 +1,7 @@
package io.kestra.plugin.core.flow;

import com.google.common.io.CharStreams;
import io.kestra.core.exceptions.InputOutputValidationException;
import io.kestra.core.junit.annotations.ExecuteFlow;
import io.kestra.core.junit.annotations.FlakyTest;
import io.kestra.core.junit.annotations.KestraTest;
@@ -328,12 +329,12 @@ public class PauseTest {

assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.PAUSED);

ConstraintViolationException e = assertThrows(
ConstraintViolationException.class,
InputOutputValidationException e = assertThrows(
InputOutputValidationException.class,
() -> executionService.resume(execution, flow, State.Type.RUNNING, Mono.empty(), Pause.Resumed.now()).block()
);

assertThat(e.getMessage()).contains("Invalid input for `asked`, missing required input, but received `null`");
assertThat(e.getMessage()).contains( "Missing required input:asked");
}

@SuppressWarnings("unchecked")

@@ -8,6 +8,7 @@ import io.kestra.core.models.flows.Output;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.flows.State.History;
import io.kestra.core.runners.DefaultRunContext;
import io.kestra.core.runners.InputAndOutput;
import io.kestra.core.runners.SubflowExecutionResult;
import io.kestra.core.services.VariablesService;
import io.micronaut.context.ApplicationContext;
@@ -46,11 +47,15 @@ class SubflowTest {
@Mock
private ApplicationContext applicationContext;

@Mock
private InputAndOutput inputAndOutput;

@BeforeEach
void beforeEach() {
Mockito.when(applicationContext.getBean(VariablesService.class)).thenReturn(new VariablesService());
Mockito.when(runContext.logger()).thenReturn(LOG);
Mockito.when(runContext.getApplicationContext()).thenReturn(applicationContext);
Mockito.when(runContext.inputAndOutput()).thenReturn(inputAndOutput);
}

@Test
@@ -118,7 +123,7 @@ class SubflowTest {

Map<String, Object> outputs = Map.of("key", "value");
Mockito.when(runContext.render(Mockito.anyMap())).thenReturn(outputs);
Mockito.when(inputAndOutput.renderOutputs(Mockito.anyList())).thenReturn(Map.of("key", "value"));

Subflow subflow = Subflow.builder()
.outputs(outputs)
@@ -159,6 +164,7 @@ class SubflowTest {

Output output = Output.builder().id("key").value("value").build();
Mockito.when(runContext.render(Mockito.anyMap())).thenReturn(Map.of(output.getId(), output.getValue()));
Mockito.when(inputAndOutput.typedOutputs(Mockito.any(), Mockito.any(), Mockito.anyMap())).thenReturn(Map.of("key", "value"));
Flow flow = Flow.builder()
.outputs(List.of(output))
.build();
@@ -57,7 +57,7 @@ class ScheduleOnDatesTest {
}

@Test
public void shouldReturnFirstDateWhenNextEvaluationDateAndNoExistingTriggerDate() throws Exception {
public void shouldReturnFirstDateWhenNextEvaluationDateAndNoExistingTriggerDate() {
// given
var now = ZonedDateTime.now();
var before = now.minusMinutes(1).truncatedTo(ChronoUnit.SECONDS);
@@ -75,7 +75,7 @@ class ScheduleOnDatesTest {
ZonedDateTime nextDate = scheduleOnDates.nextEvaluationDate(conditionContext, Optional.empty());

// then
assertThat(nextDate).isEqualTo(before);
assertThat(nextDate).isEqualTo(after);
}

@Test

@@ -104,8 +104,9 @@ class ScheduleTest {
);

assertThat(evaluate.isPresent()).isTrue();
assertThat(evaluate.get().getLabels()).hasSize(3);
assertThat(evaluate.get().getLabels()).hasSize(4);
assertTrue(evaluate.get().getLabels().stream().anyMatch(label -> label.key().equals(Label.CORRELATION_ID)));
assertTrue(evaluate.get().getLabels().stream().anyMatch(label -> label.equals(new Label(Label.FROM, "trigger"))));
assertThat(evaluate.get().getVariables()).containsEntry("custom_var", "VARIABLE VALUE");
var vars = evaluate.get().getTrigger().getVariables();
var inputs = evaluate.get().getInputs();
@@ -138,8 +139,9 @@ class ScheduleTest {
);

assertThat(evaluate.isPresent()).isTrue();
assertThat(evaluate.get().getLabels()).hasSize(3);
assertThat(evaluate.get().getLabels()).hasSize(4);
assertTrue(evaluate.get().getLabels().stream().anyMatch(label -> label.key().equals(Label.CORRELATION_ID)));
assertTrue(evaluate.get().getLabels().stream().anyMatch(label -> label.equals(new Label(Label.FROM, "trigger"))));
assertThat(evaluate.get().getVariables()).containsEntry("custom_var", "VARIABLE VALUE");
var inputs = evaluate.get().getInputs();

@@ -645,14 +647,14 @@ class ScheduleTest {
private ZonedDateTime dateFromVars(String date, ZonedDateTime expexted) {
return ZonedDateTime.parse(date).withZoneSameInstant(expexted.getZone());
}

@Test
void shouldGetNextExecutionDateWithConditionMatchingFutureDate() throws InternalException {

ZonedDateTime now = ZonedDateTime.now().withZoneSameLocal(ZoneId.of("Europe/Paris"));
OffsetTime before = now.minusHours(1).toOffsetDateTime().toOffsetTime().withMinute(0).withSecond(0).withNano(0);
OffsetTime after = now.minusHours(4).toOffsetDateTime().toOffsetTime().withMinute(0).withSecond(0).withNano(0);

Schedule trigger = Schedule.builder()
.id("schedule").type(Schedule.class.getName())
.cron("0 * * * *") // every hour
@@ -665,25 +667,25 @@ class ScheduleTest {
.build()
))
.build();

TriggerContext triggerContext = triggerContext(now, trigger).toBuilder().build();

ConditionContext conditionContext = ConditionContext.builder()
.runContext(runContextInitializer.forScheduler((DefaultRunContext) runContextFactory.of(), triggerContext, trigger))
.build();

Optional<ZonedDateTime> result = trigger.truePreviousNextDateWithCondition(trigger.executionTime(), conditionContext, now, true);
assertThat(result).isNotEmpty();
}

@Test
void shouldGetNextExecutionDateWithConditionMatchingCurrentDate() throws InternalException {

ZonedDateTime now = ZonedDateTime.now().withZoneSameLocal(ZoneId.of("Europe/Paris"));

OffsetTime before = now.plusHours(2).toOffsetDateTime().toOffsetTime().withMinute(0).withSecond(0).withNano(0);
OffsetTime after = now.minusHours(2).toOffsetDateTime().toOffsetTime().withMinute(0).withSecond(0).withNano(0);

Schedule trigger = Schedule.builder()
.id("schedule").type(Schedule.class.getName())
.cron("*/30 * * * * *")
@@ -696,13 +698,13 @@ class ScheduleTest {
.build()
))
.build();

TriggerContext triggerContext = triggerContext(now, trigger).toBuilder().build();

ConditionContext conditionContext = ConditionContext.builder()
.runContext(runContextInitializer.forScheduler((DefaultRunContext) runContextFactory.of(), triggerContext, trigger))
.build();

Optional<ZonedDateTime> result = trigger.truePreviousNextDateWithCondition(trigger.executionTime(), conditionContext, now, true);
assertThat(result).isNotEmpty();
}
@@ -0,0 +1,18 @@
id: inputs-with-multiple-constraint-violations
namespace: io.kestra.tests
inputs:
  - id: multi
    type: MULTISELECT
    values:
      - A
      - B
      - C
    options:
      - X
      - Y
      - Z

tasks:
  - id: validMultiSelect
    type: io.kestra.plugin.core.debug.Return
    format: "{{inputs.multi}}"

core/src/test/resources/flows/valids/each-pause.yaml (new file, 10 lines)
@@ -0,0 +1,10 @@
id: each-pause
namespace: io.kestra.tests

tasks:
  - id: each_task
    type: io.kestra.plugin.core.flow.ForEach
    values: '["a", "b"]'
    tasks:
      - id: pause
        type: io.kestra.plugin.core.flow.Pause
@@ -402,10 +402,11 @@ public class ExecutorService {

if (flow.getOutputs() != null) {
RunContext runContext = runContextFactory.of(executor.getFlow(), executor.getExecution());
var inputAndOutput = runContext.inputAndOutput();

try {
Map<String, Object> outputs = FlowInputOutput.renderFlowOutputs(flow.getOutputs(), runContext);
outputs = flowInputOutput.typedOutputs(flow, executor.getExecution(), outputs);
Map<String, Object> outputs = inputAndOutput.renderOutputs(flow.getOutputs());
outputs = inputAndOutput.typedOutputs(flow, executor.getExecution(), outputs);
newExecution = newExecution.withOutputs(outputs);
} catch (Exception e) {
Logs.logExecution(

@@ -0,0 +1,22 @@
DROP INDEX logs_execution_id;
DROP INDEX logs_execution_id__task_id;
DROP INDEX logs_execution_id__taskrun_id;
DROP INDEX logs_namespace_flow;

ALTER table logs drop column "deleted";

CREATE INDEX IF NOT EXISTS logs_execution_id ON logs ("execution_id");
CREATE INDEX IF NOT EXISTS logs_execution_id__task_id ON logs ("execution_id", "task_id");
CREATE INDEX IF NOT EXISTS logs_execution_id__taskrun_id ON logs ("execution_id", "taskrun_id");
CREATE INDEX IF NOT EXISTS logs_namespace_flow ON logs ("tenant_id", "timestamp", "level", "namespace", "flow_id");


DROP INDEX IF EXISTS metrics_flow_id;
DROP INDEX IF EXISTS metrics_execution_id;
DROP INDEX IF EXISTS metrics_timestamp;

ALTER TABLE metrics drop column "deleted";

CREATE INDEX IF NOT EXISTS metrics_flow_id ON metrics ("tenant_id", "namespace", "flow_id");
CREATE INDEX IF NOT EXISTS metrics_execution_id ON metrics ("execution_id");
CREATE INDEX IF NOT EXISTS metrics_timestamp ON metrics ("tenant_id", "timestamp");
@@ -0,0 +1,22 @@
ALTER TABLE logs DROP INDEX ix_execution_id;
ALTER TABLE logs DROP INDEX ix_execution_id__task_id;
ALTER TABLE logs DROP INDEX ix_execution_id__taskrun_id;
ALTER TABLE logs DROP INDEX ix_namespace_flow;

ALTER table logs drop column `deleted`;

ALTER TABLE logs ADD INDEX ix_execution_id (`execution_id`), ALGORITHM=INPLACE, LOCK=NONE;
ALTER TABLE logs ADD INDEX ix_execution_id__task_id (`execution_id`, `task_id`), ALGORITHM=INPLACE, LOCK=NONE;
ALTER TABLE logs ADD INDEX ix_execution_id__taskrun_id (`execution_id`, `taskrun_id`), ALGORITHM=INPLACE, LOCK=NONE;
ALTER TABLE logs ADD INDEX ix_namespace_flow (`tenant_id`, `timestamp`, `level`, `namespace`, `flow_id`), ALGORITHM=INPLACE, LOCK=NONE;


ALTER TABLE metrics DROP INDEX metrics_flow_id;
ALTER TABLE metrics DROP INDEX ix_metrics_execution_id;
ALTER TABLE metrics DROP INDEX metrics_timestamp;

ALTER TABLE metrics drop column `deleted`;

ALTER TABLE metrics ADD INDEX ix_metrics_flow_id (`tenant_id`, `namespace`, `flow_id`), ALGORITHM=INPLACE, LOCK=NONE;
ALTER TABLE metrics ADD INDEX ix_metrics_execution_id (`execution_id`), ALGORITHM=INPLACE, LOCK=NONE;
ALTER TABLE metrics ADD INDEX ix_metrics_timestamp (`tenant_id`, `timestamp`), ALGORITHM=INPLACE, LOCK=NONE;
@@ -0,0 +1,13 @@
-- Indices will be re-created by the next migration
DROP INDEX logs_execution_id;
DROP INDEX logs_execution_id__task_id;
DROP INDEX logs_execution_id__taskrun_id;
DROP INDEX logs_namespace_flow;

ALTER table logs drop column "deleted";

DROP INDEX IF EXISTS metrics_flow_id;
DROP INDEX IF EXISTS metrics_execution_id;
DROP INDEX IF EXISTS metrics_timestamp;

ALTER TABLE metrics drop column "deleted";
@@ -0,0 +1,8 @@
CREATE INDEX CONCURRENTLY IF NOT EXISTS logs_execution_id ON logs (execution_id);
CREATE INDEX CONCURRENTLY IF NOT EXISTS logs_execution_id__task_id ON logs (execution_id, task_id);
CREATE INDEX CONCURRENTLY IF NOT EXISTS logs_execution_id__taskrun_id ON logs (execution_id, taskrun_id);
CREATE INDEX CONCURRENTLY IF NOT EXISTS logs_namespace_flow ON logs (tenant_id, timestamp, level, namespace, flow_id);

CREATE INDEX CONCURRENTLY IF NOT EXISTS metrics_flow_id ON metrics (tenant_id, namespace, flow_id);
CREATE INDEX CONCURRENTLY IF NOT EXISTS metrics_execution_id ON metrics (execution_id);
CREATE INDEX CONCURRENTLY IF NOT EXISTS metrics_timestamp ON metrics (tenant_id, timestamp);
@@ -15,6 +15,11 @@ flyway:
# We must ignore missing migrations as a V6 wrong migration was created and replaced by the V11
ignore-migration-patterns: "*:missing,*:future"
out-of-order: true
properties:
  flyway:
    postgresql:
      transactional:
        lock: false

kestra:
server-type: STANDALONE
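Editor's note: the Postgres migrations above re-create the logs/metrics indices with CREATE INDEX CONCURRENTLY, which Postgres refuses to run inside a transaction block; that is presumably why the application.yml hunk directly above sets the Flyway property flyway.postgresql.transactional.lock to false, so Flyway does not wrap the migration session in its transaction-scoped advisory lock. The CONCURRENTLY form avoids holding a write-blocking lock on the large logs and metrics tables while the indices are rebuilt. A minimal sketch of such a non-transactional migration, with a hypothetical file name and the table/index names taken from the hunks above:

    -- V100__reindex_logs_concurrently.sql  (hypothetical version number)
    -- CREATE INDEX CONCURRENTLY must execute outside a transaction, so the
    -- postgresql transactional lock has to be disabled for Flyway to apply it.
    CREATE INDEX CONCURRENTLY IF NOT EXISTS logs_execution_id ON logs (execution_id);
    CREATE INDEX CONCURRENTLY IF NOT EXISTS metrics_execution_id ON metrics (execution_id);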
@@ -257,10 +257,7 @@ public abstract class AbstractJdbcLogRepository extends AbstractJdbcCrudReposito
DSLContext context = DSL.using(configuration);

return context.delete(this.jdbcRepository.getTable())
// The deleted field is not used, so ti will always be false.
// We add it here to be sure to use the correct index.
.where(field("deleted", Boolean.class).eq(false))
.and(field("execution_id", String.class).eq(execution.getId()))
.where(field("execution_id", String.class).eq(execution.getId()))
.execute();
});
}
@@ -273,10 +270,7 @@ public abstract class AbstractJdbcLogRepository extends AbstractJdbcCrudReposito
DSLContext context = DSL.using(configuration);

return context.delete(this.jdbcRepository.getTable())
// The deleted field is not used, so ti will always be false.
// We add it here to be sure to use the correct index.
.where(field("deleted", Boolean.class).eq(false))
.and(field("execution_id", String.class).in(executions.stream().map(Execution::getId).toList()))
.where(field("execution_id", String.class).in(executions.stream().map(Execution::getId).toList()))
.execute();
});
}
@@ -496,5 +490,15 @@ public abstract class AbstractJdbcLogRepository extends AbstractJdbcCrudReposito
});
}

@Override
protected Condition defaultFilter(String tenantId) {
return buildTenantCondition(tenantId);
}

@Override
protected Condition defaultFilter() {
return DSL.trueCondition();
}

abstract protected Field<Date> formatDateField(String dateField, DateUtils.GroupType groupType);
}

@@ -185,10 +185,7 @@ public abstract class AbstractJdbcMetricRepository extends AbstractJdbcCrudRepos
DSLContext context = DSL.using(configuration);

return context.delete(this.jdbcRepository.getTable())
// The deleted field is not used, so ti will always be false.
// We add it here to be sure to use the correct index.
.where(field("deleted", Boolean.class).eq(false))
.and(field("execution_id", String.class).eq(execution.getId()))
.where(field("execution_id", String.class).eq(execution.getId()))
.execute();
});
}
@@ -201,14 +198,21 @@ public abstract class AbstractJdbcMetricRepository extends AbstractJdbcCrudRepos
DSLContext context = DSL.using(configuration);

return context.delete(this.jdbcRepository.getTable())
// The deleted field is not used, so ti will always be false.
// We add it here to be sure to use the correct index.
.where(field("deleted", Boolean.class).eq(false))
.and(field("execution_id", String.class).in(executions.stream().map(Execution::getId).toList()))
.where(field("execution_id", String.class).in(executions.stream().map(Execution::getId).toList()))
.execute();
});
}

@Override
protected Condition defaultFilter(String tenantId) {
return buildTenantCondition(tenantId);
}

@Override
protected Condition defaultFilter() {
return DSL.trueCondition();
}

private List<String> queryDistinct(String tenantId, Condition condition, String field) {
return this.jdbcRepository
.getDslContextWrapper()

@@ -44,9 +44,15 @@ public abstract class AbstractJdbcSettingRepository extends AbstractJdbcCrudRepo

@Override
public Setting save(Setting setting) {
this.eventPublisher.publishEvent(new CrudEvent<>(setting, CrudEventType.UPDATE));

return internalSave(setting);
}

@Override
public Setting internalSave(Setting setting) {
Map<Field<Object>, Object> fields = this.jdbcRepository.persistFields(setting);
this.jdbcRepository.persist(setting, fields);
this.eventPublisher.publishEvent(new CrudEvent<>(setting, CrudEventType.UPDATE));

return setting;
}

@@ -72,12 +72,12 @@ public abstract class AbstractJdbcTriggerRepository extends AbstractJdbcCrudRepo

@Override
public Optional<Trigger> findLast(TriggerContext trigger) {
return findOne(DSL.trueCondition(), field("key").eq(trigger.uid()));
return findByUid(trigger.uid());
}

@Override
public Optional<Trigger> findByExecution(Execution execution) {
return findOne(execution.getTenantId(), field("execution_id").eq(execution.getId()));
public Optional<Trigger> findByUid(String uid) {
return findOne(DSL.trueCondition(), field("key").eq(uid));
}

public List<Trigger> findByNextExecutionDateReadyForAllTenants(ZonedDateTime now, ScheduleContextInterface scheduleContextInterface) {
@@ -13,6 +13,7 @@ import io.kestra.core.models.tasks.ExecutableTask;
import io.kestra.core.models.tasks.Task;
import io.kestra.core.models.tasks.WorkerGroup;
import io.kestra.core.models.topologies.FlowTopology;
import io.kestra.core.models.triggers.Trigger;
import io.kestra.core.models.triggers.multipleflows.MultipleCondition;
import io.kestra.core.models.triggers.multipleflows.MultipleConditionStorageInterface;
import io.kestra.core.queues.QueueException;
@@ -1138,9 +1139,7 @@ public class JdbcExecutor implements ExecutorInterface {
execution.getTrigger().getId()
);
} else {
triggerRepository
.findByExecution(execution)
.ifPresent(trigger -> this.triggerState.update(executionService.resetExecution(flow, execution, trigger)));
triggerRepository.findByUid(Trigger.uid(execution)).ifPresent(trigger -> this.triggerState.update(executionService.resetExecution(flow, execution, trigger)));
}
}

@@ -1241,11 +1240,7 @@ public class JdbcExecutor implements ExecutorInterface {
// purge the trigger: reset scheduler trigger at end
if (execution.getTrigger() != null) {
FlowWithSource flow = executor.getFlow();
triggerRepository
.findByExecution(execution)
.ifPresent(trigger -> {
this.triggerState.update(executionService.resetExecution(flow, execution, trigger));
});
triggerRepository.findByUid(Trigger.uid(execution)).ifPresent(trigger -> this.triggerState.update(executionService.resetExecution(flow, execution, trigger)));
}

// Purge the workerTaskResultQueue and the workerJobQueue
@@ -30,14 +30,16 @@ dependencies {
// as Jackson is in the Micronaut BOM, to force its version we need to use enforcedPlatform but it didn't really work, see later :(
api enforcedPlatform("com.fasterxml.jackson:jackson-bom:$jacksonVersion")
api enforcedPlatform("org.slf4j:slf4j-api:$slf4jVersion")
api platform("io.micronaut.platform:micronaut-platform:4.9.4")
api platform("io.qameta.allure:allure-bom:2.31.0")
api platform("io.micronaut.platform:micronaut-platform:4.10.5")
api platform("io.qameta.allure:allure-bom:2.32.0")
// we define cloud bom here for GCP, Azure and AWS so they are aligned for all plugins that use them (secret, storage, oss and ee plugins)
api platform('com.google.cloud:libraries-bom:26.72.0')
api platform('com.google.cloud:libraries-bom:26.73.0')
api platform("com.azure:azure-sdk-bom:1.3.3")
api platform('software.amazon.awssdk:bom:2.40.5')
api platform('software.amazon.awssdk:bom:2.40.10')
api platform("dev.langchain4j:langchain4j-bom:$langchain4jVersion")
api platform("dev.langchain4j:langchain4j-community-bom:$langchain4jCommunityVersion")
// Micronaut 4.10 brings a Jetty version no compatible with the one from Wiremock so we bump it here
api platform("org.eclipse.jetty.ee10:jetty-ee10-bom:12.1.2")

constraints {
// downgrade to proto 1.3.2-alpha as 1.5.0 needs protobuf 4
@@ -77,12 +79,12 @@ dependencies {
api "org.apache.kafka:kafka-clients:$kafkaVersion"
api "org.apache.kafka:kafka-streams:$kafkaVersion"
// AWS CRT is not included in the AWS BOM but needed for the S3 Transfer manager
api 'software.amazon.awssdk.crt:aws-crt:0.40.3'
api 'software.amazon.awssdk.crt:aws-crt:0.41.0'

// Other libs
api("org.projectlombok:lombok:1.18.42")
api("org.codehaus.janino:janino:3.1.12")
api group: 'org.apache.logging.log4j', name: 'log4j-to-slf4j', version: '2.25.2'
api group: 'org.apache.logging.log4j', name: 'log4j-to-slf4j', version: '2.25.3'
api group: 'org.slf4j', name: 'jul-to-slf4j', version: slf4jVersion
api group: 'org.slf4j', name: 'jcl-over-slf4j', version: slf4jVersion
api group: 'org.fusesource.jansi', name: 'jansi', version: '2.4.2'
@@ -99,11 +101,11 @@ dependencies {
api group: 'org.apache.maven.resolver', name: 'maven-resolver-transport-file', version: mavenResolverVersion
api group: 'org.apache.maven.resolver', name: 'maven-resolver-transport-apache', version: mavenResolverVersion
api 'com.github.oshi:oshi-core:6.9.1'
api 'io.pebbletemplates:pebble:4.0.0'
api 'io.pebbletemplates:pebble:4.1.0'
api group: 'co.elastic.logging', name: 'logback-ecs-encoder', version: '1.7.0'
api group: 'de.focus-shift', name: 'jollyday-core', version: jollydayVersion
api group: 'de.focus-shift', name: 'jollyday-jaxb', version: jollydayVersion
api 'nl.basjes.gitignore:gitignore-reader:1.13.0'
api 'nl.basjes.gitignore:gitignore-reader:1.14.1'
api group: 'dev.failsafe', name: 'failsafe', version: '3.3.2'
api group: 'com.cronutils', name: 'cron-utils', version: '9.2.1'
api group: 'com.github.victools', name: 'jsonschema-generator', version: jsonschemaVersion
@@ -288,7 +288,7 @@ public abstract class AbstractScheduler implements Scheduler {
disableInvalidTrigger(workerTriggerResult.getTriggerContext(), e);
return;
}
this.handleEvaluateWorkerTriggerResult(triggerExecution, nextExecutionDate);
this.handleEvaluateWorkerTriggerResult(triggerExecution, nextExecutionDate, workerTriggerResult.getTrigger());
} else {
ZonedDateTime nextExecutionDate;
try {
@@ -768,7 +768,7 @@ public abstract class AbstractScheduler implements Scheduler {
}

private void handleEvaluateWorkerTriggerResult(SchedulerExecutionWithTrigger result, ZonedDateTime
nextExecutionDate) {
nextExecutionDate, AbstractTrigger abstractTrigger) {
Optional.ofNullable(result)
.ifPresent(executionWithTrigger -> {
log(executionWithTrigger);
@@ -779,6 +779,12 @@ public abstract class AbstractScheduler implements Scheduler {
nextExecutionDate
);

// if the trigger is allowed to run concurrently we do not attached the executio-id to the trigger state
// i.e., the trigger will not be locked
if (abstractTrigger.isAllowConcurrent()) {
trigger = trigger.toBuilder().executionId(null).build();
}

// Worker triggers result is evaluated in another thread with the workerTriggerResultQueue.
// We can then update the trigger directly.
this.saveLastTriggerAndEmitExecution(executionWithTrigger.getExecution(), trigger, triggerToSave -> this.triggerState.update(triggerToSave));
@@ -800,6 +806,12 @@ public abstract class AbstractScheduler implements Scheduler {
if (result.getExecution().getState().getCurrent() == State.Type.FAILED) {
trigger = trigger.resetExecution(State.Type.FAILED);
}

// if the trigger is allowed to run concurrently we do not attached the executio-id to the trigger state
// i.e., the trigger will not be locked
if (((AbstractTrigger)schedule).isAllowConcurrent()) {
trigger = trigger.toBuilder().executionId(null).build();
}

// Schedule triggers are being executed directly from the handle method within the context where triggers are locked.
// So we must save them by passing the scheduleContext.

@@ -91,6 +91,7 @@ public class SchedulerPollingTriggerTest extends AbstractSchedulerTest {
assertThat(queueCount.getCount()).isEqualTo(0L);
assertThat(last.get()).isNotNull();
assertTrue(last.get().getLabels().stream().anyMatch(label -> label.key().equals(Label.CORRELATION_ID)));
assertTrue(last.get().getLabels().stream().anyMatch(label -> label.equals(new Label(Label.FROM, "trigger"))));
}
}

@@ -136,6 +137,7 @@ public class SchedulerPollingTriggerTest extends AbstractSchedulerTest {
assertThat(queueCount.getCount()).isEqualTo(0L);
assertThat(last.get()).isNotNull();
assertTrue(last.get().getLabels().stream().anyMatch(label -> label.key().equals(Label.CORRELATION_ID)));
assertTrue(last.get().getLabels().stream().anyMatch(label -> label.equals(new Label(Label.FROM, "trigger"))));

// Assert that the trigger is now disabled.
// It needs to await on assertion as it will be disabled AFTER we receive a success execution.

@@ -104,6 +104,7 @@ public class SchedulerStreamingTest extends AbstractSchedulerTest {
assertThat(SchedulerStreamingTest.startedEvaluate.get(false), is(1));
assertThat(last.getTrigger().getVariables().get("startedEvaluate"), is(1));
assertTrue(last.getLabels().stream().anyMatch(label -> label.key().equals(Label.CORRELATION_ID)));
assertTrue(last.getLabels().stream().anyMatch(label -> label.equals(new Label(Label.FROM, "trigger"))));
}
);
}
ui/package-lock.json (generated, 415 lines changed): routine lockfile churn for dependency bumps visible in the hunks — posthog-js 1.308.0, shiki/@shikijs 3.20.0, @eslint/js and eslint 9.39.2, vue-router 4.6.4, @types/node 25.0.3, typescript-eslint 8.50.0, @vitejs/plugin-vue 6.0.3, rolldown-vite 7.2.11, sass 1.97.0, and the matching esbuild 0.27.2, rollup 4.53.5 and swc 1.15.5 platform binaries.
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.50.0.tgz",
|
||||
"integrity": "sha512-O7QnmOXYKVtPrfYzMolrCTfkezCJS9+ljLdKW/+DCvRsc3UAz+sbH6Xcsv7p30+0OwUbeWfUDAQE0vpabZ3QLg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@eslint-community/regexpp": "^4.10.0",
|
||||
"@typescript-eslint/scope-manager": "8.49.0",
|
||||
"@typescript-eslint/type-utils": "8.49.0",
|
||||
"@typescript-eslint/utils": "8.49.0",
|
||||
"@typescript-eslint/visitor-keys": "8.49.0",
|
||||
"@typescript-eslint/scope-manager": "8.50.0",
|
||||
"@typescript-eslint/type-utils": "8.50.0",
|
||||
"@typescript-eslint/utils": "8.50.0",
|
||||
"@typescript-eslint/visitor-keys": "8.50.0",
|
||||
"ignore": "^7.0.0",
|
||||
"natural-compare": "^1.4.0",
|
||||
"ts-api-utils": "^2.1.0"
|
||||
@@ -6169,22 +6189,22 @@
|
||||
"url": "https://opencollective.com/typescript-eslint"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@typescript-eslint/parser": "^8.49.0",
|
||||
"@typescript-eslint/parser": "^8.50.0",
|
||||
"eslint": "^8.57.0 || ^9.0.0",
|
||||
"typescript": ">=4.8.4 <6.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/parser": {
|
||||
"version": "8.49.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.49.0.tgz",
|
||||
"integrity": "sha512-N9lBGA9o9aqb1hVMc9hzySbhKibHmB+N3IpoShyV6HyQYRGIhlrO5rQgttypi+yEeKsKI4idxC8Jw6gXKD4THA==",
|
||||
"version": "8.50.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.50.0.tgz",
|
||||
"integrity": "sha512-6/cmF2piao+f6wSxUsJLZjck7OQsYyRtcOZS02k7XINSNlz93v6emM8WutDQSXnroG2xwYlEVHJI+cPA7CPM3Q==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/scope-manager": "8.49.0",
|
||||
"@typescript-eslint/types": "8.49.0",
|
||||
"@typescript-eslint/typescript-estree": "8.49.0",
|
||||
"@typescript-eslint/visitor-keys": "8.49.0",
|
||||
"@typescript-eslint/scope-manager": "8.50.0",
|
||||
"@typescript-eslint/types": "8.50.0",
|
||||
"@typescript-eslint/typescript-estree": "8.50.0",
|
||||
"@typescript-eslint/visitor-keys": "8.50.0",
|
||||
"debug": "^4.3.4"
|
||||
},
|
||||
"engines": {
|
||||
@@ -6200,14 +6220,14 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/project-service": {
|
||||
"version": "8.49.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.49.0.tgz",
|
||||
"integrity": "sha512-/wJN0/DKkmRUMXjZUXYZpD1NEQzQAAn9QWfGwo+Ai8gnzqH7tvqS7oNVdTjKqOcPyVIdZdyCMoqN66Ia789e7g==",
|
||||
"version": "8.50.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.50.0.tgz",
|
||||
"integrity": "sha512-Cg/nQcL1BcoTijEWyx4mkVC56r8dj44bFDvBdygifuS20f3OZCHmFbjF34DPSi07kwlFvqfv/xOLnJ5DquxSGQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/tsconfig-utils": "^8.49.0",
|
||||
"@typescript-eslint/types": "^8.49.0",
|
||||
"@typescript-eslint/tsconfig-utils": "^8.50.0",
|
||||
"@typescript-eslint/types": "^8.50.0",
|
||||
"debug": "^4.3.4"
|
||||
},
|
||||
"engines": {
|
||||
@@ -6222,14 +6242,14 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/scope-manager": {
|
||||
"version": "8.49.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.49.0.tgz",
|
||||
"integrity": "sha512-npgS3zi+/30KSOkXNs0LQXtsg9ekZ8OISAOLGWA/ZOEn0ZH74Ginfl7foziV8DT+D98WfQ5Kopwqb/PZOaIJGg==",
|
||||
"version": "8.50.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.50.0.tgz",
|
||||
"integrity": "sha512-xCwfuCZjhIqy7+HKxBLrDVT5q/iq7XBVBXLn57RTIIpelLtEIZHXAF/Upa3+gaCpeV1NNS5Z9A+ID6jn50VD4A==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "8.49.0",
|
||||
"@typescript-eslint/visitor-keys": "8.49.0"
|
||||
"@typescript-eslint/types": "8.50.0",
|
||||
"@typescript-eslint/visitor-keys": "8.50.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||
@@ -6240,9 +6260,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/tsconfig-utils": {
|
||||
"version": "8.49.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.49.0.tgz",
|
||||
"integrity": "sha512-8prixNi1/6nawsRYxet4YOhnbW+W9FK/bQPxsGB1D3ZrDzbJ5FXw5XmzxZv82X3B+ZccuSxo/X8q9nQ+mFecWA==",
|
||||
"version": "8.50.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.50.0.tgz",
|
||||
"integrity": "sha512-vxd3G/ybKTSlm31MOA96gqvrRGv9RJ7LGtZCn2Vrc5htA0zCDvcMqUkifcjrWNNKXHUU3WCkYOzzVSFBd0wa2w==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
@@ -6257,15 +6277,15 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/type-utils": {
|
||||
"version": "8.49.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.49.0.tgz",
|
||||
"integrity": "sha512-KTExJfQ+svY8I10P4HdxKzWsvtVnsuCifU5MvXrRwoP2KOlNZ9ADNEWWsQTJgMxLzS5VLQKDjkCT/YzgsnqmZg==",
|
||||
"version": "8.50.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.50.0.tgz",
|
||||
"integrity": "sha512-7OciHT2lKCewR0mFoBrvZJ4AXTMe/sYOe87289WAViOocEmDjjv8MvIOT2XESuKj9jp8u3SZYUSh89QA4S1kQw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "8.49.0",
|
||||
"@typescript-eslint/typescript-estree": "8.49.0",
|
||||
"@typescript-eslint/utils": "8.49.0",
|
||||
"@typescript-eslint/types": "8.50.0",
|
||||
"@typescript-eslint/typescript-estree": "8.50.0",
|
||||
"@typescript-eslint/utils": "8.50.0",
|
||||
"debug": "^4.3.4",
|
||||
"ts-api-utils": "^2.1.0"
|
||||
},
|
||||
@@ -6282,9 +6302,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/types": {
|
||||
"version": "8.49.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.49.0.tgz",
|
||||
"integrity": "sha512-e9k/fneezorUo6WShlQpMxXh8/8wfyc+biu6tnAqA81oWrEic0k21RHzP9uqqpyBBeBKu4T+Bsjy9/b8u7obXQ==",
|
||||
"version": "8.50.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.50.0.tgz",
|
||||
"integrity": "sha512-iX1mgmGrXdANhhITbpp2QQM2fGehBse9LbTf0sidWK6yg/NE+uhV5dfU1g6EYPlcReYmkE9QLPq/2irKAmtS9w==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
@@ -6296,16 +6316,16 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/typescript-estree": {
|
||||
"version": "8.49.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.49.0.tgz",
|
||||
"integrity": "sha512-jrLdRuAbPfPIdYNppHJ/D0wN+wwNfJ32YTAm10eJVsFmrVpXQnDWBn8niCSMlWjvml8jsce5E/O+86IQtTbJWA==",
|
||||
"version": "8.50.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.50.0.tgz",
|
||||
"integrity": "sha512-W7SVAGBR/IX7zm1t70Yujpbk+zdPq/u4soeFSknWFdXIFuWsBGBOUu/Tn/I6KHSKvSh91OiMuaSnYp3mtPt5IQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/project-service": "8.49.0",
|
||||
"@typescript-eslint/tsconfig-utils": "8.49.0",
|
||||
"@typescript-eslint/types": "8.49.0",
|
||||
"@typescript-eslint/visitor-keys": "8.49.0",
|
||||
"@typescript-eslint/project-service": "8.50.0",
|
||||
"@typescript-eslint/tsconfig-utils": "8.50.0",
|
||||
"@typescript-eslint/types": "8.50.0",
|
||||
"@typescript-eslint/visitor-keys": "8.50.0",
|
||||
"debug": "^4.3.4",
|
||||
"minimatch": "^9.0.4",
|
||||
"semver": "^7.6.0",
|
||||
@@ -6324,16 +6344,16 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/utils": {
|
||||
"version": "8.49.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.49.0.tgz",
|
||||
"integrity": "sha512-N3W7rJw7Rw+z1tRsHZbK395TWSYvufBXumYtEGzypgMUthlg0/hmCImeA8hgO2d2G4pd7ftpxxul2J8OdtdaFA==",
|
||||
"version": "8.50.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.50.0.tgz",
|
||||
"integrity": "sha512-87KgUXET09CRjGCi2Ejxy3PULXna63/bMYv72tCAlDJC3Yqwln0HiFJ3VJMst2+mEtNtZu5oFvX4qJGjKsnAgg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@eslint-community/eslint-utils": "^4.7.0",
|
||||
"@typescript-eslint/scope-manager": "8.49.0",
|
||||
"@typescript-eslint/types": "8.49.0",
|
||||
"@typescript-eslint/typescript-estree": "8.49.0"
|
||||
"@typescript-eslint/scope-manager": "8.50.0",
|
||||
"@typescript-eslint/types": "8.50.0",
|
||||
"@typescript-eslint/typescript-estree": "8.50.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||
@@ -6348,13 +6368,13 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/visitor-keys": {
|
||||
"version": "8.49.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.49.0.tgz",
|
||||
"integrity": "sha512-LlKaciDe3GmZFphXIc79THF/YYBugZ7FS1pO581E/edlVVNbZKDy93evqmrfQ9/Y4uN0vVhX4iuchq26mK/iiA==",
|
||||
"version": "8.50.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.50.0.tgz",
|
||||
"integrity": "sha512-Xzmnb58+Db78gT/CCj/PVCvK+zxbnsw6F+O1oheYszJbBSdEjVhQi3C/Xttzxgi/GLmpvOggRs1RFpiJ8+c34Q==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "8.49.0",
|
||||
"@typescript-eslint/types": "8.50.0",
|
||||
"eslint-visitor-keys": "^4.2.1"
|
||||
},
|
||||
"engines": {
|
||||
@@ -6372,19 +6392,19 @@
|
||||
"license": "ISC"
|
||||
},
|
||||
"node_modules/@vitejs/plugin-vue": {
|
||||
"version": "6.0.2",
|
||||
"resolved": "https://registry.npmjs.org/@vitejs/plugin-vue/-/plugin-vue-6.0.2.tgz",
|
||||
"integrity": "sha512-iHmwV3QcVGGvSC1BG5bZ4z6iwa1SOpAPWmnjOErd4Ske+lZua5K9TtAVdx0gMBClJ28DViCbSmZitjWZsWO3LA==",
|
||||
"version": "6.0.3",
|
||||
"resolved": "https://registry.npmjs.org/@vitejs/plugin-vue/-/plugin-vue-6.0.3.tgz",
|
||||
"integrity": "sha512-TlGPkLFLVOY3T7fZrwdvKpjprR3s4fxRln0ORDo1VQ7HHyxJwTlrjKU3kpVWTlaAjIEuCTokmjkZnr8Tpc925w==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@rolldown/pluginutils": "1.0.0-beta.50"
|
||||
"@rolldown/pluginutils": "1.0.0-beta.53"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^20.19.0 || >=22.12.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"vite": "^5.0.0 || ^6.0.0 || ^7.0.0",
|
||||
"vite": "^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0",
|
||||
"vue": "^3.2.25"
|
||||
}
|
||||
},
|
||||
@@ -9986,9 +10006,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/eslint": {
|
||||
"version": "9.39.1",
|
||||
"resolved": "https://registry.npmjs.org/eslint/-/eslint-9.39.1.tgz",
|
||||
"integrity": "sha512-BhHmn2yNOFA9H9JmmIVKJmd288g9hrVRDkdoIgRCRuSySRUHH7r/DI6aAXW9T1WwUuY3DFgrcaqB+deURBLR5g==",
|
||||
"version": "9.39.2",
|
||||
"resolved": "https://registry.npmjs.org/eslint/-/eslint-9.39.2.tgz",
|
||||
"integrity": "sha512-LEyamqS7W5HB3ujJyvi0HQK/dtVINZvd5mAAp9eT5S/ujByGjiZLCzPcHVzuXbpJDJF/cxwHlfceVUDZ2lnSTw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
@@ -9998,7 +10018,7 @@
|
||||
"@eslint/config-helpers": "^0.4.2",
|
||||
"@eslint/core": "^0.17.0",
|
||||
"@eslint/eslintrc": "^3.3.1",
|
||||
"@eslint/js": "9.39.1",
|
||||
"@eslint/js": "9.39.2",
|
||||
"@eslint/plugin-kit": "^0.4.1",
|
||||
"@humanfs/node": "^0.16.6",
|
||||
"@humanwhocodes/module-importer": "^1.0.1",
|
||||
@@ -17070,12 +17090,12 @@
|
||||
}
|
||||
},
|
||||
"node_modules/posthog-js": {
|
||||
"version": "1.304.0",
|
||||
"resolved": "https://registry.npmjs.org/posthog-js/-/posthog-js-1.304.0.tgz",
|
||||
"integrity": "sha512-revqoppmJ5y1Oa9iRUb3P8w1htfxZdrSAe+elSNMxvl7wxY62qWN7Q0kE5Sk81o1qLHa6drPhVKa/dppWOfSUw==",
|
||||
"version": "1.308.0",
|
||||
"resolved": "https://registry.npmjs.org/posthog-js/-/posthog-js-1.308.0.tgz",
|
||||
"integrity": "sha512-6zlYlltfEV8GTSJMbb6+TEIon6fTY/pO0G6HATWnUmBQ06oY9F8PCtGdWQrYgFtJKuXby5Quzz9caN/V8D35XQ==",
|
||||
"license": "SEE LICENSE IN LICENSE",
|
||||
"dependencies": {
|
||||
"@posthog/core": "1.7.1",
|
||||
"@posthog/core": "1.8.0",
|
||||
"core-js": "^3.38.1",
|
||||
"fflate": "^0.4.8",
|
||||
"preact": "^10.19.3",
|
||||
@@ -18179,9 +18199,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/rolldown-vite": {
|
||||
"version": "7.2.10",
|
||||
"resolved": "https://registry.npmjs.org/rolldown-vite/-/rolldown-vite-7.2.10.tgz",
|
||||
"integrity": "sha512-v2ekZjuVLfumjp1Cr7LSQM1n2oOo3+gMruhOgT0Q4/cQ2J3nkTDLTAWLQQ86UHMbFYyVIN1wGh8BEZbvjkyctg==",
|
||||
"version": "7.2.11",
|
||||
"resolved": "https://registry.npmjs.org/rolldown-vite/-/rolldown-vite-7.2.11.tgz",
|
||||
"integrity": "sha512-WwCantGLbztBNipg+WwcA+a1c3Mo9LPY0VZ35IFXnUsQyZzsMHtzmy+H5PqELPj3AOauI9L/HMCjoJZp3i9eFg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
@@ -18495,13 +18515,6 @@
|
||||
"node": "^20.19.0 || >=22.12.0"
|
||||
}
|
||||
},
|
||||
"node_modules/rolldown-vite/node_modules/@rolldown/pluginutils": {
|
||||
"version": "1.0.0-beta.53",
|
||||
"resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.53.tgz",
|
||||
"integrity": "sha512-vENRlFU4YbrwVqNDZ7fLvy+JR1CRkyr01jhSiDpE1u6py3OMzQfztQU2jxykW3ALNxO4kSlqIDeYyD0Y9RcQeQ==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/rolldown-vite/node_modules/fdir": {
|
||||
"version": "6.5.0",
|
||||
"resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz",
|
||||
@@ -18768,9 +18781,9 @@
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/sass": {
|
||||
"version": "1.96.0",
|
||||
"resolved": "https://registry.npmjs.org/sass/-/sass-1.96.0.tgz",
|
||||
"integrity": "sha512-8u4xqqUeugGNCYwr9ARNtQKTOj4KmYiJAVKXf2CTIivTCR51j96htbMKWDru8H5SaQWpyVgTfOF8Ylyf5pun1Q==",
|
||||
"version": "1.97.0",
|
||||
"resolved": "https://registry.npmjs.org/sass/-/sass-1.97.0.tgz",
|
||||
"integrity": "sha512-KR0igP1z4avUJetEuIeOdDlwaUDvkH8wSx7FdSjyYBS3dpyX3TzHfAMO0G1Q4/3cdjcmi3r7idh+KCmKqS+KeQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
@@ -18874,55 +18887,37 @@
|
||||
}
|
||||
},
|
||||
"node_modules/shiki": {
|
||||
"version": "3.19.0",
|
||||
"resolved": "https://registry.npmjs.org/shiki/-/shiki-3.19.0.tgz",
|
||||
"integrity": "sha512-77VJr3OR/VUZzPiStyRhADmO2jApMM0V2b1qf0RpfWya8Zr1PeZev5AEpPGAAKWdiYUtcZGBE4F5QvJml1PvWA==",
|
||||
"version": "3.20.0",
|
||||
"resolved": "https://registry.npmjs.org/shiki/-/shiki-3.20.0.tgz",
|
||||
"integrity": "sha512-kgCOlsnyWb+p0WU+01RjkCH+eBVsjL1jOwUYWv0YDWkM2/A46+LDKVs5yZCUXjJG6bj4ndFoAg5iLIIue6dulg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@shikijs/core": "3.19.0",
|
||||
"@shikijs/engine-javascript": "3.19.0",
|
||||
"@shikijs/engine-oniguruma": "3.19.0",
|
||||
"@shikijs/langs": "3.19.0",
|
||||
"@shikijs/themes": "3.19.0",
|
||||
"@shikijs/types": "3.19.0",
|
||||
"@shikijs/core": "3.20.0",
|
||||
"@shikijs/engine-javascript": "3.20.0",
|
||||
"@shikijs/engine-oniguruma": "3.20.0",
|
||||
"@shikijs/langs": "3.20.0",
|
||||
"@shikijs/themes": "3.20.0",
|
||||
"@shikijs/types": "3.20.0",
|
||||
"@shikijs/vscode-textmate": "^10.0.2",
|
||||
"@types/hast": "^3.0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/shiki/node_modules/@shikijs/core": {
|
||||
"version": "3.19.0",
|
||||
"resolved": "https://registry.npmjs.org/@shikijs/core/-/core-3.19.0.tgz",
|
||||
"integrity": "sha512-L7SrRibU7ZoYi1/TrZsJOFAnnHyLTE1SwHG1yNWjZIVCqjOEmCSuK2ZO9thnRbJG6TOkPp+Z963JmpCNw5nzvA==",
|
||||
"version": "3.20.0",
|
||||
"resolved": "https://registry.npmjs.org/@shikijs/core/-/core-3.20.0.tgz",
|
||||
"integrity": "sha512-f2ED7HYV4JEk827mtMDwe/yQ25pRiXZmtHjWF8uzZKuKiEsJR7Ce1nuQ+HhV9FzDcbIo4ObBCD9GPTzNuy9S1g==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@shikijs/types": "3.19.0",
|
||||
"@shikijs/types": "3.20.0",
|
||||
"@shikijs/vscode-textmate": "^10.0.2",
|
||||
"@types/hast": "^3.0.4",
|
||||
"hast-util-to-html": "^9.0.5"
|
||||
}
|
||||
},
|
||||
"node_modules/shiki/node_modules/@shikijs/langs": {
|
||||
"version": "3.19.0",
|
||||
"resolved": "https://registry.npmjs.org/@shikijs/langs/-/langs-3.19.0.tgz",
|
||||
"integrity": "sha512-dBMFzzg1QiXqCVQ5ONc0z2ebyoi5BKz+MtfByLm0o5/nbUu3Iz8uaTCa5uzGiscQKm7lVShfZHU1+OG3t5hgwg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@shikijs/types": "3.19.0"
|
||||
}
|
||||
},
|
||||
"node_modules/shiki/node_modules/@shikijs/themes": {
|
||||
"version": "3.19.0",
|
||||
"resolved": "https://registry.npmjs.org/@shikijs/themes/-/themes-3.19.0.tgz",
|
||||
"integrity": "sha512-H36qw+oh91Y0s6OlFfdSuQ0Ld+5CgB/VE6gNPK+Hk4VRbVG/XQgkjnt4KzfnnoO6tZPtKJKHPjwebOCfjd6F8A==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@shikijs/types": "3.19.0"
|
||||
}
|
||||
},
|
||||
"node_modules/shiki/node_modules/@shikijs/types": {
|
||||
"version": "3.19.0",
|
||||
"resolved": "https://registry.npmjs.org/@shikijs/types/-/types-3.19.0.tgz",
|
||||
"integrity": "sha512-Z2hdeEQlzuntf/BZpFG8a+Fsw9UVXdML7w0o3TgSXV3yNESGon+bs9ITkQb3Ki7zxoXOOu5oJWqZ2uto06V9iQ==",
|
||||
"version": "3.20.0",
|
||||
"resolved": "https://registry.npmjs.org/@shikijs/types/-/types-3.20.0.tgz",
|
||||
"integrity": "sha512-lhYAATn10nkZcBQ0BlzSbJA3wcmL5MXUUF8d2Zzon6saZDlToKaiRX60n2+ZaHJCmXEcZRWNzn+k9vplr8Jhsw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@shikijs/vscode-textmate": "^10.0.2",
|
||||
@@ -20112,16 +20107,16 @@
|
||||
}
|
||||
},
|
||||
"node_modules/typescript-eslint": {
|
||||
"version": "8.49.0",
|
||||
"resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.49.0.tgz",
|
||||
"integrity": "sha512-zRSVH1WXD0uXczCXw+nsdjGPUdx4dfrs5VQoHnUWmv1U3oNlAKv4FUNdLDhVUg+gYn+a5hUESqch//Rv5wVhrg==",
|
||||
"version": "8.50.0",
|
||||
"resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.50.0.tgz",
|
||||
"integrity": "sha512-Q1/6yNUmCpH94fbgMUMg2/BSAr/6U7GBk61kZTv1/asghQOWOjTlp9K8mixS5NcJmm2creY+UFfGeW/+OcA64A==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/eslint-plugin": "8.49.0",
|
||||
"@typescript-eslint/parser": "8.49.0",
|
||||
"@typescript-eslint/typescript-estree": "8.49.0",
|
||||
"@typescript-eslint/utils": "8.49.0"
|
||||
"@typescript-eslint/eslint-plugin": "8.50.0",
|
||||
"@typescript-eslint/parser": "8.50.0",
|
||||
"@typescript-eslint/typescript-estree": "8.50.0",
|
||||
"@typescript-eslint/utils": "8.50.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||
@@ -21117,9 +21112,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/vue-router": {
|
||||
"version": "4.6.3",
|
||||
"resolved": "https://registry.npmjs.org/vue-router/-/vue-router-4.6.3.tgz",
|
||||
"integrity": "sha512-ARBedLm9YlbvQomnmq91Os7ck6efydTSpRP3nuOKCvgJOHNrhRoJDSKtee8kcL1Vf7nz6U+PMBL+hTvR3bTVQg==",
|
||||
"version": "4.6.4",
|
||||
"resolved": "https://registry.npmjs.org/vue-router/-/vue-router-4.6.4.tgz",
|
||||
"integrity": "sha512-Hz9q5sa33Yhduglwz6g9skT8OBPii+4bFn88w6J+J4MfEo4KRRpmiNG/hHHkdbRFlLBOqxN8y8gf2Fb0MTUgVg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@vue/devtools-api": "^6.6.4"
|
||||
|
||||
@@ -59,17 +59,17 @@
"path-browserify": "^1.0.1",
"pdfjs-dist": "^5.4.449",
"pinia": "^3.0.4",
"posthog-js": "^1.304.0",
"posthog-js": "^1.308.0",
"rapidoc": "^9.3.8",
"semver": "^7.7.3",
"shiki": "^3.19.0",
"shiki": "^3.20.0",
"vue": "^3.5.25",
"vue-axios": "^3.5.2",
"vue-chartjs": "^5.3.3",
"vue-gtag": "^3.6.3",
"vue-i18n": "^11.2.2",
"vue-material-design-icons": "^5.3.1",
"vue-router": "^4.6.3",
"vue-router": "^4.6.4",
"vue-sidebar-menu": "^5.9.1",
"vue-virtual-scroller": "^2.0.0-beta.8",
"vue3-popper": "^1.5.0",
@@ -80,10 +80,10 @@
"devDependencies": {
"@codecov/vite-plugin": "^1.9.1",
"@esbuild-plugins/node-modules-polyfill": "^0.2.2",
"@eslint/js": "^9.39.1",
"@eslint/js": "^9.39.2",
"@playwright/test": "^1.57.0",
"@rushstack/eslint-patch": "^1.14.1",
"@shikijs/markdown-it": "^3.19.0",
"@shikijs/markdown-it": "^3.20.0",
"@storybook/addon-themes": "^9.1.16",
"@storybook/addon-vitest": "^9.1.16",
"@storybook/test-runner": "^0.23.0",
@@ -91,14 +91,14 @@
"@types/humanize-duration": "^3.27.4",
"@types/js-yaml": "^4.0.9",
"@types/moment": "^2.13.0",
"@types/node": "^25.0.0",
"@types/node": "^25.0.3",
"@types/nprogress": "^0.2.3",
"@types/path-browserify": "^1.0.3",
"@types/semver": "^7.7.1",
"@types/testing-library__jest-dom": "^6.0.0",
"@types/testing-library__user-event": "^4.2.0",
"@typescript-eslint/parser": "^8.49.0",
"@vitejs/plugin-vue": "^6.0.2",
"@typescript-eslint/parser": "^8.50.0",
"@vitejs/plugin-vue": "^6.0.3",
"@vitejs/plugin-vue-jsx": "^5.1.2",
"@vitest/browser": "^3.2.4",
"@vitest/coverage-v8": "^3.2.4",
@@ -107,7 +107,7 @@
"@vueuse/router": "^14.1.0",
"change-case": "5.4.4",
"cross-env": "^10.1.0",
"eslint": "^9.39.1",
"eslint": "^9.39.2",
"eslint-plugin-storybook": "^9.1.16",
"eslint-plugin-vue": "^9.33.0",
"globals": "^16.5.0",
@@ -120,29 +120,29 @@
"playwright": "^1.55.0",
"prettier": "^3.7.4",
"rimraf": "^6.1.2",
"rolldown-vite": "^7.2.10",
"rolldown-vite": "^7.2.11",
"rollup-plugin-copy": "^3.5.0",
"sass": "^1.96.0",
"sass": "^1.97.0",
"storybook": "^9.1.16",
"storybook-vue3-router": "^6.0.2",
"ts-node": "^10.9.2",
"typescript": "^5.9.3",
"typescript-eslint": "^8.49.0",
"typescript-eslint": "^8.50.0",
"uuid": "^13.0.0",
"vite": "npm:rolldown-vite@latest",
"vitest": "^3.2.4",
"vue-tsc": "^3.1.8"
},
"optionalDependencies": {
"@esbuild/darwin-arm64": "^0.27.1",
"@esbuild/darwin-x64": "^0.27.1",
"@esbuild/linux-x64": "^0.27.1",
"@rollup/rollup-darwin-arm64": "^4.53.3",
"@rollup/rollup-darwin-x64": "^4.53.3",
"@rollup/rollup-linux-x64-gnu": "^4.53.3",
"@swc/core-darwin-arm64": "^1.15.3",
"@swc/core-darwin-x64": "^1.15.3",
"@swc/core-linux-x64-gnu": "^1.15.3"
"@esbuild/darwin-arm64": "^0.27.2",
"@esbuild/darwin-x64": "^0.27.2",
"@esbuild/linux-x64": "^0.27.2",
"@rollup/rollup-darwin-arm64": "^4.53.5",
"@rollup/rollup-darwin-x64": "^4.53.5",
"@rollup/rollup-linux-x64-gnu": "^4.53.5",
"@swc/core-darwin-arm64": "^1.15.5",
"@swc/core-darwin-x64": "^1.15.5",
"@swc/core-linux-x64-gnu": "^1.15.5"
},
"overrides": {
"bootstrap": {
@@ -47,7 +47,7 @@
import {ref, computed, watch, onMounted, nextTick, useAttrs} from "vue";
import {useRoute} from "vue-router";
import EnterpriseBadge from "./EnterpriseBadge.vue";
import BlueprintDetail from "./flows/blueprints/BlueprintDetail.vue";
import BlueprintDetail from "../override/components/flows/blueprints/BlueprintDetail.vue";

interface Tab {
name?: string;
@@ -204,24 +204,26 @@
className="row-action"
>
<template #default="scope">
<el-button v-if="scope.row.executionId || scope.row.evaluateRunningDate">
<Kicon
:tooltip="$t(`unlock trigger.tooltip.${scope.row.executionId ? 'execution' : 'evaluation'}`)"
placement="left"
@click="triggerToUnlock = scope.row"
>
<LockOff />
</Kicon>
</el-button>
<el-button>
<Kicon
:tooltip="$t('delete trigger')"
placement="left"
@click="confirmDeleteTrigger(scope.row)"
>
<Delete />
</Kicon>
</el-button>
<div class="action-container">
<el-button v-if="scope.row.executionId || scope.row.evaluateRunningDate">
<Kicon
:tooltip="$t(`unlock trigger.tooltip.${scope.row.executionId ? 'execution' : 'evaluation'}`)"
placement="left"
@click="triggerToUnlock = scope.row"
>
<LockOff />
</Kicon>
</el-button>
<el-button>
<Kicon
:tooltip="$t('delete trigger')"
placement="left"
@click="confirmDeleteTrigger(scope.row)"
>
<Delete />
</Kicon>
</el-button>
</div>
</template>
</el-table-column>
<el-table-column :label="$t('backfill')" columnKey="backfill">
@@ -855,6 +857,12 @@
align-items: center;
}

.action-container {
display: flex;
align-items: center;
gap: 5px;
}

.statusIcon {
font-size: large;
}
@@ -927,4 +935,4 @@
}
}
}
</style>
</style>
@@ -25,22 +25,18 @@ export function applyDefaultFilters(
includeScope,
legacyQuery,
}: DefaultFilterOptions = {}): { query: LocationQuery, change: boolean } {

if(currentQuery && Object.keys(currentQuery).length > 0) {
return {
query: currentQuery,
change: false,
}
}

const query = {...currentQuery};
let change = false;

if (namespace === undefined && defaultNamespace() && !hasFilterKey(query, NAMESPACE_FILTER_PREFIX)) {
query[legacyQuery ? "namespace" : `${NAMESPACE_FILTER_PREFIX}[PREFIX]`] = defaultNamespace();
change = true;
}

if (includeScope && !hasFilterKey(query, SCOPE_FILTER_PREFIX)) {
query[legacyQuery ? "scope" : `${SCOPE_FILTER_PREFIX}[EQUALS]`] = "USER";
change = true;
}

const TIME_FILTER_KEYS = /startDate|endDate|timeRange/;
@@ -48,9 +44,10 @@ export function applyDefaultFilters(
if (includeTimeRange && !Object.keys(query).some(key => TIME_FILTER_KEYS.test(key))) {
const defaultDuration = useMiscStore().configs?.chartDefaultDuration ?? "P30D";
query[legacyQuery ? "timeRange" : `${TIME_RANGE_FILTER_PREFIX}[EQUALS]`] = defaultDuration;
change = true;
}

return {query, change: true};
return {query, change};
}

export function useDefaultFilter(
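For context on the applyDefaultFilters hunk above: the function now returns change: true only when it actually injected a default value, instead of reporting a change unconditionally. A minimal TypeScript sketch of that pattern follows; the Query type, the applyDefaults name, the filters[...] key shapes, and the example values ("company.team", "io.kestra", "P7D") are simplified stand-ins for illustration, not the real Kestra helpers (LocationQuery, defaultNamespace, hasFilterKey, the filter prefix constants).

// Sketch of "apply defaults and only report a change when one was applied".
// Query stands in for vue-router's LocationQuery; defaults are hard-coded here.
type Query = Record<string, string>;

function applyDefaults(current: Query = {}): { query: Query; change: boolean } {
    const query = {...current};
    let change = false;

    // Inject a default namespace filter only when none is present,
    // and remember that the query was modified.
    if (!Object.keys(query).some(key => key.startsWith("filters[namespace]"))) {
        query["filters[namespace][PREFIX]"] = "company.team"; // hypothetical default
        change = true;
    }

    // Same idea for the default time range (P30D is the fallback seen in the diff).
    if (!Object.keys(query).some(key => /startDate|endDate|timeRange/.test(key))) {
        query["filters[timeRange][EQUALS]"] = "P30D";
        change = true;
    }

    // `change` is true only when a default was added, so callers can skip
    // a redundant router replace when the incoming query was already complete.
    return {query, change};
}

// Usage: a query that already carries both filters comes back with change === false.
console.log(applyDefaults({
    "filters[namespace][PREFIX]": "io.kestra",
    "filters[timeRange][EQUALS]": "P7D",
}).change); // false
console.log(applyDefaults({}).change); // true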
Some files were not shown because too many files have changed in this diff.