Compare commits


1 Commit

Author SHA1 Message Date
Roman Acevedo 9575cc1c87 fake commit to run flaky tests 2025-12-05 18:07:27 +01:00
104 changed files with 1039 additions and 1274 deletions

View File

@@ -64,7 +64,6 @@ jobs:
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
-GH_PERSONAL_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
publish-develop-maven:

View File

@@ -32,4 +32,3 @@ jobs:
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
-GH_PERSONAL_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}

View File

@@ -21,7 +21,7 @@ plugins {
// test
id "com.adarshr.test-logger" version "4.0.0"
-id "org.sonarqube" version "7.2.0.6526"
+id "org.sonarqube" version "7.1.0.6387"
id 'jacoco-report-aggregation'
// helper

View File

@@ -4,16 +4,13 @@ import io.kestra.core.utils.MapUtils;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.annotation.Nullable;
import jakarta.validation.constraints.NotEmpty;
-import jakarta.validation.constraints.Pattern;
import java.util.*;
import java.util.function.Predicate;
import java.util.stream.Collectors;
@Schema(description = "A key/value pair that can be attached to a Flow or Execution. Labels are often used to organize and categorize objects.")
-public record Label(
-@NotEmpty @Pattern(regexp = "^[\\p{Ll}][\\p{L}0-9._-]*$", message = "Invalid label key. A valid key contains only lowercase letters numbers hyphens (-) underscores (_) or periods (.) and must begin with a lowercase letter.") String key,
-@NotEmpty String value) {
+public record Label(@NotEmpty String key, @NotEmpty String value) {
public static final String SYSTEM_PREFIX = "system.";
// system labels
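
For readers comparing the two Label signatures above, the removed key constraint can be exercised with plain java.util.regex; this is a minimal standalone sketch (class name and sample keys are illustrative, taken from the test expectations further down), not Kestra's actual validator wiring.

import java.util.regex.Pattern;

// Sketch only: the pattern string is the one from the @Pattern annotation above;
// a key must start with a lowercase letter and may contain letters, digits, '.', '_' and '-'.
public class LabelKeyPatternSketch {
    private static final Pattern KEY = Pattern.compile("^[\\p{Ll}][\\p{L}0-9._-]*$");

    public static void main(String[] args) {
        System.out.println(KEY.matcher("foo-bar_baz123").matches()); // true
        System.out.println(KEY.matcher("foo.bar").matches());        // true, dot is allowed
        System.out.println(KEY.matcher("Foo").matches());            // false, uppercase start
        System.out.println(KEY.matcher("9test").matches());          // false, digit start
    }
}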

View File

@@ -94,7 +94,7 @@ public record QueryFilter(
KIND("kind") {
@Override
public List<Op> supportedOp() {
-return List.of(Op.EQUALS,Op.NOT_EQUALS, Op.IN, Op.NOT_IN);
+return List.of(Op.EQUALS,Op.NOT_EQUALS);
}
},
LABELS("labels") {
@@ -106,7 +106,7 @@
FLOW_ID("flowId") {
@Override
public List<Op> supportedOp() {
-return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.REGEX, Op.IN, Op.NOT_IN, Op.PREFIX);
+return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.REGEX);
}
},
UPDATED("updated") {
@@ -226,7 +226,7 @@
FLOW {
@Override
public List<Field> supportedField() {
-return List.of(Field.LABELS, Field.NAMESPACE, Field.QUERY, Field.SCOPE, Field.FLOW_ID);
+return List.of(Field.LABELS, Field.NAMESPACE, Field.QUERY, Field.SCOPE);
}
},
NAMESPACE {
@@ -241,7 +241,7 @@
return List.of(
Field.QUERY, Field.SCOPE, Field.FLOW_ID, Field.START_DATE, Field.END_DATE,
Field.STATE, Field.LABELS, Field.TRIGGER_EXECUTION_ID, Field.CHILD_FILTER,
-Field.NAMESPACE, Field.KIND
+Field.NAMESPACE,Field.KIND
);
}
},
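
The hunks above all follow the same shape: each Field enum constant overrides supportedOp()/supportedField() to declare what it accepts. A minimal self-contained sketch of that pattern (the enum names and the second constant's list are illustrative, not copied from QueryFilter):

import java.util.List;

// Illustrative sketch of the "enum constant overrides an abstract method" pattern used by QueryFilter.
public class SupportedOpSketch {
    enum Op { EQUALS, NOT_EQUALS, IN, NOT_IN }

    enum Field {
        KIND {
            @Override
            List<Op> supportedOp() {
                return List.of(Op.EQUALS, Op.NOT_EQUALS);
            }
        },
        OTHER {
            @Override
            List<Op> supportedOp() {
                // Hypothetical field accepting every operation, for contrast.
                return List.of(Op.values());
            }
        };

        abstract List<Op> supportedOp();
    }

    public static void main(String[] args) {
        System.out.println(Field.KIND.supportedOp());  // [EQUALS, NOT_EQUALS]
        System.out.println(Field.OTHER.supportedOp()); // [EQUALS, NOT_EQUALS, IN, NOT_IN]
    }
}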

View File

@@ -267,10 +267,6 @@ public class State {
return this == Type.RUNNING || this == Type.KILLING;
}
-public boolean onlyRunning() {
-return this == Type.RUNNING;
-}
public boolean isFailed() {
return this == Type.FAILED;
}

View File

@@ -93,7 +93,7 @@ public class Property<T> {
* @return a new {@link Property} without a pre-rendered value
*/
public Property<T> skipCache() {
-return new Property<>(expression, true);
+return Property.ofExpression(expression);
}
/**

View File

@@ -158,7 +158,11 @@ public class FlowInputOutput {
File tempFile = File.createTempFile(prefix, fileExtension);
try (var inputStream = fileUpload.getInputStream();
var outputStream = new FileOutputStream(tempFile)) {
-inputStream.transferTo(outputStream);
+long transferredBytes = inputStream.transferTo(outputStream);
+if (transferredBytes == 0) {
+sink.error(new KestraRuntimeException("Can't upload file: " + fileUpload.getFilename()));
+return;
+}
URI from = storageInterface.from(execution, inputId, fileName, tempFile);
sink.next(Map.entry(inputId, from.toString()));
} finally {
@@ -378,11 +382,11 @@ public class FlowInputOutput {
@SuppressWarnings("unchecked")
private static <T> Object resolveDefaultPropertyAs(Input<?> input, PropertyContext renderer, Class<T> clazz) throws IllegalVariableEvaluationException {
-return Property.as((Property<T>) input.getDefaults().skipCache(), renderer, clazz);
+return Property.as((Property<T>) input.getDefaults(), renderer, clazz);
}
@SuppressWarnings("unchecked")
private static <T> Object resolveDefaultPropertyAsList(Input<?> input, PropertyContext renderer, Class<T> clazz) throws IllegalVariableEvaluationException {
-return Property.asList((Property<List<T>>) input.getDefaults().skipCache(), renderer, clazz);
+return Property.asList((Property<List<T>>) input.getDefaults(), renderer, clazz);
}
private RunContext buildRunContextForExecutionAndInputs(final FlowInterface flow, final Execution execution, Map<String, InputAndValue> dependencies, final boolean decryptSecrets) {
@@ -498,8 +502,8 @@ public class FlowInputOutput {
yield storageInterface.from(execution, id, current.toString().substring(current.toString().lastIndexOf("/") + 1), new File(current.toString()));
}
}
-case JSON -> (current instanceof Map || current instanceof Collection<?>) ? current : JacksonMapper.toObject(current.toString());
-case YAML -> (current instanceof Map || current instanceof Collection<?>) ? current : YAML_MAPPER.readValue(current.toString(), JacksonMapper.OBJECT_TYPE_REFERENCE);
+case JSON -> JacksonMapper.toObject(current.toString());
+case YAML -> YAML_MAPPER.readValue(current.toString(), JacksonMapper.OBJECT_TYPE_REFERENCE);
case URI -> {
Matcher matcher = URI_PATTERN.matcher(current.toString());
if (matcher.matches()) {
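
The first hunk in this file leans on the fact that InputStream.transferTo (Java 9+) reports how many bytes were copied. A minimal sketch of that guard outside the reactive pipeline (the exception type and file name are placeholders; the real code reports the failure through the Flux sink):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

// Sketch only: copy a stream and treat a zero-byte transfer as a failed upload.
public class TransferGuardSketch {
    static long copyOrFail(InputStream in, OutputStream out, String fileName) throws IOException {
        long transferredBytes = in.transferTo(out);
        if (transferredBytes == 0) {
            throw new IOException("Can't upload file: " + fileName);
        }
        return transferredBytes;
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        System.out.println(copyOrFail(new ByteArrayInputStream("hello".getBytes()), out, "hello.txt")); // 5
    }
}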

View File

@@ -65,9 +65,10 @@ public class ListUtils {
}
public static List<String> convertToListString(Object object){
-return convertToList(object)
-.stream()
-.map(Object::toString)
-.toList();
+if (object instanceof List<?> list && (list.isEmpty() || list.getFirst() instanceof String)) {
+return (List<String>) list;
+} else {
+throw new IllegalArgumentException("%s in not an instance of List of String".formatted(object));
+}
}
}
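
To make the behavioural difference in this hunk concrete, here is a small standalone sketch of the two convertToListString shapes side by side (simplified: the stream variant takes a List directly instead of going through convertToList, and the class name is illustrative):

import java.util.List;

// Sketch only: contrast the stringify-everything variant with the type-checked variant.
public class ConvertToListStringSketch {
    static List<String> viaToString(List<?> list) {
        return list.stream().map(Object::toString).toList();
    }

    @SuppressWarnings("unchecked")
    static List<String> viaTypeCheck(Object object) {
        if (object instanceof List<?> list && (list.isEmpty() || list.getFirst() instanceof String)) {
            return (List<String>) list;
        }
        throw new IllegalArgumentException(object + " is not a List of String");
    }

    public static void main(String[] args) {
        System.out.println(viaToString(List.of(1, 2, 3)));   // [1, 2, 3]
        System.out.println(viaTypeCheck(List.of("a", "b"))); // [a, b]
        // viaTypeCheck(List.of(1, 2, 3)) would throw IllegalArgumentException.
    }
}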

View File

@@ -32,12 +32,10 @@ public class NamespaceFilesUtils {
private ExecutorsUtils executorsUtils;
private ExecutorService executorService;
-private int maxThreads;
@PostConstruct
public void postConstruct() {
-this.maxThreads = Math.max(Runtime.getRuntime().availableProcessors() * 4, 32);
-this.executorService = executorsUtils.maxCachedThreadPool(maxThreads, "namespace-file");
+this.executorService = executorsUtils.maxCachedThreadPool(Math.max(Runtime.getRuntime().availableProcessors() * 4, 32), "namespace-file");
}
public void loadNamespaceFiles(
@@ -65,11 +63,7 @@
matchedNamespaceFiles.addAll(files);
}
-// Use half of the available threads to avoid impacting concurrent tasks
-int parallelism = maxThreads / 2;
Flux.fromIterable(matchedNamespaceFiles)
-.parallel(parallelism)
-.runOn(Schedulers.fromExecutorService(executorService))
.doOnNext(throwConsumer(nsFile -> {
InputStream content = runContext.storage().getFile(nsFile.uri());
Path path = folderPerNamespace ?
@@ -77,7 +71,7 @@
Path.of(nsFile.path());
runContext.workingDir().putFile(path, content, fileExistComportment);
}))
-.sequential()
+.publishOn(Schedulers.fromExecutorService(executorService))
.blockLast();
Duration duration = stopWatch.getDuration();
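
For context on the Reactor change above, this is a minimal sketch of the two pipeline shapes involved: parallel rails scheduled on an executor versus a single rail with a publishOn hop. The file names, class name, and pool size are illustrative; only the operator chain mirrors the diff.

import reactor.core.publisher.Flux;
import reactor.core.scheduler.Schedulers;

import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

// Sketch only: compare a parallel(..)/runOn(..)/sequential() pipeline with a plain publishOn(..) pipeline.
public class NamespaceFileCopySketch {
    public static void main(String[] args) {
        ExecutorService executorService = Executors.newFixedThreadPool(4);
        List<String> files = List.of("a.txt", "b.txt", "c.txt");

        // Shape 1: fan work out over parallel rails backed by the executor.
        Flux.fromIterable(files)
            .parallel(2)
            .runOn(Schedulers.fromExecutorService(executorService))
            .doOnNext(f -> System.out.println(Thread.currentThread().getName() + " copied " + f))
            .sequential()
            .blockLast();

        // Shape 2: stay on one rail and only hop to the executor downstream of doOnNext.
        Flux.fromIterable(files)
            .doOnNext(f -> System.out.println("copied " + f))
            .publishOn(Schedulers.fromExecutorService(executorService))
            .blockLast();

        executorService.shutdown();
    }
}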

View File

@@ -157,7 +157,7 @@ public class LoopUntil extends Task implements FlowableTask<LoopUntil.Output> {
public Instant nextExecutionDate(RunContext runContext, Execution execution, TaskRun parentTaskRun) throws IllegalVariableEvaluationException {
if (!this.reachedMaximums(runContext, execution, parentTaskRun, false)) {
-String continueLoop = runContext.render(this.condition).skipCache().as(String.class).orElse(null);
+String continueLoop = runContext.render(this.condition).as(String.class).orElse(null);
if (!TruthUtils.isTruthy(continueLoop)) {
return Instant.now().plus(runContext.render(this.getCheckFrequency().getInterval()).as(Duration.class).orElseThrow());
}

View File

@@ -123,7 +123,7 @@ public class Switch extends Task implements FlowableTask<Switch.Output> {
}
private String rendererValue(RunContext runContext) throws IllegalVariableEvaluationException {
-return runContext.render(this.value).skipCache().as(String.class).orElseThrow();
+return runContext.render(this.value).as(String.class).orElseThrow();
}
@Override

View File

@@ -32,17 +32,9 @@ import lombok.experimental.SuperBuilder;
examples = {
@Example(
code = """
-id: templated_task
-namespace: company.team
-variables:
-property: uri
-value: https://kestra.io
-tasks:
-- id: templated_task
-type: io.kestra.plugin.core.templating.TemplatedTask
spec: |
type: io.kestra.plugin.core.http.Download
-{{ vars.property }}: {{ vars.value }}
+{{ task.property }}: {{ task.value }}
"""
)
},

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.chart
name: "chart"
title: "Chart"
description: "Tasks that render dashboard charts from Kestra data sources."
body: "Use these chart widgets to visualize metrics, executions, or flow trends in dashboards; pair them with dashboard data queries and configure aggregations, groupings, and chart options for Bar, Pie, Time Series, KPI, or Table outputs."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.condition
name: "condition"
title: "Condition"
description: "Tasks that evaluate conditions to control flow execution or triggers."
body: "Use these predicates to gate tasks or triggers based on time windows, calendars, execution metadata, labels, namespaces, retries, or custom expressions; configure required parameters such as allowed states, namespaces, date ranges, or JEXL expressions to return a true/false result."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.data
name: "data"
title: "Data"
description: "Tasks that fetch Kestra executions, flows, logs, metrics, and triggers as datasets for dashboards."
body: "These data providers query Kestra repositories with filters and aggregations to feed dashboard charts; configure columns and fields (such as namespace, state, timestamp, or labels) plus any filters to shape the returned dataset for visualization."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.debug
name: "debug"
title: "Debug"
description: "Tasks that emit debug output while you develop a flow."
body: "Echo and Return help inspect variables and payloads or short-circuit execution during testing; provide the message or value to output so downstream tasks can see exactly what is being passed around."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.execution
name: "execution"
title: "Execution"
description: "Tasks that manage the lifecycle and context of a running execution."
body: "Use these tasks to assert expectations, set or unset variables, add labels, fail, exit, resume, or purge executions; supply required properties such as variable maps, label key/values, or retention rules before altering execution state."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.flow
name: "flow"
title: "Flow"
description: "Tasks that orchestrate control flow within a Kestra pipeline."
body: "Sequence, branch, loop, parallelize, or nest subflows/templates using these primitives; define embedded task lists, values for switches, iteration collections, working directories, and loop exit criteria to structure complex workflows cleanly."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.http
name: "http"
title: "HTTP"
description: "Tasks that interact with HTTP endpoints."
body: "Perform requests, downloads, or webhook triggers with configurable methods, headers, authentication, and payloads; provide the target URI plus any body or query parameters, and use response handling options to store results for downstream tasks."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core
name: "core"
title: "Core Plugins and tasks"
description: "Tasks that provide Kestra's built-in orchestration, I/O, and observability capabilities."
body: "Core plugins cover control-flow, execution management, triggers, storage, HTTP, metrics, logging, templating, and dashboard widgets; combine these foundational tasks to build reliable workflows without adding external dependencies."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.kv
name: "kv"
title: "KV"
description: "Tasks that manage key-value pairs in Kestra's KV store."
body: "Set, get, list, version, and delete namespaced keys to share state across flows; specify the key path, value for writes, and optional namespace or TTL to control how data is stored, retrieved, and purged."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.log
name: "log"
title: "Log"
description: "Tasks that write, fetch, or purge Kestra logs."
body: "Emit structured log messages, retrieve stored logs, or clean up log storage; provide message content or log query filters and consider namespace or execution scoping when purging."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.metric
name: "metric"
title: "Metric"
description: "Tasks that publish custom metrics from flows."
body: "Send counters, gauges, and timing metrics to Kestra's metric store for dashboards and alerts; define the metric name, type, value, labels, and optional timestamp to record meaningful telemetry."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.namespace
name: "namespace"
title: "Namespace"
description: "Tasks that manage namespace files and versions."
body: "Upload, download, delete, purge, or version files stored in a namespace—useful for shipping assets or configs with flows; set the target namespace, paths or glob patterns, and purge behavior to control stored artifacts."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.output
name: "output"
title: "Output"
description: "Tasks that expose outputs from a flow."
body: "Use OutputValues to publish key-value outputs for downstream tasks or subflows; declare the output map and data types that consuming tasks should read."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.runner
name: "runner"
title: "Runner"
description: "Tasks that execute commands on the Kestra worker."
body: "Run shell processes with configurable command, environment, working directory, and input/output handling; ensure commands are idempotent and set expected exit codes or resource needs when invoking external binaries."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.storage
name: "storage"
title: "Storage"
description: "Tasks that manipulate files in Kestra's internal storage."
body: "Write, delete, concatenate, split, deduplicate, filter, reverse, size, or list files used by executions; provide source and target storage URIs and any encoding or line-handling options to transform stored data safely."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.templating
name: "templating"
title: "Templating"
description: "Tasks that render dynamic task specifications from templates."
body: "TemplatedTask lets you supply a Pebble-rendered YAML spec that is parsed and executed at runtime; provide the `spec` property with a valid runnable task definition and avoid recursive templating when composing dynamic tasks."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -1,8 +0,0 @@
group: io.kestra.plugin.core.trigger
name: "trigger"
title: "Trigger"
description: "Tasks that start flows from schedules or events."
body: "Define cron-based schedules, specific date triggers, webhooks, namespace flow triggers, or toggles; set required properties like cron expressions, webhook secrets, and target flow references to control when executions fire."
videos: []
createdBy: "Kestra Core Team"
managedBy: "Kestra Core Team"

View File

@@ -134,47 +134,4 @@ class LabelTest {
Optional<ConstraintViolationException> emptyKeyLabelResult = modelValidator.isValid(new Label("", "bar"));
assertThat(emptyKeyLabelResult.isPresent()).isTrue();
}
@Test
void shouldValidateValidLabelKeys() {
// Valid keys: start with lowercase; may contain letters, numbers, hyphens, underscores, periods
assertThat(modelValidator.isValid(new Label("foo", "bar")).isPresent()).isFalse();
assertThat(modelValidator.isValid(new Label("foo-bar", "value")).isPresent()).isFalse();
assertThat(modelValidator.isValid(new Label("foo_bar", "value")).isPresent()).isFalse();
assertThat(modelValidator.isValid(new Label("foo123", "value")).isPresent()).isFalse();
assertThat(modelValidator.isValid(new Label("foo-bar_baz123", "value")).isPresent()).isFalse();
assertThat(modelValidator.isValid(new Label("a", "value")).isPresent()).isFalse();
assertThat(modelValidator.isValid(new Label("foo.bar", "value")).isPresent()).isFalse(); // dot is allowed
}
@Test
void shouldRejectInvalidLabelKeys() {
Optional<ConstraintViolationException> spaceResult = modelValidator.isValid(new Label("foo bar", "value"));
assertThat(spaceResult.isPresent()).isTrue();
Optional<ConstraintViolationException> uppercaseResult = modelValidator.isValid(new Label("Foo", "value"));
assertThat(uppercaseResult.isPresent()).isTrue();
Optional<ConstraintViolationException> emojiResult = modelValidator.isValid(new Label("💩", "value"));
assertThat(emojiResult.isPresent()).isTrue();
Optional<ConstraintViolationException> atSignResult = modelValidator.isValid(new Label("foo@bar", "value"));
assertThat(atSignResult.isPresent()).isTrue();
Optional<ConstraintViolationException> colonResult = modelValidator.isValid(new Label("foo:bar", "value"));
assertThat(colonResult.isPresent()).isTrue();
Optional<ConstraintViolationException> hyphenStartResult = modelValidator.isValid(new Label("-foo", "value"));
assertThat(hyphenStartResult.isPresent()).isTrue();
Optional<ConstraintViolationException> underscoreStartResult = modelValidator.isValid(new Label("_foo", "value"));
assertThat(underscoreStartResult.isPresent()).isTrue();
Optional<ConstraintViolationException> zeroResult = modelValidator.isValid(new Label("0", "value"));
assertThat(zeroResult.isPresent()).isTrue();
Optional<ConstraintViolationException> digitStartResult = modelValidator.isValid(new Label("9test", "value"));
assertThat(digitStartResult.isPresent()).isTrue();
}
}

View File

@@ -61,9 +61,6 @@ public class QueryFilterTest {
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.ENDS_WITH).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.CONTAINS).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.REGEX).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.IN).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.NOT_IN).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.PREFIX).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.START_DATE).operation(Op.EQUALS).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.START_DATE).operation(Op.NOT_EQUALS).build(), Resource.EXECUTION),
@@ -171,6 +168,9 @@ public class QueryFilterTest {
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.LESS_THAN).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.GREATER_THAN_OR_EQUAL_TO).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.LESS_THAN_OR_EQUAL_TO).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.IN).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.NOT_IN).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.PREFIX).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.START_DATE).operation(Op.IN).build(), Resource.EXECUTION),
Arguments.of(QueryFilter.builder().field(Field.START_DATE).operation(Op.NOT_IN).build(), Resource.EXECUTION),

View File

@@ -185,21 +185,4 @@ class FlowTest {
return YamlParser.parse(file, Flow.class);
}
@Test
void illegalNamespaceUpdate() {
Flow original = Flow.builder()
.id("my-flow")
.namespace("io.kestra.prod")
.tasks(List.of(Log.builder().id("log").type(Log.class.getName()).message("hello").build()))
.build();
Flow updated = original.toBuilder()
.namespace("io.kestra.dev")
.build();
Optional<ConstraintViolationException> validate = original.validateUpdate(updated);
assertThat(validate.isPresent()).isTrue();
assertThat(validate.get().getMessage()).contains("Illegal namespace update");
}
}

View File

@@ -52,8 +52,8 @@ import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static io.kestra.core.models.flows.FlowScope.SYSTEM;
import static io.kestra.core.models.flows.FlowScope.USER;
import static java.time.temporal.ChronoUnit.MINUTES;
import static java.time.temporal.ChronoUnit.SECONDS;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertThrows;
@@ -81,7 +81,6 @@ public abstract class AbstractExecutionRepositoryTest {
.tenantId(tenantId)
.flowId(flowId == null ? FLOW : flowId)
.flowRevision(1)
-.kind(ExecutionKind.NORMAL)
.state(finalState);
@@ -197,49 +196,15 @@ public abstract class AbstractExecutionRepositoryTest {
static Stream<Arguments> filterCombinations() {
return Stream.of(
Arguments.of(QueryFilter.builder().field(Field.QUERY).value("unittest").operation(Op.EQUALS).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.QUERY).value("unused").operation(Op.NOT_EQUALS).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.SCOPE).value(List.of(USER)).operation(Op.EQUALS).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.SCOPE).value(List.of(SYSTEM)).operation(Op.NOT_EQUALS).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value("io.kestra.unittest").operation(Op.EQUALS).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value("not.this.one").operation(Op.NOT_EQUALS).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value("o.kestra.unittes").operation(Op.CONTAINS).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value("io.kestra.uni").operation(Op.STARTS_WITH).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value("o.kestra.unittest").operation(Op.ENDS_WITH).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value("io\\.kestra\\.unittest").operation(Op.REGEX).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value(List.of("io.kestra.unittest", "unused")).operation(Op.IN).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value(List.of("unused.first", "unused.second")).operation(Op.NOT_IN).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value("io.kestra").operation(Op.PREFIX).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.KIND).value(ExecutionKind.NORMAL).operation(Op.EQUALS).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.KIND).value(ExecutionKind.TEST).operation(Op.NOT_EQUALS).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.KIND).value(List.of(ExecutionKind.NORMAL, ExecutionKind.PLAYGROUND)).operation(Op.IN).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.KIND).value(List.of(ExecutionKind.PLAYGROUND, ExecutionKind.TEST)).operation(Op.NOT_IN).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.LABELS).value(Map.of("key", "value")).operation(Op.EQUALS).build(), 1),
Arguments.of(QueryFilter.builder().field(Field.LABELS).value(Map.of("key", "unknown")).operation(Op.NOT_EQUALS).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.LABELS).value(Map.of("key", "value", "key2", "value2")).operation(Op.IN).build(), 1),
Arguments.of(QueryFilter.builder().field(Field.LABELS).value(Map.of("key1", "value1")).operation(Op.NOT_IN).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.LABELS).value("value").operation(Op.CONTAINS).build(), 1),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value(FLOW).operation(Op.EQUALS).build(), 16),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value(FLOW).operation(Op.NOT_EQUALS).build(), 13),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value("ul").operation(Op.CONTAINS).build(), 16),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value("ful").operation(Op.STARTS_WITH).build(), 16),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value("ull").operation(Op.ENDS_WITH).build(), 16),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value("[ful]{4}").operation(Op.REGEX).build(), 16),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value(List.of(FLOW, "other")).operation(Op.IN).build(), 16),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value(List.of(FLOW, "other2")).operation(Op.NOT_IN).build(), 13),
Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value("ful").operation(Op.PREFIX).build(), 16),
Arguments.of(QueryFilter.builder().field(Field.START_DATE).value(ZonedDateTime.now().minusMinutes(1)).operation(Op.GREATER_THAN).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.END_DATE).value(ZonedDateTime.now().plusMinutes(1)).operation(Op.LESS_THAN).build(), 29),
Arguments.of(QueryFilter.builder().field(Field.STATE).value(Type.RUNNING).operation(Op.EQUALS).build(), 5),
Arguments.of(QueryFilter.builder().field(Field.TRIGGER_EXECUTION_ID).value("executionTriggerId").operation(Op.EQUALS).build(), 29),
-Arguments.of(QueryFilter.builder().field(Field.CHILD_FILTER).value(ChildFilter.CHILD).operation(Op.EQUALS).build(), 29),
-Arguments.of(QueryFilter.builder().field(Field.CHILD_FILTER).value(ChildFilter.CHILD).operation(Op.NOT_EQUALS).build(), 0)
+Arguments.of(QueryFilter.builder().field(Field.CHILD_FILTER).value(ChildFilter.CHILD).operation(Op.EQUALS).build(), 29)
);
}
@@ -691,65 +656,6 @@ public abstract class AbstractExecutionRepositoryTest {
assertThat(data).first().hasFieldOrPropertyWithValue("id", execution.getId());
}
@Test
void dashboard_fetchData_365Days_verifiesDateGrouping() throws IOException {
var tenantId = TestsUtils.randomTenant(this.getClass().getSimpleName());
var executionDuration = Duration.ofMinutes(220);
var executionCreateDate = Instant.now();
// Create an execution within the 365-day range
Execution execution = Execution.builder()
.tenantId(tenantId)
.id(IdUtils.create())
.namespace("io.kestra.unittest")
.flowId("some-execution")
.flowRevision(1)
.labels(Label.from(Map.of("country", "FR")))
.state(new State(Type.SUCCESS,
List.of(new State.History(State.Type.CREATED, executionCreateDate), new State.History(Type.SUCCESS, executionCreateDate.plus(executionDuration)))))
.taskRunList(List.of())
.build();
execution = executionRepository.save(execution);
// Create an execution BEYOND 365 days (400 days ago) - should be filtered out
var executionCreateDateOld = Instant.now().minus(Duration.ofDays(400));
Execution executionOld = Execution.builder()
.tenantId(tenantId)
.id(IdUtils.create())
.namespace("io.kestra.unittest")
.flowId("some-execution-old")
.flowRevision(1)
.labels(Label.from(Map.of("country", "US")))
.state(new State(Type.SUCCESS,
List.of(new State.History(State.Type.CREATED, executionCreateDateOld), new State.History(Type.SUCCESS, executionCreateDateOld.plus(executionDuration)))))
.taskRunList(List.of())
.build();
executionRepository.save(executionOld);
var now = ZonedDateTime.now();
ArrayListTotal<Map<String, Object>> data = executionRepository.fetchData(tenantId, Executions.builder()
.type(Executions.class.getName())
.columns(Map.of(
"count", ColumnDescriptor.<Executions.Fields>builder().field(Executions.Fields.ID).agg(AggregationType.COUNT).build(),
"id", ColumnDescriptor.<Executions.Fields>builder().field(Executions.Fields.ID).build(),
"date", ColumnDescriptor.<Executions.Fields>builder().field(Executions.Fields.START_DATE).build(),
"duration", ColumnDescriptor.<Executions.Fields>builder().field(Executions.Fields.DURATION).build()
)).build(),
now.minusDays(365),
now,
null
);
// Should only return 1 execution (the recent one), not the 400-day-old execution
assertThat(data.getTotal()).isGreaterThanOrEqualTo(1L);
assertThat(data).isNotEmpty();
assertThat(data).first().hasFieldOrProperty("count");
}
private static Execution buildWithCreatedDate(String tenant, Instant instant) {
return Execution.builder()
.id(IdUtils.create())

View File

@@ -121,8 +121,7 @@ public abstract class AbstractFlowRepositoryTest {
QueryFilter.builder().field(Field.QUERY).value("filterFlowId").operation(Op.EQUALS).build(),
QueryFilter.builder().field(Field.SCOPE).value(List.of(SYSTEM)).operation(Op.EQUALS).build(),
QueryFilter.builder().field(Field.NAMESPACE).value(SYSTEM_FLOWS_DEFAULT_NAMESPACE).operation(Op.EQUALS).build(),
-QueryFilter.builder().field(Field.LABELS).value(Map.of("key", "value")).operation(Op.EQUALS).build(),
-QueryFilter.builder().field(Field.FLOW_ID).value("filterFlowId").operation(Op.EQUALS).build()
+QueryFilter.builder().field(Field.LABELS).value(Map.of("key", "value")).operation(Op.EQUALS).build()
);
}
@@ -146,6 +145,7 @@ public abstract class AbstractFlowRepositoryTest {
static Stream<QueryFilter> errorFilterCombinations() {
return Stream.of(
+QueryFilter.builder().field(Field.FLOW_ID).value("sleep").operation(Op.EQUALS).build(),
QueryFilter.builder().field(Field.START_DATE).value(ZonedDateTime.now().minusMinutes(1)).operation(Op.GREATER_THAN).build(),
QueryFilter.builder().field(Field.END_DATE).value(ZonedDateTime.now().plusMinutes(1)).operation(Op.LESS_THAN).build(),
QueryFilter.builder().field(Field.STATE).value(State.Type.RUNNING).operation(Op.EQUALS).build(),

View File

@@ -1,91 +0,0 @@
package io.kestra.core.runners;
import io.kestra.core.junit.annotations.FlakyTest;
import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.junit.annotations.LoadFlows;
import jakarta.inject.Inject;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
@KestraTest(startRunner = true)
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
public abstract class AbstractRunnerConcurrencyTest {
public static final String TENANT_1 = "tenant1";
@Inject
protected FlowConcurrencyCaseTest flowConcurrencyCaseTest;
@Test
@LoadFlows({"flows/valids/flow-concurrency-cancel.yml"})
void concurrencyCancel() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyCancel();
}
@Test
@LoadFlows({"flows/valids/flow-concurrency-fail.yml"})
void concurrencyFail() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyFail();
}
@Test
@LoadFlows({"flows/valids/flow-concurrency-queue.yml"})
void concurrencyQueue() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyQueue();
}
@Test
@LoadFlows({"flows/valids/flow-concurrency-queue-pause.yml"})
protected void concurrencyQueuePause() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyQueuePause();
}
@Test
@LoadFlows({"flows/valids/flow-concurrency-cancel-pause.yml"})
protected void concurrencyCancelPause() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyCancelPause();
}
@Test
@LoadFlows(value = {"flows/valids/flow-concurrency-for-each-item.yaml", "flows/valids/flow-concurrency-queue.yml"}, tenantId = TENANT_1)
protected void flowConcurrencyWithForEachItem() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyWithForEachItem(TENANT_1);
}
@Test
@LoadFlows({"flows/valids/flow-concurrency-queue-fail.yml"})
protected void concurrencyQueueRestarted() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyQueueRestarted();
}
@Test
@LoadFlows({"flows/valids/flow-concurrency-queue-after-execution.yml"})
void concurrencyQueueAfterExecution() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyQueueAfterExecution();
}
@Test
@LoadFlows(value = {"flows/valids/flow-concurrency-subflow.yml", "flows/valids/flow-concurrency-cancel.yml"}, tenantId = TENANT_1)
void flowConcurrencySubflow() throws Exception {
flowConcurrencyCaseTest.flowConcurrencySubflow(TENANT_1);
}
@Test
@FlakyTest(description = "Only flaky in CI")
@LoadFlows({"flows/valids/flow-concurrency-parallel-subflow-kill.yaml", "flows/valids/flow-concurrency-parallel-subflow-kill-child.yaml", "flows/valids/flow-concurrency-parallel-subflow-kill-grandchild.yaml"})
protected void flowConcurrencyParallelSubflowKill() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyParallelSubflowKill();
}
@Test
@LoadFlows({"flows/valids/flow-concurrency-queue-killed.yml"})
void flowConcurrencyKilled() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyKilled();
}
@Test
@FlakyTest(description = "Only flaky in CI")
@LoadFlows({"flows/valids/flow-concurrency-queue-killed.yml"})
void flowConcurrencyQueueKilled() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyQueueKilled();
}
}

View File

@@ -66,6 +66,9 @@ public abstract class AbstractRunnerTest {
@Inject
protected LoopUntilCaseTest loopUntilTestCaseTest;
+@Inject
+protected FlowConcurrencyCaseTest flowConcurrencyCaseTest;
@Inject
protected ScheduleDateCaseTest scheduleDateCaseTest;
@@ -419,6 +422,66 @@ public abstract class AbstractRunnerTest {
forEachItemCaseTest.forEachItemWithAfterExecution();
}
@Test
@LoadFlows({"flows/valids/flow-concurrency-cancel.yml"})
void concurrencyCancel() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyCancel();
}
@Test
@LoadFlows({"flows/valids/flow-concurrency-fail.yml"})
void concurrencyFail() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyFail();
}
@Test
@LoadFlows({"flows/valids/flow-concurrency-queue.yml"})
void concurrencyQueue() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyQueue();
}
@Test
@LoadFlows({"flows/valids/flow-concurrency-queue-pause.yml"})
protected void concurrencyQueuePause() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyQueuePause();
}
@Test
@LoadFlows({"flows/valids/flow-concurrency-cancel-pause.yml"})
protected void concurrencyCancelPause() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyCancelPause();
}
@Test
@LoadFlows(value = {"flows/valids/flow-concurrency-for-each-item.yaml", "flows/valids/flow-concurrency-queue.yml"}, tenantId = TENANT_1)
protected void flowConcurrencyWithForEachItem() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyWithForEachItem(TENANT_1);
}
@Test
@LoadFlows({"flows/valids/flow-concurrency-queue-fail.yml"})
protected void concurrencyQueueRestarted() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyQueueRestarted();
}
@Test
@LoadFlows({"flows/valids/flow-concurrency-queue-after-execution.yml"})
void concurrencyQueueAfterExecution() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyQueueAfterExecution();
}
@Test
@LoadFlows(value = {"flows/valids/flow-concurrency-subflow.yml", "flows/valids/flow-concurrency-cancel.yml"}, tenantId = TENANT_1)
void flowConcurrencySubflow() throws Exception {
flowConcurrencyCaseTest.flowConcurrencySubflow(TENANT_1);
}
@Test
@LoadFlows({"flows/valids/flow-concurrency-parallel-subflow-kill.yaml", "flows/valids/flow-concurrency-parallel-subflow-kill-child.yaml", "flows/valids/flow-concurrency-parallel-subflow-kill-grandchild.yaml"})
void flowConcurrencyParallelSubflowKill() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyParallelSubflowKill();
}
@Test
@ExecuteFlow("flows/valids/executable-fail.yml")
void badExecutable(Execution execution) {

View File

@@ -69,7 +69,6 @@ public class FlowConcurrencyCaseTest {
assertThat(shouldFailExecutions.stream().map(Execution::getState).map(State::getCurrent)).allMatch(Type.CANCELLED::equals);
} finally {
runnerUtils.killExecution(execution1);
-runnerUtils.awaitExecution(e -> e.getState().isTerminated(), execution1);
}
}
@@ -85,7 +84,6 @@ public class FlowConcurrencyCaseTest {
assertThat(shouldFailExecutions.stream().map(Execution::getState).map(State::getCurrent)).allMatch(State.Type.FAILED::equals);
} finally {
runnerUtils.killExecution(execution1);
-runnerUtils.awaitExecution(e -> e.getState().isTerminated(), execution1);
}
}
@@ -242,94 +240,6 @@ public class FlowConcurrencyCaseTest {
assertThat(terminated.getTaskRunList()).isNull();
}
public void flowConcurrencyKilled() throws QueueException, InterruptedException {
Flow flow = flowRepository
.findById(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-killed", Optional.empty())
.orElseThrow();
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-killed", null, null, Duration.ofSeconds(30));
Execution execution2 = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.QUEUED), Execution.newExecution(flow, null, null, Optional.empty()));
Execution execution3 = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.QUEUED), Execution.newExecution(flow, null, null, Optional.empty()));
try {
assertThat(execution1.getState().isRunning()).isTrue();
assertThat(execution2.getState().getCurrent()).isEqualTo(Type.QUEUED);
assertThat(execution3.getState().getCurrent()).isEqualTo(Type.QUEUED);
// we kill execution 1, execution 2 should run but not execution 3
killQueue.emit(ExecutionKilledExecution
.builder()
.state(ExecutionKilled.State.REQUESTED)
.executionId(execution1.getId())
.isOnKillCascade(true)
.tenantId(MAIN_TENANT)
.build()
);
Execution killed = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.KILLED), execution1);
assertThat(killed.getState().getCurrent()).isEqualTo(Type.KILLED);
assertThat(killed.getState().getHistories().stream().anyMatch(h -> h.getState() == Type.RUNNING)).isTrue();
// we now check that execution 2 is running
Execution running = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.RUNNING), execution2);
assertThat(running.getState().getCurrent()).isEqualTo(Type.RUNNING);
// we check that execution 3 is still queued
Thread.sleep(100); // wait a little to be 100% sure
Execution queued = runnerUtils.awaitExecution(e -> e.getState().isQueued(), execution3);
assertThat(queued.getState().getCurrent()).isEqualTo(Type.QUEUED);
} finally {
// kill everything to avoid dangling executions
runnerUtils.killExecution(execution1);
runnerUtils.killExecution(execution2);
runnerUtils.killExecution(execution3);
// await that they are all terminated, note that as KILLED is received twice, some messages would still be pending, but this is the best we can do
runnerUtils.awaitFlowExecutionNumber(3, MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-killed");
}
}
public void flowConcurrencyQueueKilled() throws QueueException, InterruptedException {
Flow flow = flowRepository
.findById(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-killed", Optional.empty())
.orElseThrow();
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-killed", null, null, Duration.ofSeconds(30));
Execution execution2 = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.QUEUED), Execution.newExecution(flow, null, null, Optional.empty()));
Execution execution3 = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.QUEUED), Execution.newExecution(flow, null, null, Optional.empty()));
try {
assertThat(execution1.getState().isRunning()).isTrue();
assertThat(execution2.getState().getCurrent()).isEqualTo(Type.QUEUED);
assertThat(execution3.getState().getCurrent()).isEqualTo(Type.QUEUED);
// we kill execution 2, execution 3 should not run
killQueue.emit(ExecutionKilledExecution
.builder()
.state(ExecutionKilled.State.REQUESTED)
.executionId(execution2.getId())
.isOnKillCascade(true)
.tenantId(MAIN_TENANT)
.build()
);
Execution killed = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.KILLED), execution2);
assertThat(killed.getState().getCurrent()).isEqualTo(Type.KILLED);
assertThat(killed.getState().getHistories().stream().noneMatch(h -> h.getState() == Type.RUNNING)).isTrue();
// we now check that execution 3 is still queued
Thread.sleep(100); // wait a little to be 100% sure
Execution queued = runnerUtils.awaitExecution(e -> e.getState().isQueued(), execution3);
assertThat(queued.getState().getCurrent()).isEqualTo(Type.QUEUED);
} finally {
// kill everything to avoid dangling executions
runnerUtils.killExecution(execution1);
runnerUtils.killExecution(execution2);
runnerUtils.killExecution(execution3);
// await that they are all terminated, note that as KILLED is received twice, some messages would still be pending, but this is the best we can do
runnerUtils.awaitFlowExecutionNumber(3, MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-killed");
}
}
private URI storageUpload(String tenantId) throws URISyntaxException, IOException {
File tempFile = File.createTempFile("file", ".txt");

View File

@@ -2,7 +2,9 @@ package io.kestra.core.runners;
import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.models.executions.Execution;
-import io.kestra.core.models.flows.*;
+import io.kestra.core.models.flows.DependsOn;
+import io.kestra.core.models.flows.Input;
+import io.kestra.core.models.flows.Type;
import io.kestra.core.models.flows.input.FileInput;
import io.kestra.core.models.flows.input.InputAndValue;
import io.kestra.core.models.flows.input.IntInput;
@@ -30,7 +32,6 @@ import org.reactivestreams.Publisher;
import reactor.core.publisher.Mono;
import java.io.ByteArrayInputStream;
-import java.io.File;
import java.io.InputStream;
import java.net.URI;
import java.nio.ByteBuffer;
@@ -411,40 +412,6 @@ class FlowInputOutputTest {
assertThat(results.get("input")).isEqualTo("default");
}
@Test
void shouldResolveZeroByteFileUpload() throws java.io.IOException {
File tempFile = File.createTempFile("empty", ".txt");
tempFile.deleteOnExit();
io.micronaut.http.multipart.CompletedFileUpload fileUpload = org.mockito.Mockito.mock(io.micronaut.http.multipart.CompletedFileUpload.class);
org.mockito.Mockito.when(fileUpload.getInputStream()).thenReturn(new java.io.FileInputStream(tempFile));
org.mockito.Mockito.when(fileUpload.getFilename()).thenReturn("empty.txt");
org.mockito.Mockito.when(fileUpload.getName()).thenReturn("empty_file");
Execution execution = Execution.builder()
.id(IdUtils.create())
.tenantId("unit_test_tenant")
.namespace("io.kestra.unittest")
.flowId("unittest")
.flowRevision(1)
.state(new State())
.build();
reactor.core.publisher.Mono<Map<String, Object>> result = flowInputOutput.readExecutionInputs(
List.of(
io.kestra.core.models.flows.input.FileInput.builder().id("empty_file").type(Type.FILE).build()
),
Flow.builder().id("unittest").namespace("io.kestra.unittest").build(),
execution,
reactor.core.publisher.Flux.just(fileUpload)
);
Map<String, Object> outputs = result.block();
Assertions.assertNotNull(outputs);
Assertions.assertTrue(outputs.containsKey("empty_file"));
}
private static class MemoryCompletedPart implements CompletedPart {
protected final String name;

View File

@@ -56,18 +56,6 @@ public class InputsTest {
@Inject
private NamespaceFactory namespaceFactory;
private static final Map<String , Object> object = Map.of(
"people", List.of(
Map.of(
"first", "Mustafa",
"last", "Tarek"
),
Map.of(
"first", "Ahmed",
"last", "Tarek"
)
)
);
public static Map<String, Object> inputs = ImmutableMap.<String, Object>builder()
.put("string", "myString")
.put("enum", "ENUM_VALUE")
@@ -79,6 +67,7 @@ public class InputsTest {
.put("time", "18:27:49")
.put("duration", "PT5M6S")
.put("file", Objects.requireNonNull(InputsTest.class.getClassLoader().getResource("application-test.yml")).getPath())
+.put("json", "{\"a\": \"b\"}")
.put("uri", "https://www.google.com")
.put("nested.string", "a string")
.put("nested.more.int", "123")
@@ -92,14 +81,11 @@ public class InputsTest {
.put("validatedTime", "11:27:49")
.put("secret", "secret")
.put("array", "[1, 2, 3]")
-.put("json1", "{\"a\": \"b\"}")
-.put("json2", object)
-.put("yaml1", """
+.put("yaml", """
some: property
alist:
- of
- values""")
-.put("yaml2", object)
.build();
@Inject @Inject
@@ -168,6 +154,7 @@ public class InputsTest {
assertThat(typeds.get("duration")).isEqualTo(Duration.parse("PT5M6S"));
assertThat((URI) typeds.get("file")).isEqualTo(new URI("kestra:///io/kestra/tests/inputs/executions/test/inputs/file/application-test.yml"));
assertThat(CharStreams.toString(new InputStreamReader(storageInterface.get("tenant1", null, (URI) typeds.get("file"))))).isEqualTo(CharStreams.toString(new InputStreamReader(new FileInputStream((String) inputs.get("file")))));
+assertThat(typeds.get("json")).isEqualTo(Map.of("a", "b"));
assertThat(typeds.get("uri")).isEqualTo("https://www.google.com");
assertThat(((Map<String, Object>) typeds.get("nested")).get("string")).isEqualTo("a string");
assertThat((Boolean) ((Map<String, Object>) typeds.get("nested")).get("bool")).isTrue();
@@ -183,12 +170,9 @@ public class InputsTest {
assertThat(typeds.get("array")).isInstanceOf(List.class);
assertThat((List<Integer>) typeds.get("array")).hasSize(3);
assertThat((List<Integer>) typeds.get("array")).isEqualTo(List.of(1, 2, 3));
-assertThat(typeds.get("json1")).isEqualTo(Map.of("a", "b"));
-assertThat(typeds.get("json2")).isEqualTo(object);
-assertThat(typeds.get("yaml1")).isEqualTo(Map.of(
+assertThat(typeds.get("yaml")).isEqualTo(Map.of(
"some", "property",
"alist", List.of("of", "values")));
-assertThat(typeds.get("yaml2")).isEqualTo(object);
}
@Test
@@ -217,7 +201,7 @@ public class InputsTest {
(flow, execution1) -> flowIO.readExecutionInputs(flow, execution1, inputs) (flow, execution1) -> flowIO.readExecutionInputs(flow, execution1, inputs)
); );
assertThat(execution.getTaskRunList()).hasSize(16); assertThat(execution.getTaskRunList()).hasSize(14);
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS); assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat((String) execution.findTaskRunsByTaskId("file").getFirst().getOutputs().get("value")).matches("kestra:///io/kestra/tests/inputs/executions/.*/inputs/file/application-test.yml"); assertThat((String) execution.findTaskRunsByTaskId("file").getFirst().getOutputs().get("value")).matches("kestra:///io/kestra/tests/inputs/executions/.*/inputs/file/application-test.yml");
// secret inputs are decrypted to be used as task properties // secret inputs are decrypted to be used as task properties
@@ -370,19 +354,19 @@ public class InputsTest {
@LoadFlows(value = {"flows/valids/inputs.yaml"}, tenantId = "tenant14") @LoadFlows(value = {"flows/valids/inputs.yaml"}, tenantId = "tenant14")
void inputEmptyJson() { void inputEmptyJson() {
HashMap<String, Object> map = new HashMap<>(inputs); HashMap<String, Object> map = new HashMap<>(inputs);
map.put("json1", "{}"); map.put("json", "{}");
Map<String, Object> typeds = typedInputs(map, "tenant14"); Map<String, Object> typeds = typedInputs(map, "tenant14");
assertThat(typeds.get("json1")).isInstanceOf(Map.class); assertThat(typeds.get("json")).isInstanceOf(Map.class);
assertThat(((Map<?, ?>) typeds.get("json1")).size()).isZero(); assertThat(((Map<?, ?>) typeds.get("json")).size()).isZero();
} }
@Test @Test
@LoadFlows(value = {"flows/valids/inputs.yaml"}, tenantId = "tenant15") @LoadFlows(value = {"flows/valids/inputs.yaml"}, tenantId = "tenant15")
void inputEmptyJsonFlow() throws TimeoutException, QueueException { void inputEmptyJsonFlow() throws TimeoutException, QueueException {
HashMap<String, Object> map = new HashMap<>(inputs); HashMap<String, Object> map = new HashMap<>(inputs);
map.put("json1", "{}"); map.put("json", "{}");
Execution execution = runnerUtils.runOne( Execution execution = runnerUtils.runOne(
"tenant15", "tenant15",
@@ -392,11 +376,11 @@ public class InputsTest {
(flow, execution1) -> flowIO.readExecutionInputs(flow, execution1, map) (flow, execution1) -> flowIO.readExecutionInputs(flow, execution1, map)
); );
assertThat(execution.getTaskRunList()).hasSize(16); assertThat(execution.getTaskRunList()).hasSize(14);
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS); assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(execution.getInputs().get("json1")).isInstanceOf(Map.class); assertThat(execution.getInputs().get("json")).isInstanceOf(Map.class);
assertThat(((Map<?, ?>) execution.getInputs().get("json1")).size()).isZero(); assertThat(((Map<?, ?>) execution.getInputs().get("json")).size()).isZero();
assertThat((String) execution.findTaskRunsByTaskId("jsonOutput").getFirst().getOutputs().get("value")).isEqualTo("{}"); assertThat((String) execution.findTaskRunsByTaskId("jsonOutput").getFirst().getOutputs().get("value")).isEqualTo("{}");
} }
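The two inputEmptyJson tests above hinge on one behaviour: an empty JSON object passed as an input is typed as an empty Map, and rendering it back yields the literal "{}". A minimal standalone sketch of that round-trip using plain Jackson; Kestra wraps this in its own JacksonMapper utilities, so the class below is illustrative only:

import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Map;

// Sketch only: shows the Map round-trip the tests assert, not Kestra's actual input typing code.
class EmptyJsonInputSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        Map<?, ?> typed = mapper.readValue("{}", Map.class);
        System.out.println(typed.isEmpty());                  // true
        System.out.println(mapper.writeValueAsString(typed)); // {}
    }
}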

View File

@@ -122,10 +122,10 @@ class YamlParserTest {
void inputs() { void inputs() {
Flow flow = this.parse("flows/valids/inputs.yaml"); Flow flow = this.parse("flows/valids/inputs.yaml");
assertThat(flow.getInputs().size()).isEqualTo(31); assertThat(flow.getInputs().size()).isEqualTo(29);
assertThat(flow.getInputs().stream().filter(Input::getRequired).count()).isEqualTo(12L); assertThat(flow.getInputs().stream().filter(Input::getRequired).count()).isEqualTo(11L);
assertThat(flow.getInputs().stream().filter(r -> !r.getRequired()).count()).isEqualTo(19L); assertThat(flow.getInputs().stream().filter(r -> !r.getRequired()).count()).isEqualTo(18L);
assertThat(flow.getInputs().stream().filter(r -> r.getDefaults() != null).count()).isEqualTo(4L); assertThat(flow.getInputs().stream().filter(r -> r.getDefaults() != null).count()).isEqualTo(3L);
assertThat(flow.getInputs().stream().filter(r -> r instanceof StringInput stringInput && stringInput.getValidator() != null).count()).isEqualTo(1L); assertThat(flow.getInputs().stream().filter(r -> r instanceof StringInput stringInput && stringInput.getValidator() != null).count()).isEqualTo(1L);
} }

View File

@@ -48,8 +48,8 @@ class ListUtilsTest {
void convertToListString(){ void convertToListString(){
assertThat(ListUtils.convertToListString(List.of("string1", "string2"))).isEqualTo(List.of("string1", "string2")); assertThat(ListUtils.convertToListString(List.of("string1", "string2"))).isEqualTo(List.of("string1", "string2"));
assertThat(ListUtils.convertToListString(List.of())).isEqualTo(List.of()); assertThat(ListUtils.convertToListString(List.of())).isEqualTo(List.of());
assertThat(ListUtils.convertToListString(List.of(1, 2, 3))).isEqualTo(List.of("1", "2", "3"));
assertThrows(IllegalArgumentException.class, () -> ListUtils.convertToListString("not a list")); assertThrows(IllegalArgumentException.class, () -> ListUtils.convertToListString("not a list"));
assertThrows(IllegalArgumentException.class, () -> ListUtils.convertToListString(List.of(1, 2, 3)));
} }
} }
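For context, a minimal, hypothetical sketch of a convertToListString helper matching the left-hand assertions of this hunk (any list element is stringified, anything that is not a list is rejected). This is a stand-in, not the project's actual ListUtils; the right-hand side of the hunk instead expects an IllegalArgumentException for a list of non-string elements.

import java.util.List;

// Hypothetical helper for illustration; the name and contract are assumptions, not Kestra's ListUtils.
final class ListStringsSketch {
    static List<String> convertToListString(Object value) {
        if (!(value instanceof List<?> list)) {
            throw new IllegalArgumentException("Expected a List but got: " + value);
        }
        // Stringify every element, so List.of(1, 2, 3) becomes ["1", "2", "3"]
        return list.stream().map(String::valueOf).toList();
    }
}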

View File

@@ -1,11 +1,9 @@
package io.kestra.plugin.core.flow; package io.kestra.plugin.core.flow;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT; import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import static org.assertj.core.api.Assertions.as;
import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThat;
import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap;
import io.kestra.core.junit.annotations.ExecuteFlow;
import io.kestra.core.junit.annotations.KestraTest; import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.junit.annotations.LoadFlows; import io.kestra.core.junit.annotations.LoadFlows;
import io.kestra.core.models.executions.Execution; import io.kestra.core.models.executions.Execution;
@@ -102,14 +100,4 @@ class SwitchTest {
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.FAILED); assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.FAILED);
} }
@Test
@ExecuteFlow("flows/valids/switch-in-concurrent-loop.yaml")
void switchInConcurrentLoop(Execution execution) {
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(execution.getTaskRunList()).hasSize(5);
// we check that OOMCRM_EB_DD_000 and OOMCRM_EB_DD_001 have each been processed exactly once
assertThat(execution.getTaskRunList().stream().filter(t -> t.getTaskId().equals("OOMCRM_EB_DD_000")).count()).isEqualTo(1);
assertThat(execution.getTaskRunList().stream().filter(t -> t.getTaskId().equals("OOMCRM_EB_DD_001")).count()).isEqualTo(1);
}
} }

View File

@@ -1,11 +0,0 @@
id: flow-concurrency-queue-killed
namespace: io.kestra.tests
concurrency:
behavior: QUEUE
limit: 1
tasks:
- id: sleep
type: io.kestra.plugin.core.flow.Sleep
duration: PT1M

View File

@@ -41,10 +41,7 @@ inputs:
- id: instantDefaults - id: instantDefaults
type: DATETIME type: DATETIME
defaults: "2013-08-09T14:19:00Z" defaults: "2013-08-09T14:19:00Z"
- id: json1 - id: json
type: JSON
required: false
- id: json2
type: JSON type: JSON
required: false required: false
- id: uri - id: uri
@@ -98,16 +95,7 @@ inputs:
- name: array - name: array
type: ARRAY type: ARRAY
itemType: INT itemType: INT
- name: yaml1 - name: yaml
type: YAML
defaults:
property: something
list:
- key: key1
value: value1
- key: key2
value: value2
- name: yaml2
type: YAML type: YAML
defaults: defaults:
property: something property: something
@@ -152,18 +140,12 @@ tasks:
type: io.kestra.plugin.core.debug.Return type: io.kestra.plugin.core.debug.Return
format: "{{taskrun.value}}" format: "{{taskrun.value}}"
- id: json1 - id: json
type: io.kestra.plugin.core.debug.Return type: io.kestra.plugin.core.debug.Return
format: "{{inputs.json1}}" format: "{{inputs.json}}"
- id: json2
type: io.kestra.plugin.core.debug.Return
format: "{{inputs.json2}}"
- id: jsonOutput - id: jsonOutput
type: io.kestra.plugin.core.debug.Return type: io.kestra.plugin.core.debug.Return
format: "{{outputs.json1.value}}" format: "{{outputs.json.value}}"
- id: yamlOutput1 - id: yamlOutput
type: io.kestra.plugin.core.debug.Return type: io.kestra.plugin.core.debug.Return
format: "{{inputs.yaml1}}" format: "{{inputs.yaml}}"
- id: yamlOutput2
type: io.kestra.plugin.core.debug.Return
format: "{{inputs.yaml2}}"

View File

@@ -1,23 +0,0 @@
id: switch-in-concurrent-loop
namespace: io.kestra.tests
tasks:
- id: iterate_and_check_name
type: io.kestra.plugin.core.flow.ForEach
tasks:
- id: switch
type: io.kestra.plugin.core.flow.Switch
value: "{{ taskrun.value }}"
cases:
"Alice":
- id: OOMCRM_EB_DD_000
type: io.kestra.plugin.core.log.Log
message: Alice
"Bob":
- id: OOMCRM_EB_DD_001
type: io.kestra.plugin.core.log.Log
message: Bob
values: ["Alice", "Bob"]
concurrencyLimit: 0

View File

@@ -13,19 +13,18 @@ tasks:
- io.test.second - io.test.second
- io.test.third - io.test.third
enabled: true enabled: true
folderPerNamespace: true
exclude: exclude:
- /ignore/** - /ignore/**
tasks: tasks:
- id: t1 - id: t1
type: io.kestra.core.tasks.test.Read type: io.kestra.core.tasks.test.Read
path: "/io.test.third/test/a/b/c/1.txt" path: "/test/a/b/c/1.txt"
- id: t2 - id: t2
type: io.kestra.core.tasks.test.Read type: io.kestra.core.tasks.test.Read
path: "/io.test.second/a/b/c/2.txt" path: "/a/b/c/2.txt"
- id: t3 - id: t3
type: io.kestra.core.tasks.test.Read type: io.kestra.core.tasks.test.Read
path: "/io.test.first/a/b/3.txt" path: "/a/b/3.txt"
- id: t4 - id: t4
type: io.kestra.core.tasks.test.Read type: io.kestra.core.tasks.test.Read
path: "/ignore/4.txt" path: "/ignore/4.txt"

View File

@@ -16,7 +16,7 @@ public final class H2RepositoryUtils {
case MONTH: case MONTH:
return DSL.field("FORMATDATETIME(\"" + dateField + "\", 'yyyy-MM')", Date.class); return DSL.field("FORMATDATETIME(\"" + dateField + "\", 'yyyy-MM')", Date.class);
case WEEK: case WEEK:
return DSL.field("DATE_TRUNC('WEEK', \"" + dateField + "\")", Date.class); return DSL.field("FORMATDATETIME(\"" + dateField + "\", 'YYYY-ww')", Date.class);
case DAY: case DAY:
return DSL.field("FORMATDATETIME(\"" + dateField + "\", 'yyyy-MM-dd')", Date.class); return DSL.field("FORMATDATETIME(\"" + dateField + "\", 'yyyy-MM-dd')", Date.class);
case HOUR: case HOUR:

View File

@@ -1,6 +0,0 @@
package io.kestra.runner.h2;
import io.kestra.core.runners.AbstractRunnerConcurrencyTest;
public class H2RunnerConcurrencyTest extends AbstractRunnerConcurrencyTest {
}

View File

@@ -16,7 +16,7 @@ public final class MysqlRepositoryUtils {
case MONTH: case MONTH:
return DSL.field("DATE_FORMAT({0}, '%Y-%m')", Date.class, DSL.field(dateField)); return DSL.field("DATE_FORMAT({0}, '%Y-%m')", Date.class, DSL.field(dateField));
case WEEK: case WEEK:
return DSL.field("STR_TO_DATE(CONCAT(YEARWEEK({0}, 3), ' Monday'), '%X%V %W')", Date.class, DSL.field(dateField)); return DSL.field("DATE_FORMAT({0}, '%x-%v')", Date.class, DSL.field(dateField));
case DAY: case DAY:
return DSL.field("DATE({0})", Date.class, DSL.field(dateField)); return DSL.field("DATE({0})", Date.class, DSL.field(dateField));
case HOUR: case HOUR:

View File

@@ -1,6 +0,0 @@
package io.kestra.runner.mysql;
import io.kestra.core.runners.AbstractRunnerConcurrencyTest;
public class MysqlRunnerConcurrencyTest extends AbstractRunnerConcurrencyTest {
}

View File

@@ -16,7 +16,7 @@ public final class PostgresRepositoryUtils {
case MONTH: case MONTH:
return DSL.field("TO_CHAR({0}, 'YYYY-MM')", Date.class, DSL.field(dateField)); return DSL.field("TO_CHAR({0}, 'YYYY-MM')", Date.class, DSL.field(dateField));
case WEEK: case WEEK:
return DSL.field("DATE_TRUNC('week', {0})", Date.class, DSL.field(dateField)); return DSL.field("TO_CHAR({0}, 'IYYY-IW')", Date.class, DSL.field(dateField));
case DAY: case DAY:
return DSL.field("DATE({0})", Date.class, DSL.field(dateField)); return DSL.field("DATE({0})", Date.class, DSL.field(dateField));
case HOUR: case HOUR:
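Across the H2, MySQL and Postgres variants above, the WEEK bucket changes from a formatted ISO-week string to a date truncated to the start of the week. A rough jOOQ sketch of the two Postgres forms, with a made-up column name, to show the difference in the returned type (a real date groups and sorts chronologically, while the TO_CHAR label is just a string):

import org.jooq.Field;
import org.jooq.impl.DSL;
import java.sql.Date;

// Illustrative only; "start_date" is an assumed column name, not one from the schema.
class WeekBucketSketch {
    // Date-typed bucket: the first day of the ISO week
    static Field<Date> weekAsDate(String dateField) {
        return DSL.field("DATE_TRUNC('week', {0})", Date.class, DSL.field(dateField));
    }

    // String-typed bucket: an "IYYY-IW" label such as "2025-49"
    static Field<String> weekAsLabel(String dateField) {
        return DSL.field("TO_CHAR({0}, 'IYYY-IW')", String.class, DSL.field(dateField));
    }
}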

View File

@@ -1,6 +0,0 @@
package io.kestra.runner.postgres;
import io.kestra.core.runners.AbstractRunnerConcurrencyTest;
public class PostgresRunnerConcurrencyTest extends AbstractRunnerConcurrencyTest {
}

View File

@@ -639,14 +639,6 @@ public abstract class AbstractJdbcFlowRepository extends AbstractJdbcRepository
return (SelectConditionStep<R>) select; return (SelectConditionStep<R>) select;
} }
protected Name getColumnName(QueryFilter.Field field){
if (QueryFilter.Field.FLOW_ID.equals(field)) {
return DSL.quotedName("id");
} else {
return DSL.quotedName(field.name().toLowerCase());
}
}
abstract protected Condition findSourceCodeCondition(String query); abstract protected Condition findSourceCodeCondition(String query);
@Override @Override

View File

@@ -2,7 +2,6 @@ package io.kestra.jdbc.repository;
import io.kestra.core.exceptions.InvalidQueryFiltersException; import io.kestra.core.exceptions.InvalidQueryFiltersException;
import io.kestra.core.models.QueryFilter; import io.kestra.core.models.QueryFilter;
import io.kestra.core.models.QueryFilter.Op;
import io.kestra.core.models.QueryFilter.Resource; import io.kestra.core.models.QueryFilter.Resource;
import io.kestra.core.models.dashboards.ColumnDescriptor; import io.kestra.core.models.dashboards.ColumnDescriptor;
import io.kestra.core.models.dashboards.DataFilter; import io.kestra.core.models.dashboards.DataFilter;
@@ -292,7 +291,7 @@ public abstract class AbstractJdbcRepository {
} }
// Handle Field.CHILD_FILTER // Handle Field.CHILD_FILTER
if (field.equals(QueryFilter.Field.CHILD_FILTER)) { if (field.equals(QueryFilter.Field.CHILD_FILTER)) {
return handleChildFilter(value, operation); return handleChildFilter(value);
} }
// Handling for Field.MIN_LEVEL // Handling for Field.MIN_LEVEL
if (field.equals(QueryFilter.Field.MIN_LEVEL)) { if (field.equals(QueryFilter.Field.MIN_LEVEL)) {
@@ -323,51 +322,32 @@ public abstract class AbstractJdbcRepository {
throw new InvalidQueryFiltersException("Label field value must be instance of Map or String"); throw new InvalidQueryFiltersException("Label field value must be instance of Map or String");
} }
} }
if (field == QueryFilter.Field.KIND) {
return applyKindCondition(value,operation);
}
// Convert the field name to lowercase and quote it // Convert the field name to lowercase and quote it
Name columnName = getColumnName(field); Name columnName = DSL.quotedName(field.name().toLowerCase());
// Default handling for other fields // Default handling for other fields
return switch (operation) { return switch (operation) {
case EQUALS -> DSL.field(columnName).eq(primitiveOrToString(value)); case EQUALS -> DSL.field(columnName).eq(value);
case NOT_EQUALS -> DSL.field(columnName).ne(primitiveOrToString(value)); case NOT_EQUALS -> DSL.field(columnName).ne(value);
case GREATER_THAN -> DSL.field(columnName).greaterThan(value); case GREATER_THAN -> DSL.field(columnName).greaterThan(value);
case LESS_THAN -> DSL.field(columnName).lessThan(value); case LESS_THAN -> DSL.field(columnName).lessThan(value);
case IN -> DSL.field(columnName).in(ListUtils.convertToListString(value)); case IN -> DSL.field(columnName).in(ListUtils.convertToList(value));
case NOT_IN -> DSL.field(columnName).notIn(ListUtils.convertToListString(value)); case NOT_IN -> DSL.field(columnName).notIn(ListUtils.convertToList(value));
case STARTS_WITH -> DSL.field(columnName).like(value + "%"); case STARTS_WITH -> DSL.field(columnName).like(value + "%");
case ENDS_WITH -> DSL.field(columnName).like("%" + value); case ENDS_WITH -> DSL.field(columnName).like("%" + value);
case CONTAINS -> DSL.field(columnName).like("%" + value + "%"); case CONTAINS -> DSL.field(columnName).like("%" + value + "%");
case REGEX -> DSL.field(columnName).likeRegex((String) value); case REGEX -> DSL.field(columnName).likeRegex((String) value);
case PREFIX -> DSL.field(columnName).like(value + "%") case PREFIX -> DSL.field(columnName).like(value + ".%")
.or(DSL.field(columnName).eq(value)); .or(DSL.field(columnName).eq(value));
default -> throw new InvalidQueryFiltersException("Unsupported operation: " + operation); default -> throw new InvalidQueryFiltersException("Unsupported operation: " + operation);
}; };
} }
private static Object primitiveOrToString(Object o) {
if (o == null) return null;
if (o instanceof Boolean
|| o instanceof Byte
|| o instanceof Short
|| o instanceof Integer
|| o instanceof Long
|| o instanceof Float
|| o instanceof Double
|| o instanceof Character
|| o instanceof String) {
return o;
}
return o.toString();
}
protected Name getColumnName(QueryFilter.Field field){
return DSL.quotedName(field.name().toLowerCase());
}
protected Condition findQueryCondition(String query) { protected Condition findQueryCondition(String query) {
throw new InvalidQueryFiltersException("Unsupported operation: "); throw new InvalidQueryFiltersException("Unsupported operation: ");
} }
@@ -411,13 +391,12 @@ public abstract class AbstractJdbcRepository {
} }
// Handle CHILD_FILTER field logic // Handle CHILD_FILTER field logic
private Condition handleChildFilter(Object value, Op operation) { private Condition handleChildFilter(Object value) {
ChildFilter childFilter = (value instanceof String val) ? ChildFilter.valueOf(val) : (ChildFilter) value; ChildFilter childFilter = (value instanceof String val) ? ChildFilter.valueOf(val) : (ChildFilter) value;
return switch (operation) { return switch (childFilter) {
case EQUALS -> childFilter.equals(ChildFilter.CHILD) ? field("trigger_execution_id").isNotNull() : field("trigger_execution_id").isNull(); case CHILD -> field("trigger_execution_id").isNotNull();
case NOT_EQUALS -> childFilter.equals(ChildFilter.CHILD) ? field("trigger_execution_id").isNull() : field("trigger_execution_id").isNotNull(); case MAIN -> field("trigger_execution_id").isNull();
default -> throw new InvalidQueryFiltersException("Unsupported operation for child filter field: " + operation);
}; };
} }
@@ -468,6 +447,15 @@ public abstract class AbstractJdbcRepository {
default -> throw new InvalidQueryFiltersException("Unsupported operation for SCOPE: " + operation); default -> throw new InvalidQueryFiltersException("Unsupported operation for SCOPE: " + operation);
}; };
} }
private Condition applyKindCondition(Object value, QueryFilter.Op operation) {
String kind = value.toString();
return switch (operation) {
case EQUALS -> field("kind").eq(kind);
case NOT_EQUALS -> field("kind").ne(kind);
default -> throw new InvalidQueryFiltersException("Unsupported operation for KIND: " + operation);
};
}
protected Field<Date> formatDateField(String dateField, DateUtils.GroupType groupType) { protected Field<Date> formatDateField(String dateField, DateUtils.GroupType groupType) {
throw new UnsupportedOperationException("formatDateField() not implemented"); throw new UnsupportedOperationException("formatDateField() not implemented");
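A trimmed-down sketch of the operator-to-condition mapping shown in this hunk. The Op enum below is a stand-in for the real QueryFilter.Op and only a few operators are covered; it just illustrates how a filter value becomes a jOOQ Condition, including the PREFIX case that matches either the exact value or any dotted descendant of it:

import java.util.List;
import org.jooq.Condition;
import org.jooq.Name;
import org.jooq.impl.DSL;

// Simplified stand-in types for illustration; not the actual QueryFilter API.
class FilterConditionSketch {
    enum Op { EQUALS, IN, PREFIX }

    static Condition condition(String column, Op op, Object value) {
        Name name = DSL.quotedName(column);
        return switch (op) {
            case EQUALS -> DSL.field(name).eq(value);
            case IN -> DSL.field(name).in((List<?>) value);
            // "my.ns" matches itself and "my.ns.child", but not "my.nsX"
            case PREFIX -> DSL.field(name).like(value + ".%").or(DSL.field(name).eq(value));
        };
    }
}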

View File

@@ -658,16 +658,21 @@ public class JdbcExecutor implements ExecutorInterface {
workerTaskResults.add(new WorkerTaskResult(taskRun)); workerTaskResults.add(new WorkerTaskResult(taskRun));
} }
} }
// flowable attempt state transition to running /// flowable attempt state transition to running
if (workerTask.getTask().isFlowable()) { if (workerTask.getTask().isFlowable()) {
TaskRun updatedTaskRun = workerTask.getTaskRun() List<TaskRunAttempt> attempts = Optional.ofNullable(workerTask.getTaskRun().getAttempts())
.withAttempts( .map(ArrayList::new)
List.of( .orElseGet(ArrayList::new);
attempts.add(
TaskRunAttempt.builder() TaskRunAttempt.builder()
.state(new State().withState(State.Type.RUNNING)) .state(new State().withState(State.Type.RUNNING))
.build() .build()
) );
)
TaskRun updatedTaskRun = workerTask.getTaskRun()
.withAttempts(attempts)
.withState(State.Type.RUNNING); .withState(State.Type.RUNNING);
workerTaskResults.add(new WorkerTaskResult(updatedTaskRun)); workerTaskResults.add(new WorkerTaskResult(updatedTaskRun));
@@ -1200,17 +1205,16 @@ public class JdbcExecutor implements ExecutorInterface {
// check if there exists a queued execution and submit it to the execution queue // check if there exists a queued execution and submit it to the execution queue
if (executor.getFlow().getConcurrency() != null) { if (executor.getFlow().getConcurrency() != null) {
// decrement execution concurrency limit
// if an execution was queued but never running, it would have never been counted inside the concurrency limit and should not lead to popping a new queued execution // if an execution was queued but never running, it would have never been counted inside the concurrency limit and should not lead to popping a new queued execution
// this can only happen for KILLED executions.
boolean queuedThenKilled = execution.getState().getCurrent() == State.Type.KILLED boolean queuedThenKilled = execution.getState().getCurrent() == State.Type.KILLED
&& execution.getState().getHistories().stream().anyMatch(h -> h.getState().isQueued()) && execution.getState().getHistories().stream().anyMatch(h -> h.getState().isQueued())
&& execution.getState().getHistories().stream().noneMatch(h -> h.getState().onlyRunning()); && execution.getState().getHistories().stream().noneMatch(h -> h.getState().isRunning());
// if an execution was FAILED or CANCELLED because the concurrency limit was exceeded, it would never have been counted inside the concurrency limit and should not lead to popping a new queued execution
boolean concurrencyShortCircuitState = Concurrency.possibleTransitions(execution.getState().getCurrent()) boolean concurrencyShortCircuitState = Concurrency.possibleTransitions(execution.getState().getCurrent())
&& execution.getState().getHistories().get(execution.getState().getHistories().size() - 2).getState().isCreated(); && execution.getState().getHistories().get(execution.getState().getHistories().size() - 2).getState().isCreated();
// as we may receive a killed execution multiple times (once when we kill it, then once for each running worker task), we only handle the first one we receive: when the state transitioned from KILLING to KILLED
boolean killingThenKilled = execution.getState().getCurrent().isKilled() && executor.getOriginalState() == State.Type.KILLING;
if (!queuedThenKilled && !concurrencyShortCircuitState && (!execution.getState().getCurrent().isKilled() || killingThenKilled)) {
// decrement execution concurrency limit and pop a new queued execution if needed
concurrencyLimitStorage.decrement(executor.getFlow()); concurrencyLimitStorage.decrement(executor.getFlow());
if (executor.getFlow().getConcurrency().getBehavior() == Concurrency.Behavior.QUEUE) { if (executor.getFlow().getConcurrency().getBehavior() == Concurrency.Behavior.QUEUE) {
@@ -1246,8 +1250,7 @@ public class JdbcExecutor implements ExecutorInterface {
// IMPORTANT: this is safe as only the executor is listening to WorkerTaskResult, // IMPORTANT: this is safe as only the executor is listening to WorkerTaskResult,
// and we are sure at this stage that all WorkerJobs have been received and processed by the Worker. // and we are sure at this stage that all WorkerJobs have been received and processed by the Worker.
// If any of these assumptions changed, this code would not be safe anymore. // If any of these assumptions changed, this code would not be safe anymore.
// One notable exception is killed flows: the KILLED worker task results may arrive late, so removing them is racy as we may remove them before they are processed
if (cleanWorkerJobQueue && !ListUtils.isEmpty(executor.getExecution().getTaskRunList()) && !execution.getState().getCurrent().isKilled()) {
List<String> taskRunKeys = executor.getExecution().getTaskRunList().stream() List<String> taskRunKeys = executor.getExecution().getTaskRunList().stream()
.map(taskRun -> taskRun.getId()) .map(taskRun -> taskRun.getId())
.toList(); .toList();
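The comments in the concurrency hunk above encode a small decision: release the concurrency slot only for executions that actually occupied one, and count a KILLED execution only once, on its KILLING to KILLED transition. A minimal sketch of that decision as a pure function; the name, parameters and their derivation are assumptions for illustration, not Kestra's API:

// Sketch of the guard around concurrencyLimitStorage.decrement(...); inputs are assumed
// to be precomputed from the execution's state history as in the hunk above.
final class ConcurrencySlotSketch {
    static boolean shouldReleaseSlot(
        boolean queuedThenKilled,          // queued, then killed without ever running
        boolean concurrencyShortCircuited, // FAILED/CANCELLED straight from CREATED by the limit
        boolean isKilled,                  // current state is KILLED
        boolean killingThenKilled          // this is the first KILLED event, right after KILLING
    ) {
        if (queuedThenKilled || concurrencyShortCircuited) {
            return false; // the execution never held a slot, nothing to release
        }
        // killed executions are reported several times; only the first report releases the slot
        return !isKilled || killingThenKilled;
    }
}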

View File

@@ -20,10 +20,10 @@ dependencies {
def kafkaVersion = "4.1.1" def kafkaVersion = "4.1.1"
def opensearchVersion = "3.2.0" def opensearchVersion = "3.2.0"
def opensearchRestVersion = "3.3.2" def opensearchRestVersion = "3.3.2"
def flyingSaucerVersion = "10.0.6" def flyingSaucerVersion = "10.0.5"
def jacksonVersion = "2.20.1" def jacksonVersion = "2.20.1"
def jacksonAnnotationsVersion = "2.20" def jacksonAnnotationsVersion = "2.20"
def jugVersion = "5.2.0" def jugVersion = "5.1.1"
def langchain4jVersion = "1.9.1" def langchain4jVersion = "1.9.1"
def langchain4jCommunityVersion = "1.9.1-beta17" def langchain4jCommunityVersion = "1.9.1-beta17"
@@ -35,7 +35,7 @@ dependencies {
// we define cloud bom here for GCP, Azure and AWS so they are aligned for all plugins that use them (secret, storage, oss and ee plugins) // we define cloud bom here for GCP, Azure and AWS so they are aligned for all plugins that use them (secret, storage, oss and ee plugins)
api platform('com.google.cloud:libraries-bom:26.72.0') api platform('com.google.cloud:libraries-bom:26.72.0')
api platform("com.azure:azure-sdk-bom:1.3.3") api platform("com.azure:azure-sdk-bom:1.3.3")
api platform('software.amazon.awssdk:bom:2.40.5') api platform('software.amazon.awssdk:bom:2.40.0')
api platform("dev.langchain4j:langchain4j-bom:$langchain4jVersion") api platform("dev.langchain4j:langchain4j-bom:$langchain4jVersion")
api platform("dev.langchain4j:langchain4j-community-bom:$langchain4jCommunityVersion") api platform("dev.langchain4j:langchain4j-community-bom:$langchain4jCommunityVersion")
@@ -77,7 +77,7 @@ dependencies {
api "org.apache.kafka:kafka-clients:$kafkaVersion" api "org.apache.kafka:kafka-clients:$kafkaVersion"
api "org.apache.kafka:kafka-streams:$kafkaVersion" api "org.apache.kafka:kafka-streams:$kafkaVersion"
// AWS CRT is not included in the AWS BOM but needed for the S3 Transfer manager // AWS CRT is not included in the AWS BOM but needed for the S3 Transfer manager
api 'software.amazon.awssdk.crt:aws-crt:0.40.3' api 'software.amazon.awssdk.crt:aws-crt:0.40.1'
// Other libs // Other libs
api("org.projectlombok:lombok:1.18.42") api("org.projectlombok:lombok:1.18.42")
@@ -133,7 +133,7 @@ dependencies {
api 'org.codehaus.plexus:plexus-utils:3.0.24' // https://nvd.nist.gov/vuln/detail/CVE-2022-4244 api 'org.codehaus.plexus:plexus-utils:3.0.24' // https://nvd.nist.gov/vuln/detail/CVE-2022-4244
// for jOOQ to the same version as we use in EE // for jOOQ to the same version as we use in EE
api ("org.jooq:jooq:3.20.10") api ("org.jooq:jooq:3.20.9")
// Tests // Tests
api "org.junit-pioneer:junit-pioneer:2.3.0" api "org.junit-pioneer:junit-pioneer:2.3.0"
@@ -142,7 +142,7 @@ dependencies {
api group: 'org.exparity', name: 'hamcrest-date', version: '2.0.8' api group: 'org.exparity', name: 'hamcrest-date', version: '2.0.8'
api "org.wiremock:wiremock-jetty12:3.13.2" api "org.wiremock:wiremock-jetty12:3.13.2"
api "org.apache.kafka:kafka-streams-test-utils:$kafkaVersion" api "org.apache.kafka:kafka-streams-test-utils:$kafkaVersion"
api "com.microsoft.playwright:playwright:1.57.0" api "com.microsoft.playwright:playwright:1.56.0"
api "org.awaitility:awaitility:4.3.0" api "org.awaitility:awaitility:4.3.0"
// Kestra components // Kestra components

View File

@@ -24,6 +24,7 @@ import java.util.List;
import java.util.Map; import java.util.Map;
public class DockerService { public class DockerService {
// DDDDDDD
public static DockerClient client(DockerClientConfig dockerClientConfig) { public static DockerClient client(DockerClientConfig dockerClientConfig) {
DockerHttpClient dockerHttpClient = new ApacheDockerHttpClient.Builder() DockerHttpClient dockerHttpClient = new ApacheDockerHttpClient.Builder()
.dockerHost(dockerClientConfig.getDockerHost()) .dockerHost(dockerClientConfig.getDockerHost())

View File

@@ -34,7 +34,7 @@
<!-- Load Google Fonts non-blocking --> <!-- Load Google Fonts non-blocking -->
<link <link
rel="stylesheet" rel="stylesheet"
href="https://fonts.googleapis.com/css2?family=Public+Sans:wght@300;400;600;700;800&family=Source+Code+Pro:wght@400;700;800&display=swap" href="https://fonts.googleapis.com/css2?family=Public+Sans:wght@300;400;700;800&family=Source+Code+Pro:wght@400;700;800&display=swap"
media="print" media="print"
onload="this.media='all'" onload="this.media='all'"
> >
@@ -43,7 +43,7 @@
<noscript> <noscript>
<link <link
rel="stylesheet" rel="stylesheet"
href="https://fonts.googleapis.com/css2?family=Public+Sans:wght@300;400;600;700;800&family=Source+Code+Pro:wght@400;700;800&display=swap" href="https://fonts.googleapis.com/css2?family=Public+Sans:wght@300;400;700;800&family=Source+Code+Pro:wght@400;700;800&display=swap"
> >
</noscript> </noscript>
</head> </head>

ui/package-lock.json (generated, 41 changed lines)
View File

@@ -9,9 +9,8 @@
"version": "0.0.0", "version": "0.0.0",
"hasInstallScript": true, "hasInstallScript": true,
"dependencies": { "dependencies": {
"@esbuild/linux-x64": "0.27.1",
"@js-joda/core": "^5.6.5", "@js-joda/core": "^5.6.5",
"@kestra-io/ui-libs": "^0.0.264", "@kestra-io/ui-libs": "^0.0.263",
"@vue-flow/background": "^1.3.2", "@vue-flow/background": "^1.3.2",
"@vue-flow/controls": "^1.1.2", "@vue-flow/controls": "^1.1.2",
"@vue-flow/core": "^1.47.0", "@vue-flow/core": "^1.47.0",
@@ -78,7 +77,7 @@
"@types/humanize-duration": "^3.27.4", "@types/humanize-duration": "^3.27.4",
"@types/js-yaml": "^4.0.9", "@types/js-yaml": "^4.0.9",
"@types/moment": "^2.13.0", "@types/moment": "^2.13.0",
"@types/node": "^24.10.2", "@types/node": "^24.10.1",
"@types/nprogress": "^0.2.3", "@types/nprogress": "^0.2.3",
"@types/path-browserify": "^1.0.3", "@types/path-browserify": "^1.0.3",
"@types/semver": "^7.7.1", "@types/semver": "^7.7.1",
@@ -121,9 +120,9 @@
"vue-tsc": "^3.1.4" "vue-tsc": "^3.1.4"
}, },
"optionalDependencies": { "optionalDependencies": {
"@esbuild/darwin-arm64": "^0.27.1", "@esbuild/darwin-arm64": "^0.27.0",
"@esbuild/darwin-x64": "^0.27.1", "@esbuild/darwin-x64": "^0.27.0",
"@esbuild/linux-x64": "^0.27.1", "@esbuild/linux-x64": "^0.27.0",
"@rollup/rollup-darwin-arm64": "^4.53.3", "@rollup/rollup-darwin-arm64": "^4.53.3",
"@rollup/rollup-darwin-x64": "^4.53.3", "@rollup/rollup-darwin-x64": "^4.53.3",
"@rollup/rollup-linux-x64-gnu": "^4.53.3", "@rollup/rollup-linux-x64-gnu": "^4.53.3",
@@ -1343,9 +1342,9 @@
} }
}, },
"node_modules/@esbuild/darwin-arm64": { "node_modules/@esbuild/darwin-arm64": {
"version": "0.27.1", "version": "0.27.0",
"resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.1.tgz", "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.0.tgz",
"integrity": "sha512-veg7fL8eMSCVKL7IW4pxb54QERtedFDfY/ASrumK/SbFsXnRazxY4YykN/THYqFnFwJ0aVjiUrVG2PwcdAEqQQ==", "integrity": "sha512-uJOQKYCcHhg07DL7i8MzjvS2LaP7W7Pn/7uA0B5S1EnqAirJtbyw4yC5jQ5qcFjHK9l6o/MX9QisBg12kNkdHg==",
"cpu": [ "cpu": [
"arm64" "arm64"
], ],
@@ -1359,9 +1358,9 @@
} }
}, },
"node_modules/@esbuild/darwin-x64": { "node_modules/@esbuild/darwin-x64": {
"version": "0.27.1", "version": "0.27.0",
"resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.1.tgz", "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.0.tgz",
"integrity": "sha512-+3ELd+nTzhfWb07Vol7EZ+5PTbJ/u74nC6iv4/lwIU99Ip5uuY6QoIf0Hn4m2HoV0qcnRivN3KSqc+FyCHjoVQ==", "integrity": "sha512-8mG6arH3yB/4ZXiEnXof5MK72dE6zM9cDvUcPtxhUZsDjESl9JipZYW60C3JGreKCEP+p8P/72r69m4AZGJd5g==",
"cpu": [ "cpu": [
"x64" "x64"
], ],
@@ -1545,9 +1544,9 @@
} }
}, },
"node_modules/@esbuild/linux-x64": { "node_modules/@esbuild/linux-x64": {
"version": "0.27.1", "version": "0.27.0",
"resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.1.tgz", "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.0.tgz",
"integrity": "sha512-z3H/HYI9MM0HTv3hQZ81f+AKb+yEoCRlUby1F80vbQ5XdzEMyY/9iNlAmhqiBKw4MJXwfgsh7ERGEOhrM1niMA==", "integrity": "sha512-1hBWx4OUJE2cab++aVZ7pObD6s+DK4mPGpemtnAORBvb5l/g5xFGk0vc0PjSkrDs0XaXj9yyob3d14XqvnQ4gw==",
"cpu": [ "cpu": [
"x64" "x64"
], ],
@@ -2830,9 +2829,9 @@
"license": "BSD-3-Clause" "license": "BSD-3-Clause"
}, },
"node_modules/@kestra-io/ui-libs": { "node_modules/@kestra-io/ui-libs": {
"version": "0.0.264", "version": "0.0.263",
"resolved": "https://registry.npmjs.org/@kestra-io/ui-libs/-/ui-libs-0.0.264.tgz", "resolved": "https://registry.npmjs.org/@kestra-io/ui-libs/-/ui-libs-0.0.263.tgz",
"integrity": "sha512-yUZDNaE0wUPOuEq/FL/TQBRd1fTV2dyM8s+VcGRjNSM1uv1uZcsSHro56/heHQx17lo00FDcPT7BMKEifrVhBg==", "integrity": "sha512-j1rWqcQAK2CudNBkcDPjUXyaGFeBzJ7QEhPKFAbleHSw0N3QFu/iy0rFZxJNIMWRi1mGZBh74D6vL0OqQJkT2Q==",
"dependencies": { "dependencies": {
"@nuxtjs/mdc": "^0.17.3", "@nuxtjs/mdc": "^0.17.3",
"@popperjs/core": "^2.11.8", "@popperjs/core": "^2.11.8",
@@ -6010,9 +6009,9 @@
"license": "MIT" "license": "MIT"
}, },
"node_modules/@types/node": { "node_modules/@types/node": {
"version": "24.10.2", "version": "24.10.1",
"resolved": "https://registry.npmjs.org/@types/node/-/node-24.10.2.tgz", "resolved": "https://registry.npmjs.org/@types/node/-/node-24.10.1.tgz",
"integrity": "sha512-WOhQTZ4G8xZ1tjJTvKOpyEVSGgOTvJAfDK3FNFgELyaTpzhdgHVHeqW8V+UJvzF5BT+/B54T/1S2K6gd9c7bbA==", "integrity": "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ==",
"dev": true, "dev": true,
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {

View File

@@ -24,7 +24,7 @@
}, },
"dependencies": { "dependencies": {
"@js-joda/core": "^5.6.5", "@js-joda/core": "^5.6.5",
"@kestra-io/ui-libs": "^0.0.264", "@kestra-io/ui-libs": "^0.0.263",
"@vue-flow/background": "^1.3.2", "@vue-flow/background": "^1.3.2",
"@vue-flow/controls": "^1.1.2", "@vue-flow/controls": "^1.1.2",
"@vue-flow/core": "^1.47.0", "@vue-flow/core": "^1.47.0",
@@ -91,7 +91,7 @@
"@types/humanize-duration": "^3.27.4", "@types/humanize-duration": "^3.27.4",
"@types/js-yaml": "^4.0.9", "@types/js-yaml": "^4.0.9",
"@types/moment": "^2.13.0", "@types/moment": "^2.13.0",
"@types/node": "^24.10.2", "@types/node": "^24.10.1",
"@types/nprogress": "^0.2.3", "@types/nprogress": "^0.2.3",
"@types/path-browserify": "^1.0.3", "@types/path-browserify": "^1.0.3",
"@types/semver": "^7.7.1", "@types/semver": "^7.7.1",
@@ -134,9 +134,9 @@
"vue-tsc": "^3.1.4" "vue-tsc": "^3.1.4"
}, },
"optionalDependencies": { "optionalDependencies": {
"@esbuild/darwin-arm64": "^0.27.1", "@esbuild/darwin-arm64": "^0.27.0",
"@esbuild/darwin-x64": "^0.27.1", "@esbuild/darwin-x64": "^0.27.0",
"@esbuild/linux-x64": "^0.27.1", "@esbuild/linux-x64": "^0.27.0",
"@rollup/rollup-darwin-arm64": "^4.53.3", "@rollup/rollup-darwin-arm64": "^4.53.3",
"@rollup/rollup-darwin-x64": "^4.53.3", "@rollup/rollup-darwin-x64": "^4.53.3",
"@rollup/rollup-linux-x64-gnu": "^4.53.3", "@rollup/rollup-linux-x64-gnu": "^4.53.3",

View File

@@ -1,6 +1,6 @@
<template> <template>
<el-row :gutter="32"> <el-row :gutter="32">
<el-col :xs="24" :md="8" v-for="characteristics in editionCharacteristics" :key="characteristics.name" class="edition-col"> <el-col :span="8" v-for="characteristics in editionCharacteristics" :key="characteristics.name">
<EditionCharacteristics <EditionCharacteristics
class="h-100" class="h-100"
:name="characteristics.name" :name="characteristics.name"
@@ -115,17 +115,3 @@
} }
] ]
</script> </script>
<style scoped lang="scss">
.edition-col {
margin-bottom: 2rem;
&:last-child {
margin-bottom: 0;
}
@media (min-width: 992px) {
margin-bottom: 0;
}
}
</style>

View File

@@ -184,9 +184,7 @@
display: flex; display: flex;
align-items: center; align-items: center;
gap: 1rem; gap: 1rem;
min-height: 2rem; height: 2rem;
padding-top: 0.25rem;
padding-bottom: 0.25rem;
.usage-icon { .usage-icon {
display: flex; display: flex;
@@ -194,7 +192,6 @@
justify-content: center; justify-content: center;
width: 24px; width: 24px;
height: 24px; height: 24px;
flex-shrink: 0;
:deep(.material-design-icon__svg) { :deep(.material-design-icon__svg) {
font-size: 24px; font-size: 24px;
@@ -204,9 +201,11 @@
} }
.usage-label { .usage-label {
line-height: 1;
display: flex;
align-items: center;
font-size: 14px; font-size: 14px;
color: var(--ks-content-primary); color: var(--ks-content-primary);
line-height: 1.2;
} }
.usage-divider { .usage-divider {
@@ -216,16 +215,15 @@
} }
.usage-value { .usage-value {
font-size: 14px; line-height: 1;
line-height: 1.2; display: flex;
white-space: nowrap; align-items: center;
} }
.el-button { .el-button {
color: var(--ks-content-primary); color: var(--ks-content-primary);
display: flex; display: flex;
align-items: center; align-items: center;
flex-shrink: 0;
} }
} }
} }

View File

@@ -87,7 +87,7 @@
</el-form-item> </el-form-item>
<div class="password-requirements mb-4"> <div class="password-requirements mb-4">
<el-text> <el-text>
{{ t('setup.form.password_requirements') }} 8+ chars, 1 upper, 1 number
</el-text> </el-text>
</div> </div>
</el-form> </el-form>
@@ -502,7 +502,7 @@
localStorage.removeItem("basicAuthUserCreated") localStorage.removeItem("basicAuthUserCreated")
localStorage.setItem("basicAuthSetupCompletedAt", new Date().toISOString()) localStorage.setItem("basicAuthSetupCompletedAt", new Date().toISOString())
router.push({name: "welcome"}) router.push({name: "login"})
} }
</script> </script>

View File

@@ -101,7 +101,7 @@ $checkbox-checked-color: #8405FF;
.el-text { .el-text {
color: var(--ks-content-tertiary); color: var(--ks-content-tertiary);
font-size: 12px; font-size: 14px;
} }
} }

View File

@@ -3,8 +3,6 @@ import Utils from "../../../utils/utils";
import {cssVariable, State} from "@kestra-io/ui-libs"; import {cssVariable, State} from "@kestra-io/ui-libs";
import {getSchemeValue} from "../../../utils/scheme"; import {getSchemeValue} from "../../../utils/scheme";
import {useMiscStore} from "override/stores/misc";
export function tooltip(tooltipModel: { export function tooltip(tooltipModel: {
title?: string[]; title?: string[];
body?: { lines: string[] }[]; body?: { lines: string[] }[];
@@ -117,7 +115,7 @@ export function extractState(value: any) {
return value; return value;
} }
export function chartClick(moment: any, router: any, route: any, event: any, parsedData: any, elements: any, type = "label", filters: Record<string, any> = {}) { export function chartClick(moment: any, router: any, route: any, event: any, parsedData: any, elements: any, type = "label") {
const query: Record<string, any> = {}; const query: Record<string, any> = {};
if (elements && parsedData) { if (elements && parsedData) {
@@ -194,11 +192,7 @@ export function chartClick(moment: any, router: any, route: any, event: any, par
params: { params: {
tenant: route.params.tenant, tenant: route.params.tenant,
}, },
query: { query: query,
...query,
...filters,
"filters[timeRange][EQUALS]":useMiscStore()?.configs?.chartDefaultDuration ?? "P30D"
},
}); });
} }
} }

View File

@@ -49,8 +49,6 @@
showDefault: {type: Boolean, default: false}, showDefault: {type: Boolean, default: false},
short: {type: Boolean, default: false}, short: {type: Boolean, default: false},
execution: {type: Boolean, default: false}, execution: {type: Boolean, default: false},
flow: {type: String, default: undefined},
namespace: {type: String, default: undefined},
}); });
@@ -155,10 +153,7 @@
if (data.type === "io.kestra.plugin.core.dashboard.data.Logs" || props.execution) { if (data.type === "io.kestra.plugin.core.dashboard.data.Logs" || props.execution) {
return; return;
} }
chartClick(moment, router, route, {}, parsedData.value, elements, "label", { chartClick(moment, router, route, {}, parsedData.value, elements, "label");
...(props.namespace ? {"filters[namespace][IN]": props.namespace} : {}),
...(props.flow ? {"filters[flowId][EQUALS]": props.flow} : {})
});
}, },
}, theme.value); }, theme.value);
}); });

View File

@@ -40,9 +40,12 @@ export function useExecutionRoot() {
title: route.params.id as string, title: route.params.id as string,
breadcrumb: [ breadcrumb: [
{ {
label: t("executions"), label: t("flows"),
link: { link: {
name: "executions/list" name: "flows/list",
query: {
namespace: ns
}
} }
}, },
{ {
@@ -54,6 +57,17 @@ export function useExecutionRoot() {
id: flowId id: flowId
} }
} }
},
{
label: t("executions"),
link: {
name: "flows/update",
params: {
namespace: ns,
id: flowId,
tab: "executions"
}
}
} }
] ]
}; };

View File

@@ -120,6 +120,14 @@
:execution :execution
/> />
<!-- TODO: To be reworked and integrated into the Cascader component -->
<TriggerCascader
:title="t('trigger')"
:empty="t('no_trigger')"
:elements="execution.trigger"
:execution
/>
<div id="chart"> <div id="chart">
<div> <div>
<section> <section>
@@ -143,7 +151,7 @@
</section> </section>
<TimeSeries <TimeSeries
ref="chartRef" ref="chartRef"
:chart :chart="{...chart, content: YAML_CHART}"
:filters :filters
showDefault showDefault
execution execution
@@ -177,9 +185,11 @@
import {useI18n} from "vue-i18n"; import {useI18n} from "vue-i18n";
const {t} = useI18n({useScope: "global"}); const {t} = useI18n({useScope: "global"});
import {useBreakpoints, breakpointsElement} from "@vueuse/core";
const verticalLayout = useBreakpoints(breakpointsElement).smallerOrEqual("md");
import moment from "moment"; import moment from "moment";
import {verticalLayout} from "./utils/layout";
import {createLink} from "./utils/links"; import {createLink} from "./utils/links";
import Utils from "../../../utils/utils"; import Utils from "../../../utils/utils";
import {FilterObject} from "../../../utils/filters"; import {FilterObject} from "../../../utils/filters";
@@ -192,7 +202,8 @@
import ErrorAlert from "./components/main/ErrorAlert.vue"; import ErrorAlert from "./components/main/ErrorAlert.vue";
import Id from "../../Id.vue"; import Id from "../../Id.vue";
import Cascader from "./components/main/cascaders/Cascader.vue"; import Cascader from "./components/main/Cascader.vue";
import TriggerCascader from "./components/main/TriggerCascader.vue";
import TimeSeries from "../../dashboard/sections/TimeSeries.vue"; import TimeSeries from "../../dashboard/sections/TimeSeries.vue";
import PrevNext from "./components/main/PrevNext.vue"; import PrevNext from "./components/main/PrevNext.vue";
@@ -421,21 +432,14 @@
title: t("flow_outputs"), title: t("flow_outputs"),
empty: t("no_flow_outputs"), empty: t("no_flow_outputs"),
elements: execution.value?.outputs, elements: execution.value?.outputs,
includeDebug: "outputs",
},
{
title: t("trigger"),
empty: t("no_trigger"),
elements: execution.value?.trigger,
includeDebug: "trigger",
}, },
]; ];
const options = useValues("executions").VALUES.RELATIVE_DATE; const options = useValues("executions").VALUES.RELATIVE_DATE.slice(0, -1); // Remove last 365 days option
const timerange = ref<string>("PT168H"); // Default to last 7 days const timerange = ref<string>("PT168H"); // Default to last 7 days
const chartRef = ref<InstanceType<typeof TimeSeries> | null>(null); const chartRef = ref<InstanceType<typeof TimeSeries> | null>(null);
const chart = {...yaml.parse(YAML_CHART), content: YAML_CHART}; const chart = yaml.parse(YAML_CHART);
const filters = computed((): FilterObject[] => { const filters = computed((): FilterObject[] => {
if (!execution.value) return []; if (!execution.value) return [];

View File

@@ -1,5 +1,5 @@
<template> <template>
<div :id="cascaderID"> <div :id="`cascader-${props.title}`">
<div class="header"> <div class="header">
<el-text truncated> <el-text truncated>
{{ props.title }} {{ props.title }}
@@ -12,86 +12,70 @@
/> />
</div> </div>
<template v-if="props.elements">
<el-splitter
v-if="props.includeDebug"
:layout="verticalLayout ? 'vertical' : 'horizontal'"
lazy
>
<el-splitter-panel :size="verticalLayout ? '50%' : '70%'">
<el-cascader-panel <el-cascader-panel
v-if="props.elements"
ref="cascader"
:options="filteredOptions" :options="filteredOptions"
@expand-change="(p: string[]) => (path = p.join('.'))"
class="debug"
> >
<template #default="{data}"> <template #default="{data}">
<div class="node"> <VarValue
<div :title="data.label"> v-if="isFile(data.value)"
{{ data.label }} :value="data.value"
</div> :execution="props.execution"
<div v-if="data.value && data.children"> class="node"
<code>{{ itemsCount(data) }}</code>
</div>
</div>
</template>
</el-cascader-panel>
</el-splitter-panel>
<el-splitter-panel>
<DebugPanel
:property="props.includeDebug"
:execution
:path
/> />
</el-splitter-panel> <div v-else class="node">
</el-splitter>
<el-cascader-panel v-else :options="filteredOptions">
<template #default="{data}">
<div class="node">
<div :title="data.label"> <div :title="data.label">
{{ data.label }} {{ data.label }}
</div> </div>
<div v-if="data.value && data.children"> <div v-if="data.value && data.children">
<code>{{ itemsCount(data) }}</code> <code>
{{ data.children.length }}
{{
$t(
data.children.length === 1
? "item"
: "items",
)
}}
</code>
</div> </div>
</div> </div>
</template> </template>
</el-cascader-panel> </el-cascader-panel>
</template>
<span v-else class="empty">{{ props.empty }}</span> <span v-else class="empty">{{ props.empty }}</span>
</div> </div>
</template> </template>
<script setup lang="ts"> <script setup lang="ts">
import {onMounted, nextTick, computed, ref} from "vue"; import {onMounted, computed, ref} from "vue";
import DebugPanel from "./DebugPanel.vue"; import VarValue from "../../../VarValue.vue";
import {Execution} from "../../../../../../stores/executions"; import {Execution} from "../../../../../stores/executions";
import {verticalLayout} from "../../../utils/layout";
import {useI18n} from "vue-i18n";
const {t} = useI18n({useScope: "global"});
import Magnify from "vue-material-design-icons/Magnify.vue"; import Magnify from "vue-material-design-icons/Magnify.vue";
export interface Node {
label: string;
value: string;
children?: Node[];
}
const props = defineProps<{ const props = defineProps<{
title: string; title: string;
empty: string; empty: string;
elements?: Record<string, any>; elements?: Record<string, any>;
includeDebug?: "outputs" | "trigger";
execution: Execution; execution: Execution;
}>(); }>();
const path = ref<string>(""); const isFile = (data: any) => {
if (typeof data !== "string") return false;
const prefixes = ["kestra:///", "file://", "nsfile://"];
return prefixes.some((prefix) => data.startsWith(prefix));
};
interface Node {
label: string;
value: string;
children?: Node[];
}
const formatted = ref<Node[]>([]); const formatted = ref<Node[]>([]);
const format = (obj: Record<string, any>): Node[] => { const format = (obj: Record<string, any>): Node[] => {
@@ -130,25 +114,15 @@
}); });
}); });
const itemsCount = (item: Node) => { const cascader = ref<any>(null);
const length = item.children?.length ?? 0; onMounted(() => {
if (!length) return undefined;
return `${length} ${length === 1 ? t("item") : t("items")}`;
};
const cascaderID = `cascader-${props.title.toLowerCase().replace(/\s+/g, "-")}`;
onMounted(async () => {
if (props.elements) formatted.value = format(props.elements); if (props.elements) formatted.value = format(props.elements);
await nextTick(() => {
// Open first node by default on page mount // Open first node by default on page mount
const selector = `#${cascaderID} .el-cascader-node`; if (cascader?.value) {
const nodes = document.querySelectorAll(selector); const nodes = cascader.value.$el.querySelectorAll(".el-cascader-node");
if (nodes.length > 0) (nodes[0] as HTMLElement).click(); if (nodes.length > 0) (nodes[0] as HTMLElement).click();
}); }
}); });
</script> </script>
@@ -180,12 +154,6 @@
.el-cascader-panel { .el-cascader-panel {
overflow: auto; overflow: auto;
&.debug {
min-height: -webkit-fill-available;
border-top-right-radius: 0;
border-bottom-right-radius: 0;
}
} }
.empty { .empty {

View File

@@ -0,0 +1,639 @@
<template>
<div :id="`cascader-${props.title}`">
<div class="header">
<el-text truncated>
{{ props.title }}
</el-text>
<el-input
v-if="props.elements"
v-model="filter"
:placeholder="$t('search')"
:suffixIcon="Magnify"
/>
</div>
<el-splitter
v-if="props.elements"
:layout="verticalLayout ? 'vertical' : 'horizontal'"
>
<el-splitter-panel
v-model:size="leftWidth"
:min="'30%'"
:max="'70%'"
>
<div class="d-flex flex-column overflow-x-auto left">
<ElCascaderPanel
ref="cascader"
v-model="selected"
:options="filteredOptions"
:border="false"
class="flex-grow-1 cascader"
@change="onSelectionChange"
>
<template #default="{data}">
<div
class="w-100 d-flex justify-content-between"
@click="onNodeClick(data)"
>
<div class="pe-5 d-flex">
<span>{{ data.label }}</span>
</div>
<code>
<span class="regular">
{{ processedValue(data).label }}
</span>
</code>
</div>
</template>
</ElCascaderPanel>
</div>
</el-splitter-panel>
<el-splitter-panel v-model:size="rightWidth">
<div class="right wrapper">
<div class="w-100 overflow-auto debug-wrapper">
<div class="debug">
<div class="debug-title mb-3">
<span>{{ $t("eval.render") }}</span>
</div>
<div class="d-flex flex-column p-3 debug">
<Editor
ref="debugEditor"
:fullHeight="false"
:customHeight="20"
:input="true"
:navbar="false"
:modelValue="computedDebugValue"
@update:model-value="editorValue = $event"
@confirm="onDebugExpression($event)"
class="w-100"
/>
<el-button
type="primary"
:icon="Refresh"
@click="
onDebugExpression(
editorValue.length > 0
? editorValue
: computedDebugValue,
)
"
class="mt-3"
>
{{ $t("eval.render") }}
</el-button>
<Editor
v-if="debugExpression"
:readOnly="true"
:input="true"
:fullHeight="false"
:customHeight="20"
:navbar="false"
:modelValue="debugExpression"
:lang="isJSON ? 'json' : ''"
class="mt-3"
/>
</div>
</div>
<el-alert
v-if="debugError"
type="error"
:closable="false"
class="overflow-auto"
>
<p>
<strong>{{ debugError }}</strong>
</p>
<div class="my-2">
<CopyToClipboard
:text="`${debugError}\n\n${debugStackTrace}`"
label="Copy Error"
class="d-inline-block me-2"
/>
</div>
<pre class="mb-0" style="overflow: scroll">{{
debugStackTrace
}}</pre>
</el-alert>
<VarValue
v-if="selectedValue && displayVarValue()"
:value="
selectedValue?.uri
? selectedValue?.uri
: selectedValue
"
:execution="execution"
/>
</div>
</div>
</el-splitter-panel>
</el-splitter>
<span v-else class="empty">{{ props.empty }}</span>
</div>
</template>
<script setup lang="ts">
import {ref, computed, watch, onMounted} from "vue";
import {ElCascaderPanel} from "element-plus";
import CopyToClipboard from "../../../../layout/CopyToClipboard.vue";
import Magnify from "vue-material-design-icons/Magnify.vue";
import Editor from "../../../../inputs/Editor.vue";
import VarValue from "../../../VarValue.vue";
import Refresh from "vue-material-design-icons/Refresh.vue";
onMounted(() => {
if (props.elements) formatted.value = format(props.elements);
// Open first node by default on page mount
if (cascader?.value) {
const nodes = cascader.value.$el.querySelectorAll(".el-cascader-node");
if (nodes.length > 0) (nodes[0] as HTMLElement).click();
}
});
interface CascaderOption {
label: string;
value: string;
children?: CascaderOption[];
path?: string;
[key: string]: any;
}
const props = defineProps<{
title: string;
empty: string;
elements?: CascaderOption;
execution: any;
}>();
const cascader = ref<any>(null);
const debugEditor = ref<InstanceType<typeof Editor>>();
const selected = ref<string[]>([]);
const editorValue = ref("");
const debugExpression = ref("");
const debugError = ref("");
const debugStackTrace = ref("");
const isJSON = ref(false);
const expandedValue = ref("");
import {useBreakpoints, breakpointsElement} from "@vueuse/core";
const verticalLayout = useBreakpoints(breakpointsElement).smallerOrEqual("md");
const leftWidth = ref(verticalLayout.value ? "50%" : "80%");
const rightWidth = ref(verticalLayout.value ? "50%" : "20%");
const formatted = ref<CascaderOption[]>([]);
const format = (obj: Record<string, any>): CascaderOption[] => {
return Object.entries(obj).map(([key, value]) => {
const children =
typeof value === "object" && value !== null
? Object.entries(value).map(([k, v]) => format({[k]: v})[0])
: [{label: value, value: value}];
// Filter out children whose label and value are both undefined
const filteredChildren = children.filter(
(child) => child.label !== undefined || child.value !== undefined,
);
// Return node with or without children based on existence
const node: CascaderOption = {label: key, value: key};
// Include children only if there are valid entries
if (filteredChildren.length) {
node.children = filteredChildren;
}
return node;
});
};
const filter = ref("");
const filteredOptions = computed(() => {
if (filter.value === "") return formatted.value;
const lowercase = filter.value.toLowerCase();
return formatted.value.filter((node) => {
const matchesNode = node.label.toLowerCase().includes(lowercase);
if (!node.children) return matchesNode;
const matchesChildren = node.children.some((c) =>
c.label.toLowerCase().includes(lowercase),
);
return matchesNode || matchesChildren;
});
});
const selectedValue = computed(() => {
if (!selected.value?.length) return null;
const node = selectedNode();
return node?.value || node?.label;
});
const computedDebugValue = computed(() => {
if (selected.value?.length) {
const path = selected.value.join(".");
return `{{ trigger.${path} }}`;
}
if (expandedValue.value) {
return `{{ trigger.${expandedValue.value} }}`;
}
return "{{ trigger }}";
});
function selectedNode(): CascaderOption | null {
if (!selected.value?.length) return null;
let currentOptions: CascaderOption[] = props.elements;
let currentNode: CascaderOption | undefined = undefined;
for (const value of selected.value) {
currentNode = currentOptions?.find(
(option) => option.value === value || option.label === value,
);
if (currentNode?.children) {
currentOptions = currentNode.children;
}
}
return currentNode || null;
}
function processedValue(data: any) {
const trim = (value: any) =>
typeof value !== "string" || value.length < 16
? value
: `${value.substring(0, 16)}...`;
return {
label: trim(data.value || data.label),
regular: typeof data.value !== "object",
};
}
function onNodeClick(data: any) {
let path = "";
if (selected.value?.length) {
path = selected.value.join(".");
}
if (!path) {
const findNodePath = (
options: Record<string, any>[],
targetNode: any,
currentPath: string[] = [],
): string[] | null => {
const localOptions = Array.isArray(options)
? options
: [options]
for (const option of localOptions) {
const newPath = [...currentPath, option.value || option.label];
if (
option.value === targetNode.value ||
option.label === targetNode.label ||
option.value === (targetNode.value || targetNode.label) ||
option.label === (targetNode.value || targetNode.label)
) {
return newPath;
}
if (option.children) {
const found = findNodePath(
option.children ?? [],
targetNode,
newPath,
);
if (found) return found;
}
}
return null;
};
const nodePath = findNodePath(props.elements ?? [], data);
path = nodePath ? nodePath.join(".") : "";
}
if (path) {
expandedValue.value = path;
debugExpression.value = "";
debugError.value = "";
debugStackTrace.value = "";
}
}
function onSelectionChange(value: any) {
if (value?.length) {
const path = value.join(".");
expandedValue.value = path;
debugExpression.value = "";
debugError.value = "";
debugStackTrace.value = "";
}
}
function displayVarValue(): boolean {
return Boolean(
selectedValue.value &&
typeof selectedValue.value === "string" &&
(selectedValue.value.startsWith("kestra://") ||
selectedValue.value.startsWith("http://") ||
selectedValue.value.startsWith("https://")),
);
}
function evaluateExpression(expression: string, trigger: any): any {
try {
const cleanExpression = expression
.replace(/^\{\{\s*/, "")
.replace(/\s*\}\}$/, "")
.trim();
if (cleanExpression === "trigger") {
return trigger;
}
if (!cleanExpression.startsWith("trigger.")) {
throw new Error("Expression must start with \"trigger.\"");
}
const path = cleanExpression.substring(8);
const parts = path.split(".");
let result = trigger;
for (const part of parts) {
if (result && typeof result === "object" && part in result) {
result = result[part];
} else {
throw new Error(`Property "${part}" not found`);
}
}
return result;
} catch (error: any) {
throw new Error(`Failed to evaluate expression: ${error.message}`);
}
}
function onDebugExpression(expression: string): void {
try {
debugError.value = "";
debugStackTrace.value = "";
const result = evaluateExpression(expression, props.execution?.trigger);
try {
if (typeof result === "object" && result !== null) {
debugExpression.value = JSON.stringify(result, null, 2);
isJSON.value = true;
} else {
debugExpression.value = String(result);
isJSON.value = false;
}
} catch {
debugExpression.value = String(result);
isJSON.value = false;
}
} catch (error: any) {
debugError.value = error.message || "Failed to evaluate expression";
debugStackTrace.value = error.stack || "";
debugExpression.value = "";
isJSON.value = false;
}
}
watch(
selected,
(newValue) => {
if (newValue?.length) {
const path = newValue.join(".");
expandedValue.value = path;
debugExpression.value = "";
debugError.value = "";
debugStackTrace.value = "";
}
},
{deep: true},
);
</script>
<style scoped lang="scss">
.outputs {
height: fit-content;
display: flex;
position: relative;
}
.left {
overflow-x: auto;
height: 100%;
display: flex;
flex-direction: column;
}
:deep(.el-cascader-panel) {
min-height: 197px;
height: 100%;
border: 1px solid var(--ks-border-primary);
border-radius: 0;
overflow-x: auto !important;
overflow-y: hidden !important;
.el-scrollbar.el-cascader-menu:nth-of-type(-n + 2) ul li:first-child {
pointer-events: auto !important;
margin: 0 !important;
}
.el-cascader-node {
pointer-events: auto !important;
cursor: pointer !important;
}
.el-cascader-panel__wrap {
overflow-x: auto !important;
display: flex !important;
min-width: max-content !important;
}
.el-cascader-menu {
min-width: 300px;
max-width: 300px;
flex-shrink: 0;
&:last-child {
border-right: 1px solid var(--ks-border-primary);
}
.el-cascader-menu__wrap {
height: 100%;
}
.el-cascader-node {
height: 36px;
line-height: 36px;
font-size: var(--el-font-size-small);
color: var(--ks-content-primary);
&[aria-haspopup="false"] {
padding-right: 0.5rem !important;
}
&:hover {
background-color: var(--ks-border-primary);
}
&.in-active-path,
&.is-active {
background-color: var(--ks-border-primary);
font-weight: normal;
}
.el-cascader-node__prefix {
display: none;
}
code span.regular {
color: var(--ks-content-primary);
}
}
}
}
:deep(.el-cascader-node) {
cursor: pointer;
margin: 0 !important;
}
.el-cascader-menu__list {
padding: 6px;
}
.wrapper {
height: fit-content;
overflow: hidden;
z-index: 1000;
height: 100%;
display: flex;
flex-direction: column;
.debug-wrapper {
min-height: 197px;
border: 1px solid var(--ks-border-primary);
border-left-width: 0.5px;
border-radius: 0;
padding: 0;
background-color: var(--ks-background-body);
flex: 1;
}
.debug-title {
padding: 12px 16px;
background-color: var(--ks-background-body);
font-weight: bold;
font-size: var(--el-font-size-base);
}
}
@media (max-width: 768px) {
.outputs {
height: 600px;
margin-top: 15px;
}
:deep(.el-cascader-panel) {
height: 100%;
}
}
@import "@kestra-io/ui-libs/src/scss/variables";
[id^="cascader-"] {
overflow: hidden;
.header {
display: flex;
justify-content: space-between;
align-items: center;
padding-bottom: $spacer;
> .el-text {
width: 100%;
display: flex;
align-items: center;
font-size: $font-size-xl;
}
> .el-input {
display: flex;
align-items: center;
width: calc($spacer * 16);
}
}
.el-cascader-panel {
overflow: auto;
}
.empty {
font-size: $font-size-sm;
color: var(--ks-content-secondary);
}
:deep(.el-cascader-menu) {
min-width: 300px;
max-width: 300px;
.el-cascader-menu__list {
padding: 0;
}
.el-cascader-menu__wrap {
height: 100%;
}
.node {
width: 100%;
display: flex;
justify-content: space-between;
}
& .el-cascader-node {
height: 36px;
line-height: 36px;
font-size: $font-size-sm;
color: var(--ks-content-primary);
padding: 0 30px 0 5px;
&[aria-haspopup="false"] {
padding-right: 0.5rem !important;
}
&:hover {
background-color: var(--ks-border-primary);
}
&.in-active-path,
&.is-active {
background-color: var(--ks-border-primary);
font-weight: normal;
}
.el-cascader-node__prefix {
display: none;
}
code span.regular {
color: var(--ks-content-primary);
}
}
}
}
</style>

View File

@@ -1,182 +0,0 @@
<template>
<div id="debug">
<Editor
v-model="expression"
:shouldFocus="false"
:navbar="false"
input
class="expression"
/>
<div class="buttons">
<el-button type="primary" :icon="Refresh" @click="onRender">
{{ $t("eval.render") }}
</el-button>
<el-button
:disabled="!result && !error"
:icon="CloseCircleOutline"
@click="clearAll"
/>
</div>
<template v-if="result">
<VarValue v-if="isFile" :value="result.value" :execution />
<Editor
v-else
v-model="result.value"
:shouldFocus="false"
:navbar="false"
input
readOnly
:lang="result.type"
class="result"
/>
</template>
<el-alert
v-else-if="error"
type="error"
:title="error"
showIcon
:closable="false"
/>
</div>
</template>
<script setup lang="ts">
import {watch, ref, computed} from "vue";
import Editor from "../../../../../inputs/Editor.vue";
import VarValue from "../../../../VarValue.vue";
import {Execution} from "../../../../../../stores/executions";
import Refresh from "vue-material-design-icons/Refresh.vue";
import CloseCircleOutline from "vue-material-design-icons/CloseCircleOutline.vue";
const props = defineProps<{
property: "outputs" | "trigger";
execution: Execution;
path: string;
}>();
const result = ref<{ value: string; type: string } | undefined>(undefined);
const error = ref<string | undefined>(undefined);
const clearAll = () => {
result.value = undefined;
error.value = undefined;
};
const isFile = computed(() => {
if (!result.value || typeof result.value.value !== "string") return false;
const prefixes = ["kestra:///", "file://", "nsfile://"];
return prefixes.some((prefix) => result.value!.value.startsWith(prefix));
});
const expression = ref<string>("");
watch(
() => props.path,
(path?: string) => {
result.value = undefined;
expression.value = `{{ ${props.property}${path ? `.${path}` : ""} }}`;
},
{immediate: true},
);
const onRender = () => {
if (!props.execution) return;
result.value = undefined;
error.value = undefined;
const clean = expression.value
.replace(/^\{\{\s*/, "")
.replace(/\s*\}\}$/, "")
.trim();
if (clean === "outputs" || clean === "trigger") {
result.value = {
value: JSON.stringify(props.execution[props.property], null, 2),
type: "json",
};
}
if (!clean.startsWith("outputs.") && !clean.startsWith("trigger.")) {
result.value = undefined;
error.value = `Expression must start with "{{ ${props.property}. }}"`;
return;
}
const parts = clean.substring(props.property.length + 1).split(".");
let target: any = props.execution[props.property];
for (const part of parts) {
if (target && typeof target === "object" && part in target) {
target = target[part];
} else {
result.value = undefined;
error.value = `Property "${part}" does not exist on ${props.property}`;
return;
}
}
if (target && typeof target === "object") {
result.value = {
value: JSON.stringify(target, null, 2),
type: "json",
};
} else {
result.value = {value: String(target), type: "text"};
}
};
</script>
<style scoped lang="scss">
@import "@kestra-io/ui-libs/src/scss/variables";
#debug {
display: flex;
flex-direction: column;
height: 100%;
padding: calc($spacer / 2) $spacer;
border: 1px solid var(--el-border-color-light);
:deep(.ks-editor) {
&.expression {
height: calc($spacer * 2);
margin-bottom: $spacer;
}
&.result {
height: calc($spacer * 10);
}
}
.buttons {
display: inline-flex;
& :deep(.el-button) {
width: 100%;
margin-bottom: $spacer;
padding: $spacer;
font-size: $font-size-sm;
overflow: hidden;
span:not(i span) {
display: block;
min-width: 0;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}
}
& :deep(.el-button:nth-of-type(2)) {
width: calc($spacer * 4);
}
}
}
</style>

View File

@@ -1,3 +0,0 @@
import {useBreakpoints, breakpointsElement} from "@vueuse/core";
export const verticalLayout = useBreakpoints(breakpointsElement).smallerOrEqual("md");

View File

@@ -108,8 +108,6 @@ export function useFilters(
const query = {...route.query};
clearFilterQueryParams(query);
- delete query.page;
if (legacyQuery) {
clearLegacyParams(query);
- buildLegacyQuery(query);

View File

@@ -67,7 +67,6 @@ export const useDashboardFilter = (): ComputedRef<FilterConfiguration> => {
const {VALUES} = useValues("executions");
return VALUES.EXECUTION_STATES;
},
- searchable: true,
showComparatorSelection: true
},
{

View File

@@ -21,7 +21,6 @@ export const useFlowExecutionFilter = (): ComputedRef<FilterConfiguration> => {
const {VALUES} = useValues("executions");
return VALUES.EXECUTION_STATES;
},
- searchable: true,
visibleByDefault: true
},
{

View File

@@ -213,8 +213,6 @@
:filters="chartFilters()"
showDefault
short
- :flow="scope.row.id"
- :namespace="scope.row.namespace"
/>
</template>
</el-table-column>

View File

@@ -3,17 +3,15 @@
<span v-for="trigger in triggers" :key="uid(trigger)" :id="uid(trigger)">
<template v-if="trigger.disabled === undefined || trigger.disabled === false">
<el-popover
- :ref="(el: any) => setPopoverRef(el, trigger)"
placement="left"
:persistent="true"
:title="`${$t('trigger details')}: ${trigger ? trigger.id : ''}`"
:width="500"
transition=""
:hideAfter="0"
- @show="handlePopoverShow"
>
<template #reference>
- <el-button class="trigger-icon" @click="copyLink(trigger)" size="small">
+ <el-button @click="copyLink(trigger)" size="small">
<TaskIcon :onlyIcon="true" :cls="trigger?.type" :icons="pluginsStore.icons" />
</el-button>
</template>
@@ -26,7 +24,7 @@
</div>
</template>
<script setup lang="ts">
- import {computed, ref, nextTick} from "vue";
+ import {computed} from "vue";
import {useRoute} from "vue-router";
import {usePluginsStore} from "../../stores/plugins";
import Utils from "../../utils/utils";
@@ -63,8 +61,6 @@
const pluginsStore = usePluginsStore();
const route = useRoute();
- const popoverRefs = ref<Map<string, any>>(new Map());
const triggers = computed<Trigger[]>(() => {
if (props.flow && props.flow.triggers) {
return props.flow.triggers.filter(
@@ -81,22 +77,6 @@
return (props.flow ? props.flow.namespace + "-" + props.flow.id : props.execution?.id) + "-" + trigger.id;
}
- function setPopoverRef(el: any, trigger: Trigger) {
- if (el) {
- popoverRefs.value.set(uid(trigger), el);
- }
- }
- function handlePopoverShow() {
- nextTick(() => {
- popoverRefs.value.forEach((popover) => {
- if (popover?.popperRef?.popperInstanceRef) {
- popover.popperRef.popperInstanceRef.update();
- }
- });
- });
- }
const {t} = useI18n();
const toast = useToast();
@@ -119,18 +99,12 @@
<style scoped lang="scss">
.trigger {
max-width: 180px;
- display: flex;
- justify-content: center;
+ overflow-x: auto;
}
- .trigger-icon {
+ .el-button {
display: inline-flex !important;
- align-items: center;
margin-right: .25rem;
- border: none;
- background-color: transparent;
- padding: 2px;
- cursor: default;
}
:deep(div.wrapper) {

View File

@@ -16,12 +16,12 @@
/>
<div v-else-if="invalidGraph">
<el-alert
- :title="$t('topology-graph.invalid')"
+ :title="t('topology-graph.invalid')"
type="error"
class="invalid-graph"
:closable="false"
>
- {{ $t('topology-graph.invalid_description') }}
+ {{ t('topology-graph.invalid_description') }}
</el-alert>
</div>
</div>
@@ -29,12 +29,15 @@
<script setup lang="ts">
import {computed, ref} from "vue";
+ import {useI18n} from "vue-i18n";
import {Utils} from "@kestra-io/ui-libs";
import LowCodeEditor from "./LowCodeEditor.vue";
import {useFlowStore} from "../../stores/flow";
const flowStore = useFlowStore();
+ const {t} = useI18n();
const flowYaml = computed(() => flowStore.flowYaml);
const flowGraph = computed(() => flowStore.flowGraph);
const invalidGraph = computed(() => flowStore.invalidGraph);

View File

@@ -7,14 +7,14 @@
:disabled="!playgroundStore.readyToStart"
>
<el-icon><Play /></el-icon>
- <span>{{ $t('playground.run_task') }}</span>
+ <span>{{ t('playground.run_task') }}</span>
<template #dropdown>
<el-dropdown-menu>
<el-dropdown-item :icon="Play" @click="playgroundStore.runUntilTask(taskId)">
- {{ $t('playground.run_this_task') }}
+ {{ t('playground.run_this_task') }}
</el-dropdown-item>
<el-dropdown-item :icon="PlayBoxMultiple" @click="playgroundStore.runUntilTask(taskId, true)">
- {{ $t('playground.run_task_and_downstream') }}
+ {{ t('playground.run_task_and_downstream') }}
</el-dropdown-item>
</el-dropdown-menu>
</template>
@@ -22,10 +22,12 @@
</template>
<script setup lang="ts">
+ import {useI18n} from "vue-i18n";
import {usePlaygroundStore} from "../../stores/playground";
import Play from "vue-material-design-icons/Play.vue";
import PlayBoxMultiple from "vue-material-design-icons/PlayBoxMultiple.vue";
+ const {t} = useI18n();
const playgroundStore = usePlaygroundStore();
defineProps<{

View File

@@ -12,7 +12,6 @@
import {useCoreStore} from "../../stores/core";
import {useMiscStore} from "override/stores/misc";
import {computed, onMounted} from "vue";
- import {useLayoutStore} from "../../stores/layout";
const coreStore = useCoreStore();
const miscStore = useMiscStore();
@@ -23,9 +22,7 @@
document.getElementsByTagName("html")[0].classList.remove(collapse ? "menu-not-collapsed" : "menu-collapsed");
}
- const layoutStore = useLayoutStore();
onMounted(() => {
- onMenuCollapse(Boolean(layoutStore.sideMenuCollapsed))
+ onMenuCollapse(localStorage.getItem("menuCollapsed") === "true")
});
</script> </script>

View File

@@ -28,7 +28,7 @@
</template>
<script setup lang="ts">
- import {onUpdated, computed, h, watch} from "vue";
+ import {onUpdated, ref, computed, h, watch} from "vue";
import {useI18n} from "vue-i18n";
import {useRoute} from "vue-router";
import {useMediaQuery} from "@vueuse/core";
@@ -118,10 +118,7 @@
];
});
- const collapsed = computed({
- get: () => layoutStore.sideMenuCollapsed,
- set: (v: boolean) => layoutStore.setSideMenuCollapsed(v),
- })
+ const collapsed = ref(localStorage.getItem("menuCollapsed") === "true")
const isSmallScreen = useMediaQuery("(max-width: 768px)")

View File

@@ -60,7 +60,7 @@
@click="activeFlow = flowIndex"
>
<p class="title mb-2">
- {{ flow.labels?.find(l => l.key === 'name')?.value ?? flow.id }}
+ {{ flow.description }}
</p>
<div>
<div

View File

@@ -25,6 +25,10 @@ const handleAuthError = (error, to) => {
initApp(app, routes, null, en).then(({router, piniaStore}) => {
router.beforeEach(async (to, from, next) => {
+ if (to.meta?.anonymous === true) {
+ return next();
+ }
if(to.path === from.path && to.query === from.query) {
return next(); // Prevent navigation if the path and query are the same
}
@@ -41,28 +45,13 @@ initApp(app, routes, null, en).then(({router, piniaStore}) => {
if (validationErrors?.length > 0) {
// Creds exist in config but failed validation
// Route to login to show errors
- if (to.name === "login") {
- return next();
- }
return next({name: "login"})
} else {
// No creds in config - redirect to set it up
- if (to.name === "setup") {
- return next();
- }
return next({name: "setup"})
}
}
- if (to.meta?.anonymous === true) {
- if (to.name === "setup") {
- return next({name: "login"});
- }
- return next();
- }
const hasCredentials = BasicAuth.isLoggedIn()
if (!hasCredentials) {
@@ -103,6 +92,6 @@ initApp(app, routes, null, en).then(({router, piniaStore}) => {
}, null, router, true);
// mount
- router.isReady().then(() => app.mount("#app"))
+ app.mount("#app")
});

View File

@@ -67,7 +67,6 @@
</div>
<div class="action-button">
- <slot name="buttons" :blueprint="blueprint" />
<el-tooltip v-if="embed && !system" trigger="click" content="Copied" placement="left" :autoClose="2000" effect="light">
<el-button
type="primary"

View File

@@ -12,13 +12,7 @@ export const useLayoutStore = defineStore("layout", {
topNavbar: undefined,
envName: localStorage.getItem("envName") || undefined,
envColor: localStorage.getItem("envColor") || undefined,
- sideMenuCollapsed: (() => {
- if (typeof window === "undefined") {
- return false;
- }
- return localStorage.getItem("menuCollapsed") === "true" || window.matchMedia("(max-width: 768px)").matches;
- })(),
+ sideMenuCollapsed: localStorage.getItem("menuCollapsed") === "true",
}),
getters: {},
actions: {

View File

@@ -20,17 +20,6 @@
url('../../src/assets/fonts/public-sans/public-sans-v21-latin-regular.woff2') format('woff2');
}
- @font-face {
- font-family: 'Public Sans';
- font-style: normal;
- font-weight: 600;
- font-display: swap;
- src:
- local('Public Sans SemiBold'),
- local('PublicSans-SemiBold'),
- url('../../src/assets/fonts/public-sans/public-sans-v21-latin-600.woff2') format('woff2');
- }
@font-face {
font-family: 'Public Sans';
font-style: normal;

View File

@@ -1612,8 +1612,7 @@
"email": "E-Mail", "email": "E-Mail",
"firstName": "Vorname", "firstName": "Vorname",
"lastName": "Nachname", "lastName": "Nachname",
"password": "Passwort", "password": "Passwort"
"password_requirements": "Das Passwort muss mindestens 8 Zeichen lang sein und mindestens 1 Großbuchstaben und 1 Zahl enthalten."
}, },
"login": "Anmelden", "login": "Anmelden",
"logout": "Abmelden", "logout": "Abmelden",

View File

@@ -262,7 +262,7 @@
"output": "Output", "output": "Output",
"eval": { "eval": {
"title": "Debug Expression", "title": "Debug Expression",
"render": "Render", "render": "Render Expression",
"tooltip": "Render any Pebble expression and inspect the Execution context." "tooltip": "Render any Pebble expression and inspect the Execution context."
}, },
"attempt": "Attempt", "attempt": "Attempt",
@@ -1477,8 +1477,7 @@
"email": "Email", "email": "Email",
"firstName": "First Name", "firstName": "First Name",
"lastName": "Last Name", "lastName": "Last Name",
"password": "Password", "password": "Password"
"password_requirements": "Password must be at least 8 characters long and include at least 1 uppercase letter and 1 number."
}, },
"validation": { "validation": {
"email_required": "Email is required", "email_required": "Email is required",

View File

@@ -1612,8 +1612,7 @@
"email": "Correo electrónico", "email": "Correo electrónico",
"firstName": "Nombre", "firstName": "Nombre",
"lastName": "Apellido", "lastName": "Apellido",
"password": "Contraseña", "password": "Contraseña"
"password_requirements": "La contraseña debe tener al menos 8 caracteres y contener al menos 1 letra mayúscula y 1 número."
}, },
"login": "Iniciar sesión", "login": "Iniciar sesión",
"logout": "Cerrar sesión", "logout": "Cerrar sesión",

View File

@@ -1612,8 +1612,7 @@
"email": "E-mail", "email": "E-mail",
"firstName": "Prénom", "firstName": "Prénom",
"lastName": "Nom de famille", "lastName": "Nom de famille",
"password": "Mot de passe", "password": "Mot de passe"
"password_requirements": "Le mot de passe doit comporter au moins 8 caractères, inclure au moins 1 lettre majuscule et 1 chiffre."
}, },
"login": "Connexion", "login": "Connexion",
"logout": "Déconnexion", "logout": "Déconnexion",

View File

@@ -1612,8 +1612,7 @@
"email": "ईमेल", "email": "ईमेल",
"firstName": "पहला नाम", "firstName": "पहला नाम",
"lastName": "अंतिम नाम", "lastName": "अंतिम नाम",
"password": "पासवर्ड", "password": "पासवर्ड"
"password_requirements": "पासवर्ड कम से कम 8 अक्षरों का होना चाहिए और इसमें कम से कम 1 बड़ा अक्षर और 1 संख्या शामिल होनी चाहिए।"
}, },
"login": "लॉगिन", "login": "लॉगिन",
"logout": "लॉगआउट", "logout": "लॉगआउट",

View File

@@ -1612,8 +1612,7 @@
"email": "Email", "email": "Email",
"firstName": "Nome", "firstName": "Nome",
"lastName": "Cognome", "lastName": "Cognome",
"password": "Password", "password": "Password"
"password_requirements": "La password deve essere lunga almeno 8 caratteri e includere almeno 1 lettera maiuscola e 1 numero."
}, },
"login": "Accedi", "login": "Accedi",
"logout": "Logout", "logout": "Logout",

View File

@@ -1612,8 +1612,7 @@
"email": "メール", "email": "メール",
"firstName": "名", "firstName": "名",
"lastName": "姓", "lastName": "姓",
"password": "パスワード", "password": "パスワード"
"password_requirements": "パスワードは8文字以上で、少なくとも1つの大文字と1つの数字を含める必要があります。"
}, },
"login": "ログイン", "login": "ログイン",
"logout": "ログアウト", "logout": "ログアウト",

View File

@@ -1612,8 +1612,7 @@
"email": "이메일", "email": "이메일",
"firstName": "이름", "firstName": "이름",
"lastName": "성씨", "lastName": "성씨",
"password": "비밀번호", "password": "비밀번호"
"password_requirements": "비밀번호는 최소 8자 이상이어야 하며, 최소 1개의 대문자와 1개의 숫자를 포함해야 합니다."
}, },
"login": "로그인", "login": "로그인",
"logout": "로그아웃", "logout": "로그아웃",

View File

@@ -1612,8 +1612,7 @@
"email": "Email", "email": "Email",
"firstName": "Imię", "firstName": "Imię",
"lastName": "Nazwisko", "lastName": "Nazwisko",
"password": "Hasło", "password": "Hasło"
"password_requirements": "Hasło musi mieć co najmniej 8 znaków i zawierać co najmniej 1 wielką literę oraz 1 cyfrę."
}, },
"login": "Zaloguj się", "login": "Zaloguj się",
"logout": "Wyloguj się", "logout": "Wyloguj się",

View File

@@ -1612,8 +1612,7 @@
"email": "Email", "email": "Email",
"firstName": "Nome", "firstName": "Nome",
"lastName": "Sobrenome", "lastName": "Sobrenome",
"password": "Senha", "password": "Senha"
"password_requirements": "A senha deve ter pelo menos 8 caracteres e incluir pelo menos 1 letra maiúscula e 1 número."
}, },
"login": "Login", "login": "Login",
"logout": "Sair", "logout": "Sair",

View File

@@ -1612,8 +1612,7 @@
"email": "Email", "email": "Email",
"firstName": "Nome", "firstName": "Nome",
"lastName": "Sobrenome", "lastName": "Sobrenome",
"password": "Senha", "password": "Senha"
"password_requirements": "A senha deve ter pelo menos 8 caracteres e incluir pelo menos 1 letra maiúscula e 1 número."
}, },
"login": "Login", "login": "Login",
"logout": "Sair", "logout": "Sair",

View File

@@ -1612,8 +1612,7 @@
"email": "Электронная почта", "email": "Электронная почта",
"firstName": "Имя", "firstName": "Имя",
"lastName": "Фамилия", "lastName": "Фамилия",
"password": "Пароль", "password": "Пароль"
"password_requirements": "Пароль должен содержать не менее 8 символов, включая как минимум 1 заглавную букву и 1 цифру."
}, },
"login": "Войти", "login": "Войти",
"logout": "Выход", "logout": "Выход",

Some files were not shown because too many files have changed in this diff.