Mirror of https://github.com/kestra-io/kestra.git (synced 2025-12-26 14:00:23 -05:00)

Compare commits: run-develo ... plugin/tem (64 commits)
Commits (SHA1):

0a121c6979
69a793b227
35ccb3e39b
3a7fcb2aa1
103c5b92e9
5253eeef95
848f835191
3e55e67534
7bca8b4924
56febfb415
925b8c6954
708816fe67
5502473fa4
c6cf0147a4
2951f4b4bc
4ea13e258b
3f8dcb47fd
42dc3b930c
97a78abd28
b3b2ef1b5a
596a26a137
8a9a1df436
55d0880ed3
a74ebd5cd6
f3aed38964
2595e56199
e821bd7f65
09762d2a8d
018c22918f
3e9c8cf7da
008404e442
2b224bcde8
1977b61693
8e2267f86c
24355c2a88
51adcfa908
a55baa1f96
32793fde18
4381d585ec
e595e26c45
b833cf28b5
ac11e9545c
a07df5f6cd
f626c85346
e15b53ebb5
7edb6bc379
78c81f932b
56bb3ca29c
14029e8c14
bea3d63d89
24a3bbd303
f9932af2e8
e0410c8f24
424a6cb41a
afde71e913
086c32e711
710abcfaac
be951d015c
a07260bef4
dd19f8391d
354873e220
386d4a15f0
1b75f15680
957bf74d97
.github/workflows/main-build.yml (vendored): 1 line changed

@@ -64,6 +64,7 @@ jobs:
 DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
 DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
 SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
+GH_PERSONAL_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
 
 
 publish-develop-maven:
.github/workflows/release-docker.yml (vendored): 1 line changed

@@ -32,3 +32,4 @@ jobs:
 DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
 DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
 SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
+GH_PERSONAL_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
@@ -21,7 +21,7 @@ plugins {
 
 // test
 id "com.adarshr.test-logger" version "4.0.0"
-id "org.sonarqube" version "7.1.0.6387"
+id "org.sonarqube" version "7.2.0.6526"
 id 'jacoco-report-aggregation'
 
 // helper
@@ -4,13 +4,16 @@ import io.kestra.core.utils.MapUtils;
 import io.swagger.v3.oas.annotations.media.Schema;
 import jakarta.annotation.Nullable;
 import jakarta.validation.constraints.NotEmpty;
+import jakarta.validation.constraints.Pattern;
 
 import java.util.*;
 import java.util.function.Predicate;
 import java.util.stream.Collectors;
 
 @Schema(description = "A key/value pair that can be attached to a Flow or Execution. Labels are often used to organize and categorize objects.")
-public record Label(@NotEmpty String key, @NotEmpty String value) {
+public record Label(
+@NotEmpty @Pattern(regexp = "^[\\p{Ll}][\\p{L}0-9._-]*$", message = "Invalid label key. A valid key contains only lowercase letters numbers hyphens (-) underscores (_) or periods (.) and must begin with a lowercase letter.") String key,
+@NotEmpty String value) {
 public static final String SYSTEM_PREFIX = "system.";
 
 // system labels
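
The new @Pattern constraint above is a plain regular expression, so its effect can be previewed outside the Bean Validation machinery. The following standalone sketch (illustrative only, not Kestra code) applies the exact pattern from the diff to a few keys; the accepted and rejected examples mirror the LabelTest cases added later in this changeset.

    import java.util.List;
    import java.util.regex.Pattern;

    public class LabelKeyPatternDemo {
        // Same pattern as the @Pattern annotation above: must start with a lowercase
        // letter, then any mix of letters, digits, dots, underscores, or hyphens.
        private static final Pattern LABEL_KEY = Pattern.compile("^[\\p{Ll}][\\p{L}0-9._-]*$");

        public static void main(String[] args) {
            List.of("foo", "foo-bar_baz123", "foo.bar",   // accepted
                    "Foo", "-foo", "9test", "foo bar")    // rejected
                .forEach(key -> System.out.println(key + " -> " + LABEL_KEY.matcher(key).matches()));
        }
    }
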
@@ -94,7 +94,7 @@ public record QueryFilter(
 KIND("kind") {
 @Override
 public List<Op> supportedOp() {
-return List.of(Op.EQUALS,Op.NOT_EQUALS);
+return List.of(Op.EQUALS,Op.NOT_EQUALS, Op.IN, Op.NOT_IN);
 }
 },
 LABELS("labels") {
@@ -106,7 +106,7 @@ public record QueryFilter(
 FLOW_ID("flowId") {
 @Override
 public List<Op> supportedOp() {
-return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.REGEX);
+return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.REGEX, Op.IN, Op.NOT_IN, Op.PREFIX);
 }
 },
 UPDATED("updated") {
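
With IN, NOT_IN, and PREFIX now supported for the flowId field, a filter matching several flow identifiers at once would be built roughly as in the test arguments further down in this changeset. This is a sketch that reuses only the builder calls visible in those tests (field, operation, value, build); the flow ids are hypothetical and the snippet is not verified against the full QueryFilter API.

    // Hypothetical filter: keep executions whose flowId is one of the listed values.
    QueryFilter byFlowIds = QueryFilter.builder()
        .field(Field.FLOW_ID)
        .operation(Op.IN)
        .value(List.of("daily-report", "hourly-sync"))
        .build();
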
@@ -226,7 +226,7 @@ public record QueryFilter(
 FLOW {
 @Override
 public List<Field> supportedField() {
-return List.of(Field.LABELS, Field.NAMESPACE, Field.QUERY, Field.SCOPE);
+return List.of(Field.LABELS, Field.NAMESPACE, Field.QUERY, Field.SCOPE, Field.FLOW_ID);
 }
 },
 NAMESPACE {
@@ -241,7 +241,7 @@ public record QueryFilter(
 return List.of(
 Field.QUERY, Field.SCOPE, Field.FLOW_ID, Field.START_DATE, Field.END_DATE,
 Field.STATE, Field.LABELS, Field.TRIGGER_EXECUTION_ID, Field.CHILD_FILTER,
-Field.NAMESPACE,Field.KIND
+Field.NAMESPACE, Field.KIND
 );
 }
 },
@@ -267,6 +267,10 @@ public class State {
 return this == Type.RUNNING || this == Type.KILLING;
 }
 
+public boolean onlyRunning() {
+return this == Type.RUNNING;
+}
+
 public boolean isFailed() {
 return this == Type.FAILED;
 }
@@ -93,7 +93,7 @@ public class Property<T> {
 * @return a new {@link Property} without a pre-rendered value
 */
 public Property<T> skipCache() {
-return Property.ofExpression(expression);
+return new Property<>(expression, true);
 }
 
 /**
@@ -158,11 +158,7 @@ public class FlowInputOutput {
 File tempFile = File.createTempFile(prefix, fileExtension);
 try (var inputStream = fileUpload.getInputStream();
 var outputStream = new FileOutputStream(tempFile)) {
-long transferredBytes = inputStream.transferTo(outputStream);
-if (transferredBytes == 0) {
-sink.error(new KestraRuntimeException("Can't upload file: " + fileUpload.getFilename()));
-return;
-}
+inputStream.transferTo(outputStream);
 URI from = storageInterface.from(execution, inputId, fileName, tempFile);
 sink.next(Map.entry(inputId, from.toString()));
 } finally {
@@ -382,11 +378,11 @@ public class FlowInputOutput {
 
 @SuppressWarnings("unchecked")
 private static <T> Object resolveDefaultPropertyAs(Input<?> input, PropertyContext renderer, Class<T> clazz) throws IllegalVariableEvaluationException {
-return Property.as((Property<T>) input.getDefaults(), renderer, clazz);
+return Property.as((Property<T>) input.getDefaults().skipCache(), renderer, clazz);
 }
 @SuppressWarnings("unchecked")
 private static <T> Object resolveDefaultPropertyAsList(Input<?> input, PropertyContext renderer, Class<T> clazz) throws IllegalVariableEvaluationException {
-return Property.asList((Property<List<T>>) input.getDefaults(), renderer, clazz);
+return Property.asList((Property<List<T>>) input.getDefaults().skipCache(), renderer, clazz);
 }
 
 private RunContext buildRunContextForExecutionAndInputs(final FlowInterface flow, final Execution execution, Map<String, InputAndValue> dependencies, final boolean decryptSecrets) {
@@ -502,8 +498,8 @@ public class FlowInputOutput {
 yield storageInterface.from(execution, id, current.toString().substring(current.toString().lastIndexOf("/") + 1), new File(current.toString()));
 }
 }
-case JSON -> JacksonMapper.toObject(current.toString());
-case YAML -> YAML_MAPPER.readValue(current.toString(), JacksonMapper.OBJECT_TYPE_REFERENCE);
+case JSON -> (current instanceof Map || current instanceof Collection<?>) ? current : JacksonMapper.toObject(current.toString());
+case YAML -> (current instanceof Map || current instanceof Collection<?>) ? current : YAML_MAPPER.readValue(current.toString(), JacksonMapper.OBJECT_TYPE_REFERENCE);
 case URI -> {
 Matcher matcher = URI_PATTERN.matcher(current.toString());
 if (matcher.matches()) {
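
The guard added to the JSON and YAML branches is ordinary defensive coding: when the default value has already been deserialized into a Map or Collection, re-serializing it through toString() and parsing it again would fail, so the structured value is passed through untouched. A minimal standalone sketch of the same idea with plain Jackson (using ObjectMapper directly rather than Kestra's JacksonMapper wrapper, which is an assumption made for illustration):

    import com.fasterxml.jackson.databind.ObjectMapper;
    import java.util.Collection;
    import java.util.Map;

    public class JsonCoercionDemo {
        private static final ObjectMapper MAPPER = new ObjectMapper();

        // Mirrors the instanceof guard added above: already-structured values are
        // returned as-is, only raw strings are parsed as JSON text.
        static Object coerceJson(Object current) throws Exception {
            if (current instanceof Map || current instanceof Collection<?>) {
                return current;
            }
            return MAPPER.readValue(current.toString(), Object.class);
        }

        public static void main(String[] args) throws Exception {
            System.out.println(coerceJson(Map.of("a", 1)));   // passed through unchanged
            System.out.println(coerceJson("{\"a\": 1}"));     // parsed from JSON text
        }
    }
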
@@ -65,10 +65,9 @@ public class ListUtils {
 }
 
 public static List<String> convertToListString(Object object){
-if (object instanceof List<?> list && (list.isEmpty() || list.getFirst() instanceof String)) {
-return (List<String>) list;
-} else {
-throw new IllegalArgumentException("%s in not an instance of List of String".formatted(object));
-}
+return convertToList(object)
+.stream()
+.map(Object::toString)
+.toList();
 }
 }
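
The rewritten convertToListString no longer rejects lists whose elements are not Strings; every element is now stringified with Object::toString. A standalone equivalent of the new behaviour (the null handling below is an assumption for the sketch; the real method delegates to ListUtils.convertToList):

    import java.util.List;

    public class ConvertToListStringDemo {
        // Convert any list to a list of strings instead of failing when it is not List<String>.
        static List<String> convertToListString(List<?> list) {
            return list == null ? List.of() : list.stream().map(Object::toString).toList();
        }

        public static void main(String[] args) {
            System.out.println(convertToListString(List.of(1, 2, 3)));  // ["1", "2", "3"]
            System.out.println(convertToListString(List.of("a", "b"))); // unchanged
        }
    }
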
@@ -32,10 +32,12 @@ public class NamespaceFilesUtils {
 private ExecutorsUtils executorsUtils;
 
 private ExecutorService executorService;
+private int maxThreads;
 
 @PostConstruct
 public void postConstruct() {
-this.executorService = executorsUtils.maxCachedThreadPool(Math.max(Runtime.getRuntime().availableProcessors() * 4, 32), "namespace-file");
+this.maxThreads = Math.max(Runtime.getRuntime().availableProcessors() * 4, 32);
+this.executorService = executorsUtils.maxCachedThreadPool(maxThreads, "namespace-file");
 }
 
 public void loadNamespaceFiles(
@@ -63,7 +65,11 @@ public class NamespaceFilesUtils {
 matchedNamespaceFiles.addAll(files);
 }
 
+// Use half of the available threads to avoid impacting concurrent tasks
+int parallelism = maxThreads / 2;
 Flux.fromIterable(matchedNamespaceFiles)
+.parallel(parallelism)
+.runOn(Schedulers.fromExecutorService(executorService))
 .doOnNext(throwConsumer(nsFile -> {
 InputStream content = runContext.storage().getFile(nsFile.uri());
 Path path = folderPerNamespace ?
@@ -71,7 +77,7 @@ public class NamespaceFilesUtils {
 Path.of(nsFile.path());
 runContext.workingDir().putFile(path, content, fileExistComportment);
 }))
-.publishOn(Schedulers.fromExecutorService(executorService))
+.sequential()
 .blockLast();
 
 Duration duration = stopWatch.getDuration();
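
Taken together, the two NamespaceFilesUtils hunks above replace publishOn (which only shifts downstream work onto the pool) with Reactor's parallel/runOn/sequential pattern, so file copies actually run concurrently on up to maxThreads / 2 rails. A self-contained sketch of that pattern with generic work items (the file names are placeholders, not Kestra namespace files):

    import java.util.List;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    import reactor.core.publisher.Flux;
    import reactor.core.scheduler.Schedulers;

    public class ParallelCopyDemo {
        public static void main(String[] args) {
            ExecutorService pool = Executors.newFixedThreadPool(8);
            int parallelism = 4; // e.g. half of the pool, as in the change above

            Flux.fromIterable(List.of("file-1", "file-2", "file-3", "file-4"))
                .parallel(parallelism)                          // split into parallel rails
                .runOn(Schedulers.fromExecutorService(pool))    // each rail runs on the pool
                .doOnNext(file -> System.out.println(Thread.currentThread().getName() + " copying " + file))
                .sequential()                                   // merge rails back into one Flux
                .blockLast();                                   // wait for completion

            pool.shutdown();
        }
    }
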
@@ -157,7 +157,7 @@ public class LoopUntil extends Task implements FlowableTask<LoopUntil.Output> {
 
 public Instant nextExecutionDate(RunContext runContext, Execution execution, TaskRun parentTaskRun) throws IllegalVariableEvaluationException {
 if (!this.reachedMaximums(runContext, execution, parentTaskRun, false)) {
-String continueLoop = runContext.render(this.condition).as(String.class).orElse(null);
+String continueLoop = runContext.render(this.condition).skipCache().as(String.class).orElse(null);
 if (!TruthUtils.isTruthy(continueLoop)) {
 return Instant.now().plus(runContext.render(this.getCheckFrequency().getInterval()).as(Duration.class).orElseThrow());
 }
@@ -123,7 +123,7 @@ public class Switch extends Task implements FlowableTask<Switch.Output> {
 }
 
 private String rendererValue(RunContext runContext) throws IllegalVariableEvaluationException {
-return runContext.render(this.value).as(String.class).orElseThrow();
+return runContext.render(this.value).skipCache().as(String.class).orElseThrow();
 }
 
 @Override
@@ -32,9 +32,17 @@ import lombok.experimental.SuperBuilder;
 examples = {
 @Example(
 code = """
-spec: |
-type: io.kestra.plugin.core.http.Download
-{{ task.property }}: {{ task.value }}
+id: templated_task
+namespace: company.team
+variables:
+property: uri
+value: https://kestra.io
+tasks:
+- id: templated_task
+type: io.kestra.plugin.core.templating.TemplatedTask
+spec: |
+type: io.kestra.plugin.core.http.Download
+{{ vars.property }}: {{ vars.value }}
 """
 )
 },
core/src/main/resources/metadata/chart.yaml (new file, 8 lines)

@@ -0,0 +1,8 @@
+group: io.kestra.plugin.core.chart
+name: "chart"
+title: "Chart"
+description: "Tasks that render dashboard charts from Kestra data sources."
+body: "Use these chart widgets to visualize metrics, executions, or flow trends in dashboards; pair them with dashboard data queries and configure aggregations, groupings, and chart options for Bar, Pie, Time Series, KPI, or Table outputs."
+videos: []
+createdBy: "Kestra Core Team"
+managedBy: "Kestra Core Team"
core/src/main/resources/metadata/condition.yaml (new file, 8 lines)

@@ -0,0 +1,8 @@
+group: io.kestra.plugin.core.condition
+name: "condition"
+title: "Condition"
+description: "Tasks that evaluate conditions to control flow execution or triggers."
+body: "Use these predicates to gate tasks or triggers based on time windows, calendars, execution metadata, labels, namespaces, retries, or custom expressions; configure required parameters such as allowed states, namespaces, date ranges, or JEXL expressions to return a true/false result."
+videos: []
+createdBy: "Kestra Core Team"
+managedBy: "Kestra Core Team"
core/src/main/resources/metadata/data.yaml (new file, 8 lines)

@@ -0,0 +1,8 @@
+group: io.kestra.plugin.core.data
+name: "data"
+title: "Data"
+description: "Tasks that fetch Kestra executions, flows, logs, metrics, and triggers as datasets for dashboards."
+body: "These data providers query Kestra repositories with filters and aggregations to feed dashboard charts; configure columns and fields (such as namespace, state, timestamp, or labels) plus any filters to shape the returned dataset for visualization."
+videos: []
+createdBy: "Kestra Core Team"
+managedBy: "Kestra Core Team"
core/src/main/resources/metadata/debug.yaml (new file, 8 lines)

@@ -0,0 +1,8 @@
+group: io.kestra.plugin.core.debug
+name: "debug"
+title: "Debug"
+description: "Tasks that emit debug output while you develop a flow."
+body: "Echo and Return help inspect variables and payloads or short-circuit execution during testing; provide the message or value to output so downstream tasks can see exactly what is being passed around."
+videos: []
+createdBy: "Kestra Core Team"
+managedBy: "Kestra Core Team"
core/src/main/resources/metadata/execution.yaml (new file, 8 lines)

@@ -0,0 +1,8 @@
+group: io.kestra.plugin.core.execution
+name: "execution"
+title: "Execution"
+description: "Tasks that manage the lifecycle and context of a running execution."
+body: "Use these tasks to assert expectations, set or unset variables, add labels, fail, exit, resume, or purge executions; supply required properties such as variable maps, label key/values, or retention rules before altering execution state."
+videos: []
+createdBy: "Kestra Core Team"
+managedBy: "Kestra Core Team"
core/src/main/resources/metadata/flow.yaml (new file, 8 lines)

@@ -0,0 +1,8 @@
+group: io.kestra.plugin.core.flow
+name: "flow"
+title: "Flow"
+description: "Tasks that orchestrate control flow within a Kestra pipeline."
+body: "Sequence, branch, loop, parallelize, or nest subflows/templates using these primitives; define embedded task lists, values for switches, iteration collections, working directories, and loop exit criteria to structure complex workflows cleanly."
+videos: []
+createdBy: "Kestra Core Team"
+managedBy: "Kestra Core Team"
core/src/main/resources/metadata/http.yaml (new file, 8 lines)

@@ -0,0 +1,8 @@
+group: io.kestra.plugin.core.http
+name: "http"
+title: "HTTP"
+description: "Tasks that interact with HTTP endpoints."
+body: "Perform requests, downloads, or webhook triggers with configurable methods, headers, authentication, and payloads; provide the target URI plus any body or query parameters, and use response handling options to store results for downstream tasks."
+videos: []
+createdBy: "Kestra Core Team"
+managedBy: "Kestra Core Team"
core/src/main/resources/metadata/index.yaml (new file, 8 lines)

@@ -0,0 +1,8 @@
+group: io.kestra.plugin.core
+name: "core"
+title: "Core Plugins and tasks"
+description: "Tasks that provide Kestra's built-in orchestration, I/O, and observability capabilities."
+body: "Core plugins cover control-flow, execution management, triggers, storage, HTTP, metrics, logging, templating, and dashboard widgets; combine these foundational tasks to build reliable workflows without adding external dependencies."
+videos: []
+createdBy: "Kestra Core Team"
+managedBy: "Kestra Core Team"
core/src/main/resources/metadata/kv.yaml (new file, 8 lines)

@@ -0,0 +1,8 @@
+group: io.kestra.plugin.core.kv
+name: "kv"
+title: "KV"
+description: "Tasks that manage key-value pairs in Kestra's KV store."
+body: "Set, get, list, version, and delete namespaced keys to share state across flows; specify the key path, value for writes, and optional namespace or TTL to control how data is stored, retrieved, and purged."
+videos: []
+createdBy: "Kestra Core Team"
+managedBy: "Kestra Core Team"
core/src/main/resources/metadata/log.yaml (new file, 8 lines)

@@ -0,0 +1,8 @@
+group: io.kestra.plugin.core.log
+name: "log"
+title: "Log"
+description: "Tasks that write, fetch, or purge Kestra logs."
+body: "Emit structured log messages, retrieve stored logs, or clean up log storage; provide message content or log query filters and consider namespace or execution scoping when purging."
+videos: []
+createdBy: "Kestra Core Team"
+managedBy: "Kestra Core Team"
core/src/main/resources/metadata/metric.yaml (new file, 8 lines)

@@ -0,0 +1,8 @@
+group: io.kestra.plugin.core.metric
+name: "metric"
+title: "Metric"
+description: "Tasks that publish custom metrics from flows."
+body: "Send counters, gauges, and timing metrics to Kestra's metric store for dashboards and alerts; define the metric name, type, value, labels, and optional timestamp to record meaningful telemetry."
+videos: []
+createdBy: "Kestra Core Team"
+managedBy: "Kestra Core Team"
core/src/main/resources/metadata/namespace.yaml (new file, 8 lines)

@@ -0,0 +1,8 @@
+group: io.kestra.plugin.core.namespace
+name: "namespace"
+title: "Namespace"
+description: "Tasks that manage namespace files and versions."
+body: "Upload, download, delete, purge, or version files stored in a namespace—useful for shipping assets or configs with flows; set the target namespace, paths or glob patterns, and purge behavior to control stored artifacts."
+videos: []
+createdBy: "Kestra Core Team"
+managedBy: "Kestra Core Team"
core/src/main/resources/metadata/output.yaml (new file, 8 lines)

@@ -0,0 +1,8 @@
+group: io.kestra.plugin.core.output
+name: "output"
+title: "Output"
+description: "Tasks that expose outputs from a flow."
+body: "Use OutputValues to publish key-value outputs for downstream tasks or subflows; declare the output map and data types that consuming tasks should read."
+videos: []
+createdBy: "Kestra Core Team"
+managedBy: "Kestra Core Team"
core/src/main/resources/metadata/runner.yaml (new file, 8 lines)

@@ -0,0 +1,8 @@
+group: io.kestra.plugin.core.runner
+name: "runner"
+title: "Runner"
+description: "Tasks that execute commands on the Kestra worker."
+body: "Run shell processes with configurable command, environment, working directory, and input/output handling; ensure commands are idempotent and set expected exit codes or resource needs when invoking external binaries."
+videos: []
+createdBy: "Kestra Core Team"
+managedBy: "Kestra Core Team"
core/src/main/resources/metadata/storage.yaml (new file, 8 lines)

@@ -0,0 +1,8 @@
+group: io.kestra.plugin.core.storage
+name: "storage"
+title: "Storage"
+description: "Tasks that manipulate files in Kestra's internal storage."
+body: "Write, delete, concatenate, split, deduplicate, filter, reverse, size, or list files used by executions; provide source and target storage URIs and any encoding or line-handling options to transform stored data safely."
+videos: []
+createdBy: "Kestra Core Team"
+managedBy: "Kestra Core Team"
core/src/main/resources/metadata/templating.yaml (new file, 8 lines)

@@ -0,0 +1,8 @@
+group: io.kestra.plugin.core.templating
+name: "templating"
+title: "Templating"
+description: "Tasks that render dynamic task specifications from templates."
+body: "TemplatedTask lets you supply a Pebble-rendered YAML spec that is parsed and executed at runtime; provide the `spec` property with a valid runnable task definition and avoid recursive templating when composing dynamic tasks."
+videos: []
+createdBy: "Kestra Core Team"
+managedBy: "Kestra Core Team"
core/src/main/resources/metadata/trigger.yaml (new file, 8 lines)

@@ -0,0 +1,8 @@
+group: io.kestra.plugin.core.trigger
+name: "trigger"
+title: "Trigger"
+description: "Tasks that start flows from schedules or events."
+body: "Define cron-based schedules, specific date triggers, webhooks, namespace flow triggers, or toggles; set required properties like cron expressions, webhook secrets, and target flow references to control when executions fire."
+videos: []
+createdBy: "Kestra Core Team"
+managedBy: "Kestra Core Team"
@@ -134,4 +134,47 @@ class LabelTest {
 Optional<ConstraintViolationException> emptyKeyLabelResult = modelValidator.isValid(new Label("", "bar"));
 assertThat(emptyKeyLabelResult.isPresent()).isTrue();
 }
+
+@Test
+void shouldValidateValidLabelKeys() {
+// Valid keys: start with lowercase; may contain letters, numbers, hyphens, underscores, periods
+assertThat(modelValidator.isValid(new Label("foo", "bar")).isPresent()).isFalse();
+assertThat(modelValidator.isValid(new Label("foo-bar", "value")).isPresent()).isFalse();
+assertThat(modelValidator.isValid(new Label("foo_bar", "value")).isPresent()).isFalse();
+assertThat(modelValidator.isValid(new Label("foo123", "value")).isPresent()).isFalse();
+assertThat(modelValidator.isValid(new Label("foo-bar_baz123", "value")).isPresent()).isFalse();
+assertThat(modelValidator.isValid(new Label("a", "value")).isPresent()).isFalse();
+assertThat(modelValidator.isValid(new Label("foo.bar", "value")).isPresent()).isFalse(); // dot is allowed
+}
+
+@Test
+void shouldRejectInvalidLabelKeys() {
+
+Optional<ConstraintViolationException> spaceResult = modelValidator.isValid(new Label("foo bar", "value"));
+assertThat(spaceResult.isPresent()).isTrue();
+
+Optional<ConstraintViolationException> uppercaseResult = modelValidator.isValid(new Label("Foo", "value"));
+assertThat(uppercaseResult.isPresent()).isTrue();
+
+Optional<ConstraintViolationException> emojiResult = modelValidator.isValid(new Label("💩", "value"));
+assertThat(emojiResult.isPresent()).isTrue();
+
+Optional<ConstraintViolationException> atSignResult = modelValidator.isValid(new Label("foo@bar", "value"));
+assertThat(atSignResult.isPresent()).isTrue();
+
+Optional<ConstraintViolationException> colonResult = modelValidator.isValid(new Label("foo:bar", "value"));
+assertThat(colonResult.isPresent()).isTrue();
+
+Optional<ConstraintViolationException> hyphenStartResult = modelValidator.isValid(new Label("-foo", "value"));
+assertThat(hyphenStartResult.isPresent()).isTrue();
+
+Optional<ConstraintViolationException> underscoreStartResult = modelValidator.isValid(new Label("_foo", "value"));
+assertThat(underscoreStartResult.isPresent()).isTrue();
+
+Optional<ConstraintViolationException> zeroResult = modelValidator.isValid(new Label("0", "value"));
+assertThat(zeroResult.isPresent()).isTrue();
+
+Optional<ConstraintViolationException> digitStartResult = modelValidator.isValid(new Label("9test", "value"));
+assertThat(digitStartResult.isPresent()).isTrue();
+}
 }
@@ -61,6 +61,9 @@ public class QueryFilterTest {
 Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.ENDS_WITH).build(), Resource.EXECUTION),
 Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.CONTAINS).build(), Resource.EXECUTION),
 Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.REGEX).build(), Resource.EXECUTION),
+Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.IN).build(), Resource.EXECUTION),
+Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.NOT_IN).build(), Resource.EXECUTION),
+Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.PREFIX).build(), Resource.EXECUTION),
 
 Arguments.of(QueryFilter.builder().field(Field.START_DATE).operation(Op.EQUALS).build(), Resource.EXECUTION),
 Arguments.of(QueryFilter.builder().field(Field.START_DATE).operation(Op.NOT_EQUALS).build(), Resource.EXECUTION),
@@ -168,9 +171,6 @@
 Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.LESS_THAN).build(), Resource.EXECUTION),
 Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.GREATER_THAN_OR_EQUAL_TO).build(), Resource.EXECUTION),
 Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.LESS_THAN_OR_EQUAL_TO).build(), Resource.EXECUTION),
-Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.IN).build(), Resource.EXECUTION),
-Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.NOT_IN).build(), Resource.EXECUTION),
-Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).operation(Op.PREFIX).build(), Resource.EXECUTION),
 
 Arguments.of(QueryFilter.builder().field(Field.START_DATE).operation(Op.IN).build(), Resource.EXECUTION),
 Arguments.of(QueryFilter.builder().field(Field.START_DATE).operation(Op.NOT_IN).build(), Resource.EXECUTION),
@@ -185,4 +185,21 @@ class FlowTest {
 
 return YamlParser.parse(file, Flow.class);
 }
+@Test
+void illegalNamespaceUpdate() {
+Flow original = Flow.builder()
+.id("my-flow")
+.namespace("io.kestra.prod")
+.tasks(List.of(Log.builder().id("log").type(Log.class.getName()).message("hello").build()))
+.build();
+
+Flow updated = original.toBuilder()
+.namespace("io.kestra.dev")
+.build();
+
+Optional<ConstraintViolationException> validate = original.validateUpdate(updated);
+
+assertThat(validate.isPresent()).isTrue();
+assertThat(validate.get().getMessage()).contains("Illegal namespace update");
+}
 }
@@ -52,8 +52,8 @@ import java.util.function.Function;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
+import static io.kestra.core.models.flows.FlowScope.SYSTEM;
 import static io.kestra.core.models.flows.FlowScope.USER;
-import static java.time.temporal.ChronoUnit.MINUTES;
 import static java.time.temporal.ChronoUnit.SECONDS;
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.junit.jupiter.api.Assertions.assertThrows;
@@ -81,6 +81,7 @@ public abstract class AbstractExecutionRepositoryTest {
 .tenantId(tenantId)
 .flowId(flowId == null ? FLOW : flowId)
 .flowRevision(1)
+.kind(ExecutionKind.NORMAL)
 .state(finalState);
 
 
@@ -196,15 +197,49 @@ public abstract class AbstractExecutionRepositoryTest {
 static Stream<Arguments> filterCombinations() {
 return Stream.of(
 Arguments.of(QueryFilter.builder().field(Field.QUERY).value("unittest").operation(Op.EQUALS).build(), 29),
+Arguments.of(QueryFilter.builder().field(Field.QUERY).value("unused").operation(Op.NOT_EQUALS).build(), 29),
+
 Arguments.of(QueryFilter.builder().field(Field.SCOPE).value(List.of(USER)).operation(Op.EQUALS).build(), 29),
+Arguments.of(QueryFilter.builder().field(Field.SCOPE).value(List.of(SYSTEM)).operation(Op.NOT_EQUALS).build(), 29),
+
 Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value("io.kestra.unittest").operation(Op.EQUALS).build(), 29),
+Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value("not.this.one").operation(Op.NOT_EQUALS).build(), 29),
+Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value("o.kestra.unittes").operation(Op.CONTAINS).build(), 29),
+Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value("io.kestra.uni").operation(Op.STARTS_WITH).build(), 29),
+Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value("o.kestra.unittest").operation(Op.ENDS_WITH).build(), 29),
+Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value("io\\.kestra\\.unittest").operation(Op.REGEX).build(), 29),
+Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value(List.of("io.kestra.unittest", "unused")).operation(Op.IN).build(), 29),
+Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value(List.of("unused.first", "unused.second")).operation(Op.NOT_IN).build(), 29),
+Arguments.of(QueryFilter.builder().field(Field.NAMESPACE).value("io.kestra").operation(Op.PREFIX).build(), 29),
+
+Arguments.of(QueryFilter.builder().field(Field.KIND).value(ExecutionKind.NORMAL).operation(Op.EQUALS).build(), 29),
+Arguments.of(QueryFilter.builder().field(Field.KIND).value(ExecutionKind.TEST).operation(Op.NOT_EQUALS).build(), 29),
+Arguments.of(QueryFilter.builder().field(Field.KIND).value(List.of(ExecutionKind.NORMAL, ExecutionKind.PLAYGROUND)).operation(Op.IN).build(), 29),
+Arguments.of(QueryFilter.builder().field(Field.KIND).value(List.of(ExecutionKind.PLAYGROUND, ExecutionKind.TEST)).operation(Op.NOT_IN).build(), 29),
+
 Arguments.of(QueryFilter.builder().field(Field.LABELS).value(Map.of("key", "value")).operation(Op.EQUALS).build(), 1),
+Arguments.of(QueryFilter.builder().field(Field.LABELS).value(Map.of("key", "unknown")).operation(Op.NOT_EQUALS).build(), 29),
+Arguments.of(QueryFilter.builder().field(Field.LABELS).value(Map.of("key", "value", "key2", "value2")).operation(Op.IN).build(), 1),
+Arguments.of(QueryFilter.builder().field(Field.LABELS).value(Map.of("key1", "value1")).operation(Op.NOT_IN).build(), 29),
+Arguments.of(QueryFilter.builder().field(Field.LABELS).value("value").operation(Op.CONTAINS).build(), 1),
+
 Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value(FLOW).operation(Op.EQUALS).build(), 16),
+Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value(FLOW).operation(Op.NOT_EQUALS).build(), 13),
+Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value("ul").operation(Op.CONTAINS).build(), 16),
+Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value("ful").operation(Op.STARTS_WITH).build(), 16),
+Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value("ull").operation(Op.ENDS_WITH).build(), 16),
+Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value("[ful]{4}").operation(Op.REGEX).build(), 16),
+Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value(List.of(FLOW, "other")).operation(Op.IN).build(), 16),
+Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value(List.of(FLOW, "other2")).operation(Op.NOT_IN).build(), 13),
+Arguments.of(QueryFilter.builder().field(Field.FLOW_ID).value("ful").operation(Op.PREFIX).build(), 16),
+
 Arguments.of(QueryFilter.builder().field(Field.START_DATE).value(ZonedDateTime.now().minusMinutes(1)).operation(Op.GREATER_THAN).build(), 29),
 Arguments.of(QueryFilter.builder().field(Field.END_DATE).value(ZonedDateTime.now().plusMinutes(1)).operation(Op.LESS_THAN).build(), 29),
 Arguments.of(QueryFilter.builder().field(Field.STATE).value(Type.RUNNING).operation(Op.EQUALS).build(), 5),
 Arguments.of(QueryFilter.builder().field(Field.TRIGGER_EXECUTION_ID).value("executionTriggerId").operation(Op.EQUALS).build(), 29),
-Arguments.of(QueryFilter.builder().field(Field.CHILD_FILTER).value(ChildFilter.CHILD).operation(Op.EQUALS).build(), 29)
+Arguments.of(QueryFilter.builder().field(Field.CHILD_FILTER).value(ChildFilter.CHILD).operation(Op.EQUALS).build(), 29),
+Arguments.of(QueryFilter.builder().field(Field.CHILD_FILTER).value(ChildFilter.CHILD).operation(Op.NOT_EQUALS).build(), 0)
 );
 }
 
@@ -656,6 +691,65 @@ public abstract class AbstractExecutionRepositoryTest {
 assertThat(data).first().hasFieldOrPropertyWithValue("id", execution.getId());
 }
+
+@Test
+void dashboard_fetchData_365Days_verifiesDateGrouping() throws IOException {
+var tenantId = TestsUtils.randomTenant(this.getClass().getSimpleName());
+var executionDuration = Duration.ofMinutes(220);
+var executionCreateDate = Instant.now();
+
+// Create an execution within the 365-day range
+Execution execution = Execution.builder()
+.tenantId(tenantId)
+.id(IdUtils.create())
+.namespace("io.kestra.unittest")
+.flowId("some-execution")
+.flowRevision(1)
+.labels(Label.from(Map.of("country", "FR")))
+.state(new State(Type.SUCCESS,
+List.of(new State.History(State.Type.CREATED, executionCreateDate), new State.History(Type.SUCCESS, executionCreateDate.plus(executionDuration)))))
+.taskRunList(List.of())
+.build();
+
+execution = executionRepository.save(execution);
+
+// Create an execution BEYOND 365 days (400 days ago) - should be filtered out
+var executionCreateDateOld = Instant.now().minus(Duration.ofDays(400));
+Execution executionOld = Execution.builder()
+.tenantId(tenantId)
+.id(IdUtils.create())
+.namespace("io.kestra.unittest")
+.flowId("some-execution-old")
+.flowRevision(1)
+.labels(Label.from(Map.of("country", "US")))
+.state(new State(Type.SUCCESS,
+List.of(new State.History(State.Type.CREATED, executionCreateDateOld), new State.History(Type.SUCCESS, executionCreateDateOld.plus(executionDuration)))))
+.taskRunList(List.of())
+.build();
+
+executionRepository.save(executionOld);
+
+var now = ZonedDateTime.now();
+ArrayListTotal<Map<String, Object>> data = executionRepository.fetchData(tenantId, Executions.builder()
+.type(Executions.class.getName())
+.columns(Map.of(
+"count", ColumnDescriptor.<Executions.Fields>builder().field(Executions.Fields.ID).agg(AggregationType.COUNT).build(),
+"id", ColumnDescriptor.<Executions.Fields>builder().field(Executions.Fields.ID).build(),
+"date", ColumnDescriptor.<Executions.Fields>builder().field(Executions.Fields.START_DATE).build(),
+"duration", ColumnDescriptor.<Executions.Fields>builder().field(Executions.Fields.DURATION).build()
+)).build(),
+now.minusDays(365),
+now,
+null
+);
+
+// Should only return 1 execution (the recent one), not the 400-day-old execution
+assertThat(data.getTotal()).isGreaterThanOrEqualTo(1L);
+assertThat(data).isNotEmpty();
+assertThat(data).first().hasFieldOrProperty("count");
+}
+
+
 
 private static Execution buildWithCreatedDate(String tenant, Instant instant) {
 return Execution.builder()
 .id(IdUtils.create())
@@ -121,7 +121,8 @@ public abstract class AbstractFlowRepositoryTest {
 QueryFilter.builder().field(Field.QUERY).value("filterFlowId").operation(Op.EQUALS).build(),
 QueryFilter.builder().field(Field.SCOPE).value(List.of(SYSTEM)).operation(Op.EQUALS).build(),
 QueryFilter.builder().field(Field.NAMESPACE).value(SYSTEM_FLOWS_DEFAULT_NAMESPACE).operation(Op.EQUALS).build(),
-QueryFilter.builder().field(Field.LABELS).value(Map.of("key", "value")).operation(Op.EQUALS).build()
+QueryFilter.builder().field(Field.LABELS).value(Map.of("key", "value")).operation(Op.EQUALS).build(),
+QueryFilter.builder().field(Field.FLOW_ID).value("filterFlowId").operation(Op.EQUALS).build()
 );
 }
 
@@ -145,7 +146,6 @@ public abstract class AbstractFlowRepositoryTest {
 
 static Stream<QueryFilter> errorFilterCombinations() {
 return Stream.of(
-QueryFilter.builder().field(Field.FLOW_ID).value("sleep").operation(Op.EQUALS).build(),
 QueryFilter.builder().field(Field.START_DATE).value(ZonedDateTime.now().minusMinutes(1)).operation(Op.GREATER_THAN).build(),
 QueryFilter.builder().field(Field.END_DATE).value(ZonedDateTime.now().plusMinutes(1)).operation(Op.LESS_THAN).build(),
 QueryFilter.builder().field(Field.STATE).value(State.Type.RUNNING).operation(Op.EQUALS).build(),
@@ -0,0 +1,91 @@
+package io.kestra.core.runners;
+
+import io.kestra.core.junit.annotations.FlakyTest;
+import io.kestra.core.junit.annotations.KestraTest;
+import io.kestra.core.junit.annotations.LoadFlows;
+import jakarta.inject.Inject;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInstance;
+
+@KestraTest(startRunner = true)
+@TestInstance(TestInstance.Lifecycle.PER_CLASS)
+public abstract class AbstractRunnerConcurrencyTest {
+public static final String TENANT_1 = "tenant1";
+
+@Inject
+protected FlowConcurrencyCaseTest flowConcurrencyCaseTest;
+
+@Test
+@LoadFlows({"flows/valids/flow-concurrency-cancel.yml"})
+void concurrencyCancel() throws Exception {
+flowConcurrencyCaseTest.flowConcurrencyCancel();
+}
+
+@Test
+@LoadFlows({"flows/valids/flow-concurrency-fail.yml"})
+void concurrencyFail() throws Exception {
+flowConcurrencyCaseTest.flowConcurrencyFail();
+}
+
+@Test
+@LoadFlows({"flows/valids/flow-concurrency-queue.yml"})
+void concurrencyQueue() throws Exception {
+flowConcurrencyCaseTest.flowConcurrencyQueue();
+}
+
+@Test
+@LoadFlows({"flows/valids/flow-concurrency-queue-pause.yml"})
+protected void concurrencyQueuePause() throws Exception {
+flowConcurrencyCaseTest.flowConcurrencyQueuePause();
+}
+
+@Test
+@LoadFlows({"flows/valids/flow-concurrency-cancel-pause.yml"})
+protected void concurrencyCancelPause() throws Exception {
+flowConcurrencyCaseTest.flowConcurrencyCancelPause();
+}
+
+@Test
+@LoadFlows(value = {"flows/valids/flow-concurrency-for-each-item.yaml", "flows/valids/flow-concurrency-queue.yml"}, tenantId = TENANT_1)
+protected void flowConcurrencyWithForEachItem() throws Exception {
+flowConcurrencyCaseTest.flowConcurrencyWithForEachItem(TENANT_1);
+}
+
+@Test
+@LoadFlows({"flows/valids/flow-concurrency-queue-fail.yml"})
+protected void concurrencyQueueRestarted() throws Exception {
+flowConcurrencyCaseTest.flowConcurrencyQueueRestarted();
+}
+
+@Test
+@LoadFlows({"flows/valids/flow-concurrency-queue-after-execution.yml"})
+void concurrencyQueueAfterExecution() throws Exception {
+flowConcurrencyCaseTest.flowConcurrencyQueueAfterExecution();
+}
+
+@Test
+@LoadFlows(value = {"flows/valids/flow-concurrency-subflow.yml", "flows/valids/flow-concurrency-cancel.yml"}, tenantId = TENANT_1)
+void flowConcurrencySubflow() throws Exception {
+flowConcurrencyCaseTest.flowConcurrencySubflow(TENANT_1);
+}
+
+@Test
+@FlakyTest(description = "Only flaky in CI")
+@LoadFlows({"flows/valids/flow-concurrency-parallel-subflow-kill.yaml", "flows/valids/flow-concurrency-parallel-subflow-kill-child.yaml", "flows/valids/flow-concurrency-parallel-subflow-kill-grandchild.yaml"})
+protected void flowConcurrencyParallelSubflowKill() throws Exception {
+flowConcurrencyCaseTest.flowConcurrencyParallelSubflowKill();
+}
+
+@Test
+@LoadFlows({"flows/valids/flow-concurrency-queue-killed.yml"})
+void flowConcurrencyKilled() throws Exception {
+flowConcurrencyCaseTest.flowConcurrencyKilled();
+}
+
+@Test
+@FlakyTest(description = "Only flaky in CI")
+@LoadFlows({"flows/valids/flow-concurrency-queue-killed.yml"})
+void flowConcurrencyQueueKilled() throws Exception {
+flowConcurrencyCaseTest.flowConcurrencyQueueKilled();
+}
+}
@@ -66,9 +66,6 @@ public abstract class AbstractRunnerTest {
 @Inject
 protected LoopUntilCaseTest loopUntilTestCaseTest;
 
-@Inject
-protected FlowConcurrencyCaseTest flowConcurrencyCaseTest;
-
 @Inject
 protected ScheduleDateCaseTest scheduleDateCaseTest;
 
@@ -422,66 +419,6 @@ public abstract class AbstractRunnerTest {
 forEachItemCaseTest.forEachItemWithAfterExecution();
 }
 
-@Test
-@LoadFlows({"flows/valids/flow-concurrency-cancel.yml"})
-void concurrencyCancel() throws Exception {
-flowConcurrencyCaseTest.flowConcurrencyCancel();
-}
-
-@Test
-@LoadFlows({"flows/valids/flow-concurrency-fail.yml"})
-void concurrencyFail() throws Exception {
-flowConcurrencyCaseTest.flowConcurrencyFail();
-}
-
-@Test
-@LoadFlows({"flows/valids/flow-concurrency-queue.yml"})
-void concurrencyQueue() throws Exception {
-flowConcurrencyCaseTest.flowConcurrencyQueue();
-}
-
-@Test
-@LoadFlows({"flows/valids/flow-concurrency-queue-pause.yml"})
-protected void concurrencyQueuePause() throws Exception {
-flowConcurrencyCaseTest.flowConcurrencyQueuePause();
-}
-
-@Test
-@LoadFlows({"flows/valids/flow-concurrency-cancel-pause.yml"})
-protected void concurrencyCancelPause() throws Exception {
-flowConcurrencyCaseTest.flowConcurrencyCancelPause();
-}
-
-@Test
-@LoadFlows(value = {"flows/valids/flow-concurrency-for-each-item.yaml", "flows/valids/flow-concurrency-queue.yml"}, tenantId = TENANT_1)
-protected void flowConcurrencyWithForEachItem() throws Exception {
-flowConcurrencyCaseTest.flowConcurrencyWithForEachItem(TENANT_1);
-}
-
-@Test
-@LoadFlows({"flows/valids/flow-concurrency-queue-fail.yml"})
-protected void concurrencyQueueRestarted() throws Exception {
-flowConcurrencyCaseTest.flowConcurrencyQueueRestarted();
-}
-
-@Test
-@LoadFlows({"flows/valids/flow-concurrency-queue-after-execution.yml"})
-void concurrencyQueueAfterExecution() throws Exception {
-flowConcurrencyCaseTest.flowConcurrencyQueueAfterExecution();
-}
-
-@Test
-@LoadFlows(value = {"flows/valids/flow-concurrency-subflow.yml", "flows/valids/flow-concurrency-cancel.yml"}, tenantId = TENANT_1)
-void flowConcurrencySubflow() throws Exception {
-flowConcurrencyCaseTest.flowConcurrencySubflow(TENANT_1);
-}
-
-@Test
-@LoadFlows({"flows/valids/flow-concurrency-parallel-subflow-kill.yaml", "flows/valids/flow-concurrency-parallel-subflow-kill-child.yaml", "flows/valids/flow-concurrency-parallel-subflow-kill-grandchild.yaml"})
-void flowConcurrencyParallelSubflowKill() throws Exception {
-flowConcurrencyCaseTest.flowConcurrencyParallelSubflowKill();
-}
-
 @Test
 @ExecuteFlow("flows/valids/executable-fail.yml")
 void badExecutable(Execution execution) {
@@ -69,6 +69,7 @@ public class FlowConcurrencyCaseTest {
 assertThat(shouldFailExecutions.stream().map(Execution::getState).map(State::getCurrent)).allMatch(Type.CANCELLED::equals);
 } finally {
 runnerUtils.killExecution(execution1);
+runnerUtils.awaitExecution(e -> e.getState().isTerminated(), execution1);
 }
 }
 
@@ -84,6 +85,7 @@ public class FlowConcurrencyCaseTest {
 assertThat(shouldFailExecutions.stream().map(Execution::getState).map(State::getCurrent)).allMatch(State.Type.FAILED::equals);
 } finally {
 runnerUtils.killExecution(execution1);
+runnerUtils.awaitExecution(e -> e.getState().isTerminated(), execution1);
 }
 }
 
@@ -240,6 +242,94 @@ public class FlowConcurrencyCaseTest {
|
|||||||
assertThat(terminated.getTaskRunList()).isNull();
|
assertThat(terminated.getTaskRunList()).isNull();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public void flowConcurrencyKilled() throws QueueException, InterruptedException {
|
||||||
|
Flow flow = flowRepository
|
||||||
|
.findById(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-killed", Optional.empty())
|
||||||
|
.orElseThrow();
|
||||||
|
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-killed", null, null, Duration.ofSeconds(30));
|
||||||
|
Execution execution2 = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.QUEUED), Execution.newExecution(flow, null, null, Optional.empty()));
|
||||||
|
Execution execution3 = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.QUEUED), Execution.newExecution(flow, null, null, Optional.empty()));
|
||||||
|
|
||||||
|
try {
|
||||||
|
assertThat(execution1.getState().isRunning()).isTrue();
|
||||||
|
assertThat(execution2.getState().getCurrent()).isEqualTo(Type.QUEUED);
|
||||||
|
assertThat(execution3.getState().getCurrent()).isEqualTo(Type.QUEUED);
|
||||||
|
|
||||||
|
// we kill execution 1, execution 2 should run but not execution 3
|
||||||
|
killQueue.emit(ExecutionKilledExecution
|
||||||
|
.builder()
|
||||||
|
.state(ExecutionKilled.State.REQUESTED)
|
||||||
|
.executionId(execution1.getId())
|
||||||
|
.isOnKillCascade(true)
|
||||||
|
.tenantId(MAIN_TENANT)
|
||||||
|
.build()
|
||||||
|
);
|
||||||
|
|
||||||
|
Execution killed = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.KILLED), execution1);
|
||||||
|
assertThat(killed.getState().getCurrent()).isEqualTo(Type.KILLED);
|
||||||
|
assertThat(killed.getState().getHistories().stream().anyMatch(h -> h.getState() == Type.RUNNING)).isTrue();
|
||||||
|
|
||||||
|
// we now check that execution 2 is running
|
||||||
|
Execution running = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.RUNNING), execution2);
|
||||||
|
assertThat(running.getState().getCurrent()).isEqualTo(Type.RUNNING);
|
||||||
|
|
||||||
|
// we check that execution 3 is still queued
|
||||||
|
Thread.sleep(100); // wait a little to be 100% sure
|
||||||
|
Execution queued = runnerUtils.awaitExecution(e -> e.getState().isQueued(), execution3);
|
||||||
|
assertThat(queued.getState().getCurrent()).isEqualTo(Type.QUEUED);
|
||||||
|
} finally {
|
||||||
|
// kill everything to avoid dangling executions
|
||||||
|
runnerUtils.killExecution(execution1);
|
||||||
|
runnerUtils.killExecution(execution2);
|
||||||
|
runnerUtils.killExecution(execution3);
|
||||||
|
|
||||||
|
// await that they are all terminated, note that as KILLED is received twice, some messages would still be pending, but this is the best we can do
|
||||||
|
runnerUtils.awaitFlowExecutionNumber(3, MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-killed");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void flowConcurrencyQueueKilled() throws QueueException, InterruptedException {
|
||||||
|
Flow flow = flowRepository
|
||||||
|
.findById(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-killed", Optional.empty())
|
||||||
|
.orElseThrow();
|
||||||
|
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-killed", null, null, Duration.ofSeconds(30));
|
||||||
|
Execution execution2 = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.QUEUED), Execution.newExecution(flow, null, null, Optional.empty()));
|
||||||
|
Execution execution3 = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.QUEUED), Execution.newExecution(flow, null, null, Optional.empty()));
|
||||||
|
|
||||||
|
try {
|
||||||
|
assertThat(execution1.getState().isRunning()).isTrue();
|
||||||
|
assertThat(execution2.getState().getCurrent()).isEqualTo(Type.QUEUED);
|
||||||
|
assertThat(execution3.getState().getCurrent()).isEqualTo(Type.QUEUED);
|
||||||
|
|
||||||
|
// we kill execution 2, execution 3 should not run
|
||||||
|
killQueue.emit(ExecutionKilledExecution
|
||||||
|
.builder()
|
||||||
|
.state(ExecutionKilled.State.REQUESTED)
|
||||||
|
.executionId(execution2.getId())
|
||||||
|
.isOnKillCascade(true)
|
||||||
|
.tenantId(MAIN_TENANT)
|
||||||
|
.build()
|
||||||
|
);
|
||||||
|
|
||||||
|
Execution killed = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.KILLED), execution2);
|
||||||
|
assertThat(killed.getState().getCurrent()).isEqualTo(Type.KILLED);
|
||||||
|
assertThat(killed.getState().getHistories().stream().noneMatch(h -> h.getState() == Type.RUNNING)).isTrue();
|
||||||
|
|
||||||
|
// we now check that execution 3 is still queued
|
||||||
|
Thread.sleep(100); // wait a little to be 100% sure
|
||||||
|
Execution queued = runnerUtils.awaitExecution(e -> e.getState().isQueued(), execution3);
|
||||||
|
assertThat(queued.getState().getCurrent()).isEqualTo(Type.QUEUED);
|
||||||
|
} finally {
|
||||||
|
// kill everything to avoid dangling executions
|
||||||
|
runnerUtils.killExecution(execution1);
|
||||||
|
runnerUtils.killExecution(execution2);
|
||||||
|
runnerUtils.killExecution(execution3);
|
||||||
|
|
||||||
|
// await that they are all terminated, note that as KILLED is received twice, some messages would still be pending, but this is the best we can do
|
||||||
|
runnerUtils.awaitFlowExecutionNumber(3, MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-killed");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
private URI storageUpload(String tenantId) throws URISyntaxException, IOException {
|
private URI storageUpload(String tenantId) throws URISyntaxException, IOException {
|
||||||
File tempFile = File.createTempFile("file", ".txt");
|
File tempFile = File.createTempFile("file", ".txt");
|
||||||
|
|
||||||
|
|||||||
@@ -2,9 +2,7 @@ package io.kestra.core.runners;

 import io.kestra.core.junit.annotations.KestraTest;
 import io.kestra.core.models.executions.Execution;
-import io.kestra.core.models.flows.DependsOn;
+import io.kestra.core.models.flows.*;
-import io.kestra.core.models.flows.Input;
-import io.kestra.core.models.flows.Type;
 import io.kestra.core.models.flows.input.FileInput;
 import io.kestra.core.models.flows.input.InputAndValue;
 import io.kestra.core.models.flows.input.IntInput;

@@ -32,6 +30,7 @@ import org.reactivestreams.Publisher;
 import reactor.core.publisher.Mono;

 import java.io.ByteArrayInputStream;
+import java.io.File;
 import java.io.InputStream;
 import java.net.URI;
 import java.nio.ByteBuffer;

@@ -412,6 +411,40 @@ class FlowInputOutputTest {
         assertThat(results.get("input")).isEqualTo("default");
     }

+    @Test
+    void shouldResolveZeroByteFileUpload() throws java.io.IOException {
+        File tempFile = File.createTempFile("empty", ".txt");
+        tempFile.deleteOnExit();
+
+        io.micronaut.http.multipart.CompletedFileUpload fileUpload = org.mockito.Mockito.mock(io.micronaut.http.multipart.CompletedFileUpload.class);
+        org.mockito.Mockito.when(fileUpload.getInputStream()).thenReturn(new java.io.FileInputStream(tempFile));
+        org.mockito.Mockito.when(fileUpload.getFilename()).thenReturn("empty.txt");
+        org.mockito.Mockito.when(fileUpload.getName()).thenReturn("empty_file");
+
+        Execution execution = Execution.builder()
+            .id(IdUtils.create())
+            .tenantId("unit_test_tenant")
+            .namespace("io.kestra.unittest")
+            .flowId("unittest")
+            .flowRevision(1)
+            .state(new State())
+            .build();
+
+        reactor.core.publisher.Mono<Map<String, Object>> result = flowInputOutput.readExecutionInputs(
+            List.of(
+                io.kestra.core.models.flows.input.FileInput.builder().id("empty_file").type(Type.FILE).build()
+            ),
+            Flow.builder().id("unittest").namespace("io.kestra.unittest").build(),
+            execution,
+            reactor.core.publisher.Flux.just(fileUpload)
+        );
+
+        Map<String, Object> outputs = result.block();
+
+        Assertions.assertNotNull(outputs);
+        Assertions.assertTrue(outputs.containsKey("empty_file"));
+    }
+
     private static class MemoryCompletedPart implements CompletedPart {

         protected final String name;
@@ -56,6 +56,18 @@ public class InputsTest {
     @Inject
     private NamespaceFactory namespaceFactory;

+    private static final Map<String , Object> object = Map.of(
+        "people", List.of(
+            Map.of(
+                "first", "Mustafa",
+                "last", "Tarek"
+            ),
+            Map.of(
+                "first", "Ahmed",
+                "last", "Tarek"
+            )
+        )
+    );
     public static Map<String, Object> inputs = ImmutableMap.<String, Object>builder()
         .put("string", "myString")
         .put("enum", "ENUM_VALUE")

@@ -67,7 +79,6 @@ public class InputsTest {
         .put("time", "18:27:49")
         .put("duration", "PT5M6S")
         .put("file", Objects.requireNonNull(InputsTest.class.getClassLoader().getResource("application-test.yml")).getPath())
-        .put("json", "{\"a\": \"b\"}")
         .put("uri", "https://www.google.com")
         .put("nested.string", "a string")
         .put("nested.more.int", "123")

@@ -81,11 +92,14 @@ public class InputsTest {
         .put("validatedTime", "11:27:49")
         .put("secret", "secret")
         .put("array", "[1, 2, 3]")
-        .put("yaml", """
+        .put("json1", "{\"a\": \"b\"}")
+        .put("json2", object)
+        .put("yaml1", """
            some: property
            alist:
            - of
            - values""")
+        .put("yaml2", object)
         .build();

     @Inject

@@ -154,7 +168,6 @@ public class InputsTest {
         assertThat(typeds.get("duration")).isEqualTo(Duration.parse("PT5M6S"));
         assertThat((URI) typeds.get("file")).isEqualTo(new URI("kestra:///io/kestra/tests/inputs/executions/test/inputs/file/application-test.yml"));
         assertThat(CharStreams.toString(new InputStreamReader(storageInterface.get("tenant1", null, (URI) typeds.get("file"))))).isEqualTo(CharStreams.toString(new InputStreamReader(new FileInputStream((String) inputs.get("file")))));
-        assertThat(typeds.get("json")).isEqualTo(Map.of("a", "b"));
         assertThat(typeds.get("uri")).isEqualTo("https://www.google.com");
         assertThat(((Map<String, Object>) typeds.get("nested")).get("string")).isEqualTo("a string");
         assertThat((Boolean) ((Map<String, Object>) typeds.get("nested")).get("bool")).isTrue();

@@ -170,9 +183,12 @@ public class InputsTest {
         assertThat(typeds.get("array")).isInstanceOf(List.class);
         assertThat((List<Integer>) typeds.get("array")).hasSize(3);
         assertThat((List<Integer>) typeds.get("array")).isEqualTo(List.of(1, 2, 3));
-        assertThat(typeds.get("yaml")).isEqualTo(Map.of(
+        assertThat(typeds.get("json1")).isEqualTo(Map.of("a", "b"));
+        assertThat(typeds.get("json2")).isEqualTo(object);
+        assertThat(typeds.get("yaml1")).isEqualTo(Map.of(
             "some", "property",
             "alist", List.of("of", "values")));
+        assertThat(typeds.get("yaml2")).isEqualTo(object);
     }

     @Test

@@ -201,7 +217,7 @@ public class InputsTest {
             (flow, execution1) -> flowIO.readExecutionInputs(flow, execution1, inputs)
         );

-        assertThat(execution.getTaskRunList()).hasSize(14);
+        assertThat(execution.getTaskRunList()).hasSize(16);
         assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
         assertThat((String) execution.findTaskRunsByTaskId("file").getFirst().getOutputs().get("value")).matches("kestra:///io/kestra/tests/inputs/executions/.*/inputs/file/application-test.yml");
         // secret inputs are decrypted to be used as task properties

@@ -354,19 +370,19 @@ public class InputsTest {
     @LoadFlows(value = {"flows/valids/inputs.yaml"}, tenantId = "tenant14")
     void inputEmptyJson() {
         HashMap<String, Object> map = new HashMap<>(inputs);
-        map.put("json", "{}");
+        map.put("json1", "{}");

         Map<String, Object> typeds = typedInputs(map, "tenant14");

-        assertThat(typeds.get("json")).isInstanceOf(Map.class);
-        assertThat(((Map<?, ?>) typeds.get("json")).size()).isZero();
+        assertThat(typeds.get("json1")).isInstanceOf(Map.class);
+        assertThat(((Map<?, ?>) typeds.get("json1")).size()).isZero();
     }

     @Test
     @LoadFlows(value = {"flows/valids/inputs.yaml"}, tenantId = "tenant15")
     void inputEmptyJsonFlow() throws TimeoutException, QueueException {
         HashMap<String, Object> map = new HashMap<>(inputs);
-        map.put("json", "{}");
+        map.put("json1", "{}");

         Execution execution = runnerUtils.runOne(
             "tenant15",

@@ -376,11 +392,11 @@ public class InputsTest {
             (flow, execution1) -> flowIO.readExecutionInputs(flow, execution1, map)
         );

-        assertThat(execution.getTaskRunList()).hasSize(14);
+        assertThat(execution.getTaskRunList()).hasSize(16);
         assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);

-        assertThat(execution.getInputs().get("json")).isInstanceOf(Map.class);
-        assertThat(((Map<?, ?>) execution.getInputs().get("json")).size()).isZero();
+        assertThat(execution.getInputs().get("json1")).isInstanceOf(Map.class);
+        assertThat(((Map<?, ?>) execution.getInputs().get("json1")).size()).isZero();
         assertThat((String) execution.findTaskRunsByTaskId("jsonOutput").getFirst().getOutputs().get("value")).isEqualTo("{}");
     }
@@ -122,10 +122,10 @@ class YamlParserTest {
     void inputs() {
         Flow flow = this.parse("flows/valids/inputs.yaml");

-        assertThat(flow.getInputs().size()).isEqualTo(29);
+        assertThat(flow.getInputs().size()).isEqualTo(31);
-        assertThat(flow.getInputs().stream().filter(Input::getRequired).count()).isEqualTo(11L);
+        assertThat(flow.getInputs().stream().filter(Input::getRequired).count()).isEqualTo(12L);
-        assertThat(flow.getInputs().stream().filter(r -> !r.getRequired()).count()).isEqualTo(18L);
+        assertThat(flow.getInputs().stream().filter(r -> !r.getRequired()).count()).isEqualTo(19L);
-        assertThat(flow.getInputs().stream().filter(r -> r.getDefaults() != null).count()).isEqualTo(3L);
+        assertThat(flow.getInputs().stream().filter(r -> r.getDefaults() != null).count()).isEqualTo(4L);
         assertThat(flow.getInputs().stream().filter(r -> r instanceof StringInput stringInput && stringInput.getValidator() != null).count()).isEqualTo(1L);
     }

@@ -48,8 +48,8 @@ class ListUtilsTest {
     void convertToListString(){
         assertThat(ListUtils.convertToListString(List.of("string1", "string2"))).isEqualTo(List.of("string1", "string2"));
         assertThat(ListUtils.convertToListString(List.of())).isEqualTo(List.of());
+        assertThat(ListUtils.convertToListString(List.of(1, 2, 3))).isEqualTo(List.of("1", "2", "3"));

         assertThrows(IllegalArgumentException.class, () -> ListUtils.convertToListString("not a list"));
-        assertThrows(IllegalArgumentException.class, () -> ListUtils.convertToListString(List.of(1, 2, 3)));
     }
 }
@@ -1,9 +1,11 @@
 package io.kestra.plugin.core.flow;

 import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
+import static org.assertj.core.api.Assertions.as;
 import static org.assertj.core.api.Assertions.assertThat;

 import com.google.common.collect.ImmutableMap;
+import io.kestra.core.junit.annotations.ExecuteFlow;
 import io.kestra.core.junit.annotations.KestraTest;
 import io.kestra.core.junit.annotations.LoadFlows;
 import io.kestra.core.models.executions.Execution;

@@ -100,4 +102,14 @@ class SwitchTest {

         assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.FAILED);
     }
+
+    @Test
+    @ExecuteFlow("flows/valids/switch-in-concurrent-loop.yaml")
+    void switchInConcurrentLoop(Execution execution) {
+        assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
+        assertThat(execution.getTaskRunList()).hasSize(5);
+        // we check that OOMCRM_EB_DD_000 and OOMCRM_EB_DD_001 have been processed once
+        assertThat(execution.getTaskRunList().stream().filter(t -> t.getTaskId().equals("OOMCRM_EB_DD_000")).count()).isEqualTo(1);
+        assertThat(execution.getTaskRunList().stream().filter(t -> t.getTaskId().equals("OOMCRM_EB_DD_001")).count()).isEqualTo(1);
+    }
 }
@@ -0,0 +1,11 @@
+id: flow-concurrency-queue-killed
+namespace: io.kestra.tests
+
+concurrency:
+  behavior: QUEUE
+  limit: 1
+
+tasks:
+  - id: sleep
+    type: io.kestra.plugin.core.flow.Sleep
+    duration: PT1M
@@ -41,7 +41,10 @@ inputs:
   - id: instantDefaults
     type: DATETIME
     defaults: "2013-08-09T14:19:00Z"
-  - id: json
+  - id: json1
+    type: JSON
+    required: false
+  - id: json2
     type: JSON
     required: false
   - id: uri

@@ -95,7 +98,7 @@ inputs:
   - name: array
     type: ARRAY
     itemType: INT
-  - name: yaml
+  - name: yaml1
     type: YAML
     defaults:
       property: something

@@ -104,6 +107,15 @@ inputs:
          value: value1
        - key: key2
          value: value2
+  - name: yaml2
+    type: YAML
+    defaults:
+      property: something
+      list:
+        - key: key1
+          value: value1
+        - key: key2
+          value: value2
   # required true and an empty default value will only work if we correctly serialize default values which is what this input is about to test.
   - name: empty
     type: STRING

@@ -140,12 +152,18 @@ tasks:
     type: io.kestra.plugin.core.debug.Return
     format: "{{taskrun.value}}"

-  - id: json
+  - id: json1
     type: io.kestra.plugin.core.debug.Return
-    format: "{{inputs.json}}"
+    format: "{{inputs.json1}}"
+  - id: json2
+    type: io.kestra.plugin.core.debug.Return
+    format: "{{inputs.json2}}"
   - id: jsonOutput
     type: io.kestra.plugin.core.debug.Return
-    format: "{{outputs.json.value}}"
+    format: "{{outputs.json1.value}}"
-  - id: yamlOutput
+  - id: yamlOutput1
     type: io.kestra.plugin.core.debug.Return
-    format: "{{inputs.yaml}}"
+    format: "{{inputs.yaml1}}"
+  - id: yamlOutput2
+    type: io.kestra.plugin.core.debug.Return
+    format: "{{inputs.yaml2}}"
@@ -0,0 +1,23 @@
+id: switch-in-concurrent-loop
+namespace: io.kestra.tests
+
+tasks:
+  - id: iterate_and_check_name
+    type: io.kestra.plugin.core.flow.ForEach
+    tasks:
+      - id: switch
+        type: io.kestra.plugin.core.flow.Switch
+        value: "{{ taskrun.value }}"
+        cases:
+          "Alice":
+            - id: OOMCRM_EB_DD_000
+              type: io.kestra.plugin.core.log.Log
+              message: Alice
+          "Bob":
+            - id: OOMCRM_EB_DD_001
+              type: io.kestra.plugin.core.log.Log
+              message: Bob
+
+    values: ["Alice", "Bob"]
+
+    concurrencyLimit: 0
@@ -13,18 +13,19 @@ tasks:
      - io.test.second
      - io.test.third
    enabled: true
+   folderPerNamespace: true
    exclude:
      - /ignore/**
 tasks:
   - id: t1
     type: io.kestra.core.tasks.test.Read
-    path: "/test/a/b/c/1.txt"
+    path: "/io.test.third/test/a/b/c/1.txt"
   - id: t2
     type: io.kestra.core.tasks.test.Read
-    path: "/a/b/c/2.txt"
+    path: "/io.test.second/a/b/c/2.txt"
   - id: t3
     type: io.kestra.core.tasks.test.Read
-    path: "/a/b/3.txt"
+    path: "/io.test.first/a/b/3.txt"
   - id: t4
     type: io.kestra.core.tasks.test.Read
     path: "/ignore/4.txt"
@@ -16,7 +16,7 @@ public final class H2RepositoryUtils {
             case MONTH:
                 return DSL.field("FORMATDATETIME(\"" + dateField + "\", 'yyyy-MM')", Date.class);
             case WEEK:
-                return DSL.field("FORMATDATETIME(\"" + dateField + "\", 'YYYY-ww')", Date.class);
+                return DSL.field("DATE_TRUNC('WEEK', \"" + dateField + "\")", Date.class);
             case DAY:
                 return DSL.field("FORMATDATETIME(\"" + dateField + "\", 'yyyy-MM-dd')", Date.class);
             case HOUR:

@@ -0,0 +1,6 @@
+package io.kestra.runner.h2;
+
+import io.kestra.core.runners.AbstractRunnerConcurrencyTest;
+
+public class H2RunnerConcurrencyTest extends AbstractRunnerConcurrencyTest {
+}

@@ -16,7 +16,7 @@ public final class MysqlRepositoryUtils {
             case MONTH:
                 return DSL.field("DATE_FORMAT({0}, '%Y-%m')", Date.class, DSL.field(dateField));
             case WEEK:
-                return DSL.field("DATE_FORMAT({0}, '%x-%v')", Date.class, DSL.field(dateField));
+                return DSL.field("STR_TO_DATE(CONCAT(YEARWEEK({0}, 3), ' Monday'), '%X%V %W')", Date.class, DSL.field(dateField));
             case DAY:
                 return DSL.field("DATE({0})", Date.class, DSL.field(dateField));
             case HOUR:

@@ -0,0 +1,6 @@
+package io.kestra.runner.mysql;
+
+import io.kestra.core.runners.AbstractRunnerConcurrencyTest;
+
+public class MysqlRunnerConcurrencyTest extends AbstractRunnerConcurrencyTest {
+}

@@ -16,7 +16,7 @@ public final class PostgresRepositoryUtils {
             case MONTH:
                 return DSL.field("TO_CHAR({0}, 'YYYY-MM')", Date.class, DSL.field(dateField));
             case WEEK:
-                return DSL.field("TO_CHAR({0}, 'IYYY-IW')", Date.class, DSL.field(dateField));
+                return DSL.field("DATE_TRUNC('week', {0})", Date.class, DSL.field(dateField));
             case DAY:
                 return DSL.field("DATE({0})", Date.class, DSL.field(dateField));
             case HOUR:

@@ -0,0 +1,6 @@
+package io.kestra.runner.postgres;
+
+import io.kestra.core.runners.AbstractRunnerConcurrencyTest;
+
+public class PostgresRunnerConcurrencyTest extends AbstractRunnerConcurrencyTest {
+}
@@ -639,6 +639,14 @@ public abstract class AbstractJdbcFlowRepository extends AbstractJdbcRepository
         return (SelectConditionStep<R>) select;
     }

+    protected Name getColumnName(QueryFilter.Field field){
+        if (QueryFilter.Field.FLOW_ID.equals(field)) {
+            return DSL.quotedName("id");
+        } else {
+            return DSL.quotedName(field.name().toLowerCase());
+        }
+    }
+
     abstract protected Condition findSourceCodeCondition(String query);

     @Override
@@ -2,6 +2,7 @@ package io.kestra.jdbc.repository;

 import io.kestra.core.exceptions.InvalidQueryFiltersException;
 import io.kestra.core.models.QueryFilter;
+import io.kestra.core.models.QueryFilter.Op;
 import io.kestra.core.models.QueryFilter.Resource;
 import io.kestra.core.models.dashboards.ColumnDescriptor;
 import io.kestra.core.models.dashboards.DataFilter;

@@ -291,7 +292,7 @@ public abstract class AbstractJdbcRepository {
         }
         // Handle Field.CHILD_FILTER
         if (field.equals(QueryFilter.Field.CHILD_FILTER)) {
-            return handleChildFilter(value);
+            return handleChildFilter(value, operation);
         }
         // Handling for Field.MIN_LEVEL
         if (field.equals(QueryFilter.Field.MIN_LEVEL)) {

@@ -322,32 +323,51 @@ public abstract class AbstractJdbcRepository {
                 throw new InvalidQueryFiltersException("Label field value must be instance of Map or String");
             }
         }
-        if (field == QueryFilter.Field.KIND) {
-            return applyKindCondition(value,operation);
-        }

         // Convert the field name to lowercase and quote it
-        Name columnName = DSL.quotedName(field.name().toLowerCase());
+        Name columnName = getColumnName(field);

         // Default handling for other fields
         return switch (operation) {
-            case EQUALS -> DSL.field(columnName).eq(value);
+            case EQUALS -> DSL.field(columnName).eq(primitiveOrToString(value));
-            case NOT_EQUALS -> DSL.field(columnName).ne(value);
+            case NOT_EQUALS -> DSL.field(columnName).ne(primitiveOrToString(value));
             case GREATER_THAN -> DSL.field(columnName).greaterThan(value);
             case LESS_THAN -> DSL.field(columnName).lessThan(value);
-            case IN -> DSL.field(columnName).in(ListUtils.convertToList(value));
+            case IN -> DSL.field(columnName).in(ListUtils.convertToListString(value));
-            case NOT_IN -> DSL.field(columnName).notIn(ListUtils.convertToList(value));
+            case NOT_IN -> DSL.field(columnName).notIn(ListUtils.convertToListString(value));
             case STARTS_WITH -> DSL.field(columnName).like(value + "%");

             case ENDS_WITH -> DSL.field(columnName).like("%" + value);
             case CONTAINS -> DSL.field(columnName).like("%" + value + "%");
             case REGEX -> DSL.field(columnName).likeRegex((String) value);
-            case PREFIX -> DSL.field(columnName).like(value + ".%")
+            case PREFIX -> DSL.field(columnName).like(value + "%")
                 .or(DSL.field(columnName).eq(value));
             default -> throw new InvalidQueryFiltersException("Unsupported operation: " + operation);
         };
     }

+    private static Object primitiveOrToString(Object o) {
+        if (o == null) return null;
+
+        if (o instanceof Boolean
+            || o instanceof Byte
+            || o instanceof Short
+            || o instanceof Integer
+            || o instanceof Long
+            || o instanceof Float
+            || o instanceof Double
+            || o instanceof Character
+            || o instanceof String) {
+            return o;
+        }
+
+        return o.toString();
+    }
+
+    protected Name getColumnName(QueryFilter.Field field){
+        return DSL.quotedName(field.name().toLowerCase());
+    }
+
     protected Condition findQueryCondition(String query) {
         throw new InvalidQueryFiltersException("Unsupported operation: ");
     }

@@ -391,12 +411,13 @@ public abstract class AbstractJdbcRepository {
     }

     // Handle CHILD_FILTER field logic
-    private Condition handleChildFilter(Object value) {
+    private Condition handleChildFilter(Object value, Op operation) {
         ChildFilter childFilter = (value instanceof String val) ? ChildFilter.valueOf(val) : (ChildFilter) value;

-        return switch (childFilter) {
+        return switch (operation) {
-            case CHILD -> field("trigger_execution_id").isNotNull();
+            case EQUALS -> childFilter.equals(ChildFilter.CHILD) ? field("trigger_execution_id").isNotNull() : field("trigger_execution_id").isNull();
-            case MAIN -> field("trigger_execution_id").isNull();
+            case NOT_EQUALS -> childFilter.equals(ChildFilter.CHILD) ? field("trigger_execution_id").isNull() : field("trigger_execution_id").isNotNull();
+            default -> throw new InvalidQueryFiltersException("Unsupported operation for child filter field: " + operation);
         };
     }

@@ -447,15 +468,6 @@ public abstract class AbstractJdbcRepository {
             default -> throw new InvalidQueryFiltersException("Unsupported operation for SCOPE: " + operation);
         };
     }
-    private Condition applyKindCondition(Object value, QueryFilter.Op operation) {
-        String kind = value.toString();
-        return switch (operation) {
-            case EQUALS -> field("kind").eq(kind);
-            case NOT_EQUALS -> field("kind").ne(kind);
-            default -> throw new InvalidQueryFiltersException("Unsupported operation for KIND: " + operation);
-        };
-    }
-

     protected Field<Date> formatDateField(String dateField, DateUtils.GroupType groupType) {
         throw new UnsupportedOperationException("formatDateField() not implemented");
@@ -658,21 +658,16 @@ public class JdbcExecutor implements ExecutorInterface {
                     workerTaskResults.add(new WorkerTaskResult(taskRun));
                 }
             }
-            /// flowable attempt state transition to running
+            // flowable attempt state transition to running
             if (workerTask.getTask().isFlowable()) {
-                List<TaskRunAttempt> attempts = Optional.ofNullable(workerTask.getTaskRun().getAttempts())
-                    .map(ArrayList::new)
-                    .orElseGet(ArrayList::new);
-
-                attempts.add(
-                    TaskRunAttempt.builder()
-                        .state(new State().withState(State.Type.RUNNING))
-                        .build()
-                );
-
                 TaskRun updatedTaskRun = workerTask.getTaskRun()
-                    .withAttempts(attempts)
+                    .withAttempts(
+                        List.of(
+                            TaskRunAttempt.builder()
+                                .state(new State().withState(State.Type.RUNNING))
+                                .build()
+                        )
+                    )
                     .withState(State.Type.RUNNING);

                 workerTaskResults.add(new WorkerTaskResult(updatedTaskRun));

@@ -1205,16 +1200,17 @@ public class JdbcExecutor implements ExecutorInterface {

         // check if there exist a queued execution and submit it to the execution queue
         if (executor.getFlow().getConcurrency() != null) {
-
-            // decrement execution concurrency limit
             // if an execution was queued but never running, it would have never been counted inside the concurrency limit and should not lead to popping a new queued execution
-            // this could only happen for KILLED execution.
             boolean queuedThenKilled = execution.getState().getCurrent() == State.Type.KILLED
                 && execution.getState().getHistories().stream().anyMatch(h -> h.getState().isQueued())
-                && execution.getState().getHistories().stream().noneMatch(h -> h.getState().isRunning());
+                && execution.getState().getHistories().stream().noneMatch(h -> h.getState().onlyRunning());
+            // if an execution was FAILED or CANCELLED due to concurrency limit exceeded, it would have never been counter inside the concurrency limit and should not lead to popping a new queued execution
             boolean concurrencyShortCircuitState = Concurrency.possibleTransitions(execution.getState().getCurrent())
                 && execution.getState().getHistories().get(execution.getState().getHistories().size() - 2).getState().isCreated();
-            if (!queuedThenKilled && !concurrencyShortCircuitState) {
+            // as we may receive multiple time killed execution (one when we kill it, then one for each running worker task), we limit to the first we receive: when the state transitionned from KILLING to KILLED
+            boolean killingThenKilled = execution.getState().getCurrent().isKilled() && executor.getOriginalState() == State.Type.KILLING;
+            if (!queuedThenKilled && !concurrencyShortCircuitState && (!execution.getState().getCurrent().isKilled() || killingThenKilled)) {
+                // decrement execution concurrency limit and pop a new queued execution if needed
                 concurrencyLimitStorage.decrement(executor.getFlow());

                 if (executor.getFlow().getConcurrency().getBehavior() == Concurrency.Behavior.QUEUE) {

@@ -1250,7 +1246,8 @@ public class JdbcExecutor implements ExecutorInterface {
             // IMPORTANT: this is safe as only the executor is listening to WorkerTaskResult,
             // and we are sure at this stage that all WorkerJob has been listened and processed by the Worker.
             // If any of these assumptions changed, this code would not be safe anymore.
-            if (cleanWorkerJobQueue && !ListUtils.isEmpty(executor.getExecution().getTaskRunList())) {
+            // One notable exception is for killed flow as the KILLED worker task result may arrive late so removing them is a racy as we may remove them before they are processed
+            if (cleanWorkerJobQueue && !ListUtils.isEmpty(executor.getExecution().getTaskRunList()) && !execution.getState().getCurrent().isKilled()) {
                 List<String> taskRunKeys = executor.getExecution().getTaskRunList().stream()
                     .map(taskRun -> taskRun.getId())
                     .toList();
@@ -20,10 +20,10 @@ dependencies {
     def kafkaVersion = "4.1.1"
     def opensearchVersion = "3.2.0"
     def opensearchRestVersion = "3.3.2"
-    def flyingSaucerVersion = "10.0.5"
+    def flyingSaucerVersion = "10.0.6"
     def jacksonVersion = "2.20.1"
     def jacksonAnnotationsVersion = "2.20"
-    def jugVersion = "5.1.1"
+    def jugVersion = "5.2.0"
     def langchain4jVersion = "1.9.1"
     def langchain4jCommunityVersion = "1.9.1-beta17"

@@ -35,7 +35,7 @@ dependencies {
    // we define cloud bom here for GCP, Azure and AWS so they are aligned for all plugins that use them (secret, storage, oss and ee plugins)
    api platform('com.google.cloud:libraries-bom:26.72.0')
    api platform("com.azure:azure-sdk-bom:1.3.3")
-   api platform('software.amazon.awssdk:bom:2.40.0')
+   api platform('software.amazon.awssdk:bom:2.40.5')
    api platform("dev.langchain4j:langchain4j-bom:$langchain4jVersion")
    api platform("dev.langchain4j:langchain4j-community-bom:$langchain4jCommunityVersion")

@@ -77,7 +77,7 @@ dependencies {
    api "org.apache.kafka:kafka-clients:$kafkaVersion"
    api "org.apache.kafka:kafka-streams:$kafkaVersion"
    // AWS CRT is not included in the AWS BOM but needed for the S3 Transfer manager
-   api 'software.amazon.awssdk.crt:aws-crt:0.40.1'
+   api 'software.amazon.awssdk.crt:aws-crt:0.40.3'

    // Other libs
    api("org.projectlombok:lombok:1.18.42")

@@ -133,7 +133,7 @@ dependencies {
    api 'org.codehaus.plexus:plexus-utils:3.0.24' // https://nvd.nist.gov/vuln/detail/CVE-2022-4244

    // for jOOQ to the same version as we use in EE
-   api ("org.jooq:jooq:3.20.9")
+   api ("org.jooq:jooq:3.20.10")

    // Tests
    api "org.junit-pioneer:junit-pioneer:2.3.0"

@@ -142,7 +142,7 @@ dependencies {
    api group: 'org.exparity', name: 'hamcrest-date', version: '2.0.8'
    api "org.wiremock:wiremock-jetty12:3.13.2"
    api "org.apache.kafka:kafka-streams-test-utils:$kafkaVersion"
-   api "com.microsoft.playwright:playwright:1.56.0"
+   api "com.microsoft.playwright:playwright:1.57.0"
    api "org.awaitility:awaitility:4.3.0"

    // Kestra components
@@ -24,7 +24,6 @@ import java.util.List;
 import java.util.Map;


 public class DockerService {
-    // DDDDDDD
     public static DockerClient client(DockerClientConfig dockerClientConfig) {
         DockerHttpClient dockerHttpClient = new ApacheDockerHttpClient.Builder()
             .dockerHost(dockerClientConfig.getDockerHost())
@@ -34,7 +34,7 @@
        <!-- Load Google Fonts non-blocking -->
        <link
            rel="stylesheet"
-           href="https://fonts.googleapis.com/css2?family=Public+Sans:wght@300;400;700;800&family=Source+Code+Pro:wght@400;700;800&display=swap"
+           href="https://fonts.googleapis.com/css2?family=Public+Sans:wght@300;400;600;700;800&family=Source+Code+Pro:wght@400;700;800&display=swap"
            media="print"
            onload="this.media='all'"
        >

@@ -43,7 +43,7 @@
        <noscript>
            <link
                rel="stylesheet"
-               href="https://fonts.googleapis.com/css2?family=Public+Sans:wght@300;400;700;800&family=Source+Code+Pro:wght@400;700;800&display=swap"
+               href="https://fonts.googleapis.com/css2?family=Public+Sans:wght@300;400;600;700;800&family=Source+Code+Pro:wght@400;700;800&display=swap"
            >
        </noscript>
    </head>
ui/package-lock.json (generated, 41 changed lines)
@@ -9,8 +9,9 @@
       "version": "0.0.0",
       "hasInstallScript": true,
       "dependencies": {
+        "@esbuild/linux-x64": "0.27.1",
         "@js-joda/core": "^5.6.5",
-        "@kestra-io/ui-libs": "^0.0.263",
+        "@kestra-io/ui-libs": "^0.0.264",
         "@vue-flow/background": "^1.3.2",
         "@vue-flow/controls": "^1.1.2",
         "@vue-flow/core": "^1.47.0",

@@ -77,7 +78,7 @@
         "@types/humanize-duration": "^3.27.4",
         "@types/js-yaml": "^4.0.9",
         "@types/moment": "^2.13.0",
-        "@types/node": "^24.10.1",
+        "@types/node": "^24.10.2",
         "@types/nprogress": "^0.2.3",
         "@types/path-browserify": "^1.0.3",
         "@types/semver": "^7.7.1",

@@ -120,9 +121,9 @@
         "vue-tsc": "^3.1.4"
       },
       "optionalDependencies": {
-        "@esbuild/darwin-arm64": "^0.27.0",
+        "@esbuild/darwin-arm64": "^0.27.1",
-        "@esbuild/darwin-x64": "^0.27.0",
+        "@esbuild/darwin-x64": "^0.27.1",
-        "@esbuild/linux-x64": "^0.27.0",
+        "@esbuild/linux-x64": "^0.27.1",
         "@rollup/rollup-darwin-arm64": "^4.53.3",
         "@rollup/rollup-darwin-x64": "^4.53.3",
         "@rollup/rollup-linux-x64-gnu": "^4.53.3",

@@ -1342,9 +1343,9 @@
      }
    },
    "node_modules/@esbuild/darwin-arm64": {
-      "version": "0.27.0",
+      "version": "0.27.1",
-      "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.0.tgz",
+      "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.1.tgz",
-      "integrity": "sha512-uJOQKYCcHhg07DL7i8MzjvS2LaP7W7Pn/7uA0B5S1EnqAirJtbyw4yC5jQ5qcFjHK9l6o/MX9QisBg12kNkdHg==",
+      "integrity": "sha512-veg7fL8eMSCVKL7IW4pxb54QERtedFDfY/ASrumK/SbFsXnRazxY4YykN/THYqFnFwJ0aVjiUrVG2PwcdAEqQQ==",
      "cpu": [
        "arm64"
      ],

@@ -1358,9 +1359,9 @@
      }
    },
    "node_modules/@esbuild/darwin-x64": {
-      "version": "0.27.0",
+      "version": "0.27.1",
-      "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.0.tgz",
+      "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.1.tgz",
-      "integrity": "sha512-8mG6arH3yB/4ZXiEnXof5MK72dE6zM9cDvUcPtxhUZsDjESl9JipZYW60C3JGreKCEP+p8P/72r69m4AZGJd5g==",
+      "integrity": "sha512-+3ELd+nTzhfWb07Vol7EZ+5PTbJ/u74nC6iv4/lwIU99Ip5uuY6QoIf0Hn4m2HoV0qcnRivN3KSqc+FyCHjoVQ==",
      "cpu": [
        "x64"
      ],

@@ -1544,9 +1545,9 @@
      }
    },
    "node_modules/@esbuild/linux-x64": {
-      "version": "0.27.0",
+      "version": "0.27.1",
-      "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.0.tgz",
+      "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.1.tgz",
-      "integrity": "sha512-1hBWx4OUJE2cab++aVZ7pObD6s+DK4mPGpemtnAORBvb5l/g5xFGk0vc0PjSkrDs0XaXj9yyob3d14XqvnQ4gw==",
+      "integrity": "sha512-z3H/HYI9MM0HTv3hQZ81f+AKb+yEoCRlUby1F80vbQ5XdzEMyY/9iNlAmhqiBKw4MJXwfgsh7ERGEOhrM1niMA==",
      "cpu": [
        "x64"
      ],

@@ -2829,9 +2830,9 @@
      "license": "BSD-3-Clause"
    },
    "node_modules/@kestra-io/ui-libs": {
-      "version": "0.0.263",
+      "version": "0.0.264",
-      "resolved": "https://registry.npmjs.org/@kestra-io/ui-libs/-/ui-libs-0.0.263.tgz",
+      "resolved": "https://registry.npmjs.org/@kestra-io/ui-libs/-/ui-libs-0.0.264.tgz",
-      "integrity": "sha512-j1rWqcQAK2CudNBkcDPjUXyaGFeBzJ7QEhPKFAbleHSw0N3QFu/iy0rFZxJNIMWRi1mGZBh74D6vL0OqQJkT2Q==",
+      "integrity": "sha512-yUZDNaE0wUPOuEq/FL/TQBRd1fTV2dyM8s+VcGRjNSM1uv1uZcsSHro56/heHQx17lo00FDcPT7BMKEifrVhBg==",
      "dependencies": {
        "@nuxtjs/mdc": "^0.17.3",
        "@popperjs/core": "^2.11.8",

@@ -6009,9 +6010,9 @@
      "license": "MIT"
    },
    "node_modules/@types/node": {
-      "version": "24.10.1",
+      "version": "24.10.2",
-      "resolved": "https://registry.npmjs.org/@types/node/-/node-24.10.1.tgz",
+      "resolved": "https://registry.npmjs.org/@types/node/-/node-24.10.2.tgz",
-      "integrity": "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ==",
+      "integrity": "sha512-WOhQTZ4G8xZ1tjJTvKOpyEVSGgOTvJAfDK3FNFgELyaTpzhdgHVHeqW8V+UJvzF5BT+/B54T/1S2K6gd9c7bbA==",
      "dev": true,
      "license": "MIT",
      "dependencies": {
@@ -24,7 +24,7 @@
  },
  "dependencies": {
    "@js-joda/core": "^5.6.5",
-    "@kestra-io/ui-libs": "^0.0.263",
+    "@kestra-io/ui-libs": "^0.0.264",
    "@vue-flow/background": "^1.3.2",
    "@vue-flow/controls": "^1.1.2",
    "@vue-flow/core": "^1.47.0",

@@ -91,7 +91,7 @@
    "@types/humanize-duration": "^3.27.4",
    "@types/js-yaml": "^4.0.9",
    "@types/moment": "^2.13.0",
-    "@types/node": "^24.10.1",
+    "@types/node": "^24.10.2",
    "@types/nprogress": "^0.2.3",
    "@types/path-browserify": "^1.0.3",
    "@types/semver": "^7.7.1",

@@ -134,9 +134,9 @@
    "vue-tsc": "^3.1.4"
  },
  "optionalDependencies": {
-    "@esbuild/darwin-arm64": "^0.27.0",
+    "@esbuild/darwin-arm64": "^0.27.1",
-    "@esbuild/darwin-x64": "^0.27.0",
+    "@esbuild/darwin-x64": "^0.27.1",
-    "@esbuild/linux-x64": "^0.27.0",
+    "@esbuild/linux-x64": "^0.27.1",
    "@rollup/rollup-darwin-arm64": "^4.53.3",
    "@rollup/rollup-darwin-x64": "^4.53.3",
    "@rollup/rollup-linux-x64-gnu": "^4.53.3",
BIN ui/src/assets/fonts/public-sans/public-sans-v21-latin-600.woff2 (new file, binary file not shown)
@@ -1,6 +1,6 @@
 <template>
     <el-row :gutter="32">
-        <el-col :span="8" v-for="characteristics in editionCharacteristics" :key="characteristics.name">
+        <el-col :xs="24" :md="8" v-for="characteristics in editionCharacteristics" :key="characteristics.name" class="edition-col">
             <EditionCharacteristics
                 class="h-100"
                 :name="characteristics.name"

@@ -115,3 +115,17 @@
        }
    ]
 </script>
+
+<style scoped lang="scss">
+    .edition-col {
+        margin-bottom: 2rem;
+
+        &:last-child {
+            margin-bottom: 0;
+        }
+
+        @media (min-width: 992px) {
+            margin-bottom: 0;
+        }
+    }
+</style>
@@ -184,7 +184,9 @@
|
|||||||
display: flex;
|
display: flex;
|
||||||
align-items: center;
|
align-items: center;
|
||||||
gap: 1rem;
|
gap: 1rem;
|
||||||
height: 2rem;
|
min-height: 2rem;
|
||||||
|
padding-top: 0.25rem;
|
||||||
|
padding-bottom: 0.25rem;
|
||||||
|
|
||||||
.usage-icon {
|
.usage-icon {
|
||||||
display: flex;
|
display: flex;
|
||||||
@@ -192,6 +194,7 @@
|
|||||||
justify-content: center;
|
justify-content: center;
|
||||||
width: 24px;
|
width: 24px;
|
||||||
height: 24px;
|
height: 24px;
|
||||||
|
flex-shrink: 0;
|
||||||
|
|
||||||
:deep(.material-design-icon__svg) {
|
:deep(.material-design-icon__svg) {
|
||||||
font-size: 24px;
|
font-size: 24px;
|
||||||
@@ -201,11 +204,9 @@
|
|||||||
}
|
}
|
||||||
|
|
||||||
.usage-label {
|
.usage-label {
|
||||||
line-height: 1;
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
font-size: 14px;
|
font-size: 14px;
|
||||||
color: var(--ks-content-primary);
|
color: var(--ks-content-primary);
|
||||||
|
line-height: 1.2;
|
||||||
}
|
}
|
||||||
|
|
||||||
.usage-divider {
|
.usage-divider {
|
||||||
@@ -215,15 +216,16 @@
|
|||||||
}
|
}
|
||||||
|
|
||||||
.usage-value {
|
.usage-value {
|
||||||
line-height: 1;
|
font-size: 14px;
|
||||||
display: flex;
|
line-height: 1.2;
|
||||||
align-items: center;
|
white-space: nowrap;
|
||||||
}
|
}
|
||||||
|
|
||||||
.el-button {
|
.el-button {
|
||||||
color: var(--ks-content-primary);
|
color: var(--ks-content-primary);
|
||||||
display: flex;
|
display: flex;
|
||||||
align-items: center;
|
align-items: center;
|
||||||
|
flex-shrink: 0;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -87,7 +87,7 @@
 </el-form-item>
 <div class="password-requirements mb-4">
 <el-text>
-8+ chars, 1 upper, 1 number
+{{ t('setup.form.password_requirements') }}
 </el-text>
 </div>
 </el-form>
@@ -502,7 +502,7 @@
 localStorage.removeItem("basicAuthUserCreated")
 localStorage.setItem("basicAuthSetupCompletedAt", new Date().toISOString())
 
-router.push({name: "login"})
+router.push({name: "welcome"})
 }
 </script>
 

@@ -101,7 +101,7 @@ $checkbox-checked-color: #8405FF;
 
 .el-text {
 color: var(--ks-content-tertiary);
-font-size: 14px;
+font-size: 12px;
 }
 }
 

@@ -3,6 +3,8 @@ import Utils from "../../../utils/utils";
 import {cssVariable, State} from "@kestra-io/ui-libs";
 import {getSchemeValue} from "../../../utils/scheme";
 
+import {useMiscStore} from "override/stores/misc";
+
 export function tooltip(tooltipModel: {
 title?: string[];
 body?: { lines: string[] }[];
@@ -115,7 +117,7 @@ export function extractState(value: any) {
 return value;
 }
 
-export function chartClick(moment: any, router: any, route: any, event: any, parsedData: any, elements: any, type = "label") {
+export function chartClick(moment: any, router: any, route: any, event: any, parsedData: any, elements: any, type = "label", filters: Record<string, any> = {}) {
 const query: Record<string, any> = {};
 
 if (elements && parsedData) {
@@ -192,7 +194,11 @@ export function chartClick(moment: any, router: any, route: any, event: any, par
 params: {
 tenant: route.params.tenant,
 },
-query: query,
+query: {
+...query,
+...filters,
+"filters[timeRange][EQUALS]":useMiscStore()?.configs?.chartDefaultDuration ?? "P30D"
+},
 });
 }
 }
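The merged query above combines the chart's own query parameters, any caller-supplied filters, and a default time range taken from the instance configuration. A minimal sketch of that merge, assuming plain object shapes (the configuration key mirrors the one in the hunk; everything else is illustrative):

    // Sketch: how the navigation query is assembled after this change (illustrative only).
    function buildChartQuery(
        query: Record<string, any>,
        filters: Record<string, any>,
        chartDefaultDuration?: string,
    ): Record<string, any> {
        return {
            ...query,   // values derived from the clicked chart element
            ...filters, // caller-supplied filters (e.g. namespace / flowId)
            // fall back to 30 days when the instance does not configure a default duration
            "filters[timeRange][EQUALS]": chartDefaultDuration ?? "P30D",
        };
    }

    // Example: a click scoped to one flow in one namespace
    const q = buildChartQuery({"filters[state][IN]": "FAILED"}, {
        "filters[namespace][IN]": "company.team",
        "filters[flowId][EQUALS]": "hello-world",
    });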

@@ -49,6 +49,8 @@
 showDefault: {type: Boolean, default: false},
 short: {type: Boolean, default: false},
 execution: {type: Boolean, default: false},
+flow: {type: String, default: undefined},
+namespace: {type: String, default: undefined},
 });
 
 
@@ -153,7 +155,10 @@
 if (data.type === "io.kestra.plugin.core.dashboard.data.Logs" || props.execution) {
 return;
 }
-chartClick(moment, router, route, {}, parsedData.value, elements, "label");
+chartClick(moment, router, route, {}, parsedData.value, elements, "label", {
+...(props.namespace ? {"filters[namespace][IN]": props.namespace} : {}),
+...(props.flow ? {"filters[flowId][EQUALS]": props.flow} : {})
+});
 },
 }, theme.value);
 });

@@ -40,12 +40,9 @@ export function useExecutionRoot() {
 title: route.params.id as string,
 breadcrumb: [
 {
-label: t("flows"),
+label: t("executions"),
 link: {
-name: "flows/list",
-query: {
-namespace: ns
-}
+name: "executions/list"
 }
 },
 {
@@ -57,17 +54,6 @@ export function useExecutionRoot() {
 id: flowId
 }
 }
-},
-{
-label: t("executions"),
-link: {
-name: "flows/update",
-params: {
-namespace: ns,
-id: flowId,
-tab: "executions"
-}
-}
 }
 ]
 };

@@ -120,14 +120,6 @@
 :execution
 />
 
-<!-- TODO: To be reworked and integrated into the Cascader component -->
-<TriggerCascader
-:title="t('trigger')"
-:empty="t('no_trigger')"
-:elements="execution.trigger"
-:execution
-/>
-
 <div id="chart">
 <div>
 <section>
@@ -151,7 +143,7 @@
 </section>
 <TimeSeries
 ref="chartRef"
-:chart="{...chart, content: YAML_CHART}"
+:chart
 :filters
 showDefault
 execution
@@ -185,11 +177,9 @@
 import {useI18n} from "vue-i18n";
 const {t} = useI18n({useScope: "global"});
 
-import {useBreakpoints, breakpointsElement} from "@vueuse/core";
-const verticalLayout = useBreakpoints(breakpointsElement).smallerOrEqual("md");
-
 import moment from "moment";
 
+import {verticalLayout} from "./utils/layout";
 import {createLink} from "./utils/links";
 import Utils from "../../../utils/utils";
 import {FilterObject} from "../../../utils/filters";
@@ -202,8 +192,7 @@
 
 import ErrorAlert from "./components/main/ErrorAlert.vue";
 import Id from "../../Id.vue";
-import Cascader from "./components/main/Cascader.vue";
-import TriggerCascader from "./components/main/TriggerCascader.vue";
+import Cascader from "./components/main/cascaders/Cascader.vue";
 import TimeSeries from "../../dashboard/sections/TimeSeries.vue";
 import PrevNext from "./components/main/PrevNext.vue";
 
@@ -432,14 +421,21 @@
 title: t("flow_outputs"),
 empty: t("no_flow_outputs"),
 elements: execution.value?.outputs,
+includeDebug: "outputs",
+},
+{
+title: t("trigger"),
+empty: t("no_trigger"),
+elements: execution.value?.trigger,
+includeDebug: "trigger",
 },
 ];
 
-const options = useValues("executions").VALUES.RELATIVE_DATE.slice(0, -1); // Remove last 365 days option
+const options = useValues("executions").VALUES.RELATIVE_DATE;
 const timerange = ref<string>("PT168H"); // Default to last 7 days
 
 const chartRef = ref<InstanceType<typeof TimeSeries> | null>(null);
-const chart = yaml.parse(YAML_CHART);
+const chart = {...yaml.parse(YAML_CHART), content: YAML_CHART};
 const filters = computed((): FilterObject[] => {
 if (!execution.value) return [];
 

@@ -1,639 +0,0 @@
-<template>
-<div :id="`cascader-${props.title}`">
-<div class="header">
-<el-text truncated>
-{{ props.title }}
-</el-text>
-<el-input
-v-if="props.elements"
-v-model="filter"
-:placeholder="$t('search')"
-:suffixIcon="Magnify"
-/>
-</div>
-
-<el-splitter
-v-if="props.elements"
-:layout="verticalLayout ? 'vertical' : 'horizontal'"
->
-<el-splitter-panel
-v-model:size="leftWidth"
-:min="'30%'"
-:max="'70%'"
->
-<div class="d-flex flex-column overflow-x-auto left">
-<ElCascaderPanel
-ref="cascader"
-v-model="selected"
-:options="filteredOptions"
-:border="false"
-class="flex-grow-1 cascader"
-@change="onSelectionChange"
->
-<template #default="{data}">
-<div
-class="w-100 d-flex justify-content-between"
-@click="onNodeClick(data)"
->
-<div class="pe-5 d-flex">
-<span>{{ data.label }}</span>
-</div>
-<code>
-<span class="regular">
-{{ processedValue(data).label }}
-</span>
-</code>
-</div>
-</template>
-</ElCascaderPanel>
-</div>
-</el-splitter-panel>
-<el-splitter-panel v-model:size="rightWidth">
-<div class="right wrapper">
-<div class="w-100 overflow-auto debug-wrapper">
-<div class="debug">
-<div class="debug-title mb-3">
-<span>{{ $t("eval.render") }}</span>
-</div>
-
-<div class="d-flex flex-column p-3 debug">
-<Editor
-ref="debugEditor"
-:fullHeight="false"
-:customHeight="20"
-:input="true"
-:navbar="false"
-:modelValue="computedDebugValue"
-@update:model-value="editorValue = $event"
-@confirm="onDebugExpression($event)"
-class="w-100"
-/>
-
-<el-button
-type="primary"
-:icon="Refresh"
-@click="
-onDebugExpression(
-editorValue.length > 0
-? editorValue
-: computedDebugValue,
-)
-"
-class="mt-3"
->
-{{ $t("eval.render") }}
-</el-button>
-
-<Editor
-v-if="debugExpression"
-:readOnly="true"
-:input="true"
-:fullHeight="false"
-:customHeight="20"
-:navbar="false"
-:modelValue="debugExpression"
-:lang="isJSON ? 'json' : ''"
-class="mt-3"
-/>
-</div>
-</div>
-
-<el-alert
-v-if="debugError"
-type="error"
-:closable="false"
-class="overflow-auto"
->
-<p>
-<strong>{{ debugError }}</strong>
-</p>
-<div class="my-2">
-<CopyToClipboard
-:text="`${debugError}\n\n${debugStackTrace}`"
-label="Copy Error"
-class="d-inline-block me-2"
-/>
-</div>
-<pre class="mb-0" style="overflow: scroll">{{
-debugStackTrace
-}}</pre>
-</el-alert>
-
-<VarValue
-v-if="selectedValue && displayVarValue()"
-:value="
-selectedValue?.uri
-? selectedValue?.uri
-: selectedValue
-"
-:execution="execution"
-/>
-</div>
-</div>
-</el-splitter-panel>
-</el-splitter>
-<span v-else class="empty">{{ props.empty }}</span>
-</div>
-</template>
-
-<script setup lang="ts">
-import {ref, computed, watch, onMounted} from "vue";
-import {ElCascaderPanel} from "element-plus";
-import CopyToClipboard from "../../../../layout/CopyToClipboard.vue";
-import Magnify from "vue-material-design-icons/Magnify.vue";
-import Editor from "../../../../inputs/Editor.vue";
-import VarValue from "../../../VarValue.vue";
-import Refresh from "vue-material-design-icons/Refresh.vue";
-
-onMounted(() => {
-if (props.elements) formatted.value = format(props.elements);
-
-// Open first node by default on page mount
-if (cascader?.value) {
-const nodes = cascader.value.$el.querySelectorAll(".el-cascader-node");
-if (nodes.length > 0) (nodes[0] as HTMLElement).click();
-}
-});
-
-interface CascaderOption {
-label: string;
-value: string;
-children?: CascaderOption[];
-path?: string;
-[key: string]: any;
-}
-
-const props = defineProps<{
-title: string;
-empty: string;
-elements?: CascaderOption;
-execution: any;
-}>();
-
-const cascader = ref<any>(null);
-const debugEditor = ref<InstanceType<typeof Editor>>();
-const selected = ref<string[]>([]);
-const editorValue = ref("");
-const debugExpression = ref("");
-const debugError = ref("");
-const debugStackTrace = ref("");
-const isJSON = ref(false);
-const expandedValue = ref("");
-
-import {useBreakpoints, breakpointsElement} from "@vueuse/core";
-const verticalLayout = useBreakpoints(breakpointsElement).smallerOrEqual("md");
-
-const leftWidth = verticalLayout ? ref("50%") : ref("80%");
-const rightWidth = verticalLayout ? ref("50%") : ref("20%");
-
-const formatted = ref<Node[]>([]);
-const format = (obj: Record<string, any>): Node[] => {
-return Object.entries(obj).map(([key, value]) => {
-const children =
-typeof value === "object" && value !== null
-? Object.entries(value).map(([k, v]) => format({[k]: v})[0])
-: [{label: value, value: value}];
-
-// Filter out children with undefined label and value
-const filteredChildren = children.filter(
-(child) => child.label !== undefined || child.value !== undefined,
-);
-
-// Return node with or without children based on existence
-const node = {label: key, value: key};
-
-// Include children only if there are valid entries
-if (filteredChildren.length) {
-node.children = filteredChildren;
-}
-
-return node;
-});
-};
-const filter = ref("");
-const filteredOptions = computed(() => {
-if (filter.value === "") return formatted.value;
-
-const lowercase = filter.value.toLowerCase();
-return formatted.value.filter((node) => {
-const matchesNode = node.label.toLowerCase().includes(lowercase);
-
-if (!node.children) return matchesNode;
-
-const matchesChildren = node.children.some((c) =>
-c.label.toLowerCase().includes(lowercase),
-);
-
-return matchesNode || matchesChildren;
-});
-});
-
-const selectedValue = computed(() => {
-if (!selected.value?.length) return null;
-
-const node = selectedNode();
-return node?.value || node?.label;
-});
-
-const computedDebugValue = computed(() => {
-if (selected.value?.length) {
-const path = selected.value.join(".");
-return `{{ trigger.${path} }}`;
-}
-
-if (expandedValue.value) {
-return `{{ trigger.${expandedValue.value} }}`;
-}
-
-return "{{ trigger }}";
-});
-
-function selectedNode(): CascaderOption | null {
-if (!selected.value?.length) return null;
-
-let currentOptions: CascaderOption[] = props.elements;
-let currentNode: CascaderOption | undefined = undefined;
-
-for (const value of selected.value) {
-currentNode = currentOptions?.find(
-(option) => option.value === value || option.label === value,
-);
-if (currentNode?.children) {
-currentOptions = currentNode.children;
-}
-}
-
-return currentNode || null;
-}
-
-function processedValue(data: any) {
-const trim = (value: any) =>
-typeof value !== "string" || value.length < 16
-? value
-: `${value.substring(0, 16)}...`;
-
-return {
-label: trim(data.value || data.label),
-regular: typeof data.value !== "object",
-};
-}
-
-function onNodeClick(data: any) {
-let path = "";
-
-if (selected.value?.length) {
-path = selected.value.join(".");
-}
-
-if (!path) {
-const findNodePath = (
-options: Record<string, any>[],
-targetNode: any,
-currentPath: string[] = [],
-): string[] | null => {
-const localOptions = Array.isArray(options)
-? options
-: [options]
-for (const option of localOptions) {
-const newPath = [...currentPath, option.value || option.label];
-
-if (
-option.value === targetNode.value ||
-option.label === targetNode.label ||
-option.value === (targetNode.value || targetNode.label) ||
-option.label === (targetNode.value || targetNode.label)
-) {
-return newPath;
-}
-
-if (option.children) {
-const found = findNodePath(
-option.children ?? [],
-targetNode,
-newPath,
-);
-if (found) return found;
-}
-}
-return null;
-};
-
-const nodePath = findNodePath(props.elements ?? [], data);
-path = nodePath ? nodePath.join(".") : "";
-}
-
-if (path) {
-expandedValue.value = path;
-debugExpression.value = "";
-debugError.value = "";
-debugStackTrace.value = "";
-}
-}
-
-function onSelectionChange(value: any) {
-if (value?.length) {
-const path = value.join(".");
-expandedValue.value = path;
-debugExpression.value = "";
-debugError.value = "";
-debugStackTrace.value = "";
-}
-}
-
-function displayVarValue(): boolean {
-return Boolean(
-selectedValue.value &&
-typeof selectedValue.value === "string" &&
-(selectedValue.value.startsWith("kestra://") ||
-selectedValue.value.startsWith("http://") ||
-selectedValue.value.startsWith("https://")),
-);
-}
-
-function evaluateExpression(expression: string, trigger: any): any {
-try {
-const cleanExpression = expression
-.replace(/^\{\{\s*/, "")
-.replace(/\s*\}\}$/, "")
-.trim();
-
-if (cleanExpression === "trigger") {
-return trigger;
-}
-
-if (!cleanExpression.startsWith("trigger.")) {
-throw new Error("Expression must start with \"trigger.\"");
-}
-
-const path = cleanExpression.substring(8);
-const parts = path.split(".");
-let result = trigger;
-
-for (const part of parts) {
-if (result && typeof result === "object" && part in result) {
-result = result[part];
-} else {
-throw new Error(`Property "${part}" not found`);
-}
-}
-
-return result;
-} catch (error: any) {
-throw new Error(`Failed to evaluate expression: ${error.message}`);
-}
-}
-
-function onDebugExpression(expression: string): void {
-try {
-debugError.value = "";
-debugStackTrace.value = "";
-
-const result = evaluateExpression(expression, props.execution?.trigger);
-
-try {
-if (typeof result === "object" && result !== null) {
-debugExpression.value = JSON.stringify(result, null, 2);
-isJSON.value = true;
-} else {
-debugExpression.value = String(result);
-isJSON.value = false;
-}
-} catch {
-debugExpression.value = String(result);
-isJSON.value = false;
-}
-} catch (error: any) {
-debugError.value = error.message || "Failed to evaluate expression";
-debugStackTrace.value = error.stack || "";
-debugExpression.value = "";
-isJSON.value = false;
-}
-}
-
-watch(
-selected,
-(newValue) => {
-if (newValue?.length) {
-const path = newValue.join(".");
-expandedValue.value = path;
-debugExpression.value = "";
-debugError.value = "";
-debugStackTrace.value = "";
-}
-},
-{deep: true},
-);
-</script>
-
-<style scoped lang="scss">
-.outputs {
-height: fit-content;
-display: flex;
-position: relative;
-}
-
-.left {
-overflow-x: auto;
-height: 100%;
-display: flex;
-flex-direction: column;
-}
-
-:deep(.el-cascader-panel) {
-min-height: 197px;
-height: 100%;
-border: 1px solid var(--ks-border-primary);
-border-radius: 0;
-overflow-x: auto !important;
-overflow-y: hidden !important;
-
-.el-scrollbar.el-cascader-menu:nth-of-type(-n + 2) ul li:first-child {
-pointer-events: auto !important;
-margin: 0 !important;
-}
-
-.el-cascader-node {
-pointer-events: auto !important;
-cursor: pointer !important;
-}
-
-.el-cascader-panel__wrap {
-overflow-x: auto !important;
-display: flex !important;
-min-width: max-content !important;
-}
-
-.el-cascader-menu {
-min-width: 300px;
-max-width: 300px;
-flex-shrink: 0;
-
-&:last-child {
-border-right: 1px solid var(--ks-border-primary);
-}
-
-.el-cascader-menu__wrap {
-height: 100%;
-}
-
-.el-cascader-node {
-height: 36px;
-line-height: 36px;
-font-size: var(--el-font-size-small);
-color: var(--ks-content-primary);
-
-&[aria-haspopup="false"] {
-padding-right: 0.5rem !important;
-}
-
-&:hover {
-background-color: var(--ks-border-primary);
-}
-
-&.in-active-path,
-&.is-active {
-background-color: var(--ks-border-primary);
-font-weight: normal;
-}
-
-.el-cascader-node__prefix {
-display: none;
-}
-
-code span.regular {
-color: var(--ks-content-primary);
-}
-}
-}
-}
-
-:deep(.el-cascader-node) {
-cursor: pointer;
-margin: 0 !important;
-}
-
-.el-cascader-menu__list {
-padding: 6px;
-}
-
-.wrapper {
-height: fit-content;
-overflow: hidden;
-z-index: 1000;
-height: 100%;
-display: flex;
-flex-direction: column;
-
-.debug-wrapper {
-min-height: 197px;
-border: 1px solid var(--ks-border-primary);
-border-left-width: 0.5px;
-border-radius: 0;
-padding: 0;
-background-color: var(--ks-background-body);
-flex: 1;
-}
-
-.debug-title {
-padding: 12px 16px;
-background-color: var(--ks-background-body);
-font-weight: bold;
-font-size: var(--el-font-size-base);
-}
-}
-
-@media (max-width: 768px) {
-.outputs {
-height: 600px;
-margin-top: 15px;
-}
-:deep(.el-cascader-panel) {
-height: 100%;
-}
-}
-
-
-@import "@kestra-io/ui-libs/src/scss/variables";
-
-[id^="cascader-"] {
-overflow: hidden;
-
-.header {
-display: flex;
-justify-content: space-between;
-align-items: center;
-padding-bottom: $spacer;
-
-> .el-text {
-width: 100%;
-display: flex;
-align-items: center;
-font-size: $font-size-xl;
-}
-
-> .el-input {
-display: flex;
-align-items: center;
-width: calc($spacer * 16);
-}
-}
-
-.el-cascader-panel {
-overflow: auto;
-}
-
-.empty {
-font-size: $font-size-sm;
-color: var(--ks-content-secondary);
-}
-
-:deep(.el-cascader-menu) {
-min-width: 300px;
-max-width: 300px;
-
-.el-cascader-menu__list {
-padding: 0;
-}
-
-.el-cascader-menu__wrap {
-height: 100%;
-}
-
-.node {
-width: 100%;
-display: flex;
-justify-content: space-between;
-}
-
-& .el-cascader-node {
-height: 36px;
-line-height: 36px;
-font-size: $font-size-sm;
-color: var(--ks-content-primary);
-padding: 0 30px 0 5px;
-
-&[aria-haspopup="false"] {
-padding-right: 0.5rem !important;
-}
-
-&:hover {
-background-color: var(--ks-border-primary);
-}
-
-&.in-active-path,
-&.is-active {
-background-color: var(--ks-border-primary);
-font-weight: normal;
-}
-
-.el-cascader-node__prefix {
-display: none;
-}
-
-code span.regular {
-color: var(--ks-content-primary);
-}
-}
-}
-}
-</style>

@@ -1,5 +1,5 @@
 <template>
-<div :id="`cascader-${props.title}`">
+<div :id="cascaderID">
 <div class="header">
 <el-text truncated>
 {{ props.title }}
@@ -12,70 +12,86 @@
 />
 </div>
 
-<el-cascader-panel
-v-if="props.elements"
-ref="cascader"
-:options="filteredOptions"
->
-<template #default="{data}">
-<VarValue
-v-if="isFile(data.value)"
-:value="data.value"
-:execution="props.execution"
-class="node"
-/>
-<div v-else class="node">
-<div :title="data.label">
-{{ data.label }}
+<template v-if="props.elements">
+<el-splitter
+v-if="props.includeDebug"
+:layout="verticalLayout ? 'vertical' : 'horizontal'"
+lazy
+>
+<el-splitter-panel :size="verticalLayout ? '50%' : '70%'">
+<el-cascader-panel
+:options="filteredOptions"
+@expand-change="(p: string[]) => (path = p.join('.'))"
+class="debug"
+>
+<template #default="{data}">
+<div class="node">
+<div :title="data.label">
+{{ data.label }}
+</div>
+<div v-if="data.value && data.children">
+<code>{{ itemsCount(data) }}</code>
+</div>
+</div>
+</template>
+</el-cascader-panel>
+</el-splitter-panel>
+<el-splitter-panel>
+<DebugPanel
+:property="props.includeDebug"
+:execution
+:path
+/>
+</el-splitter-panel>
+</el-splitter>
+
+<el-cascader-panel v-else :options="filteredOptions">
+<template #default="{data}">
+<div class="node">
+<div :title="data.label">
+{{ data.label }}
+</div>
+<div v-if="data.value && data.children">
+<code>{{ itemsCount(data) }}</code>
+</div>
 </div>
-<div v-if="data.value && data.children">
-<code>
-{{ data.children.length }}
-{{
-$t(
-data.children.length === 1
-? "item"
-: "items",
-)
-}}
-</code>
-</div>
-</div>
-</template>
-</el-cascader-panel>
+</template>
+</el-cascader-panel>
+</template>
 
 <span v-else class="empty">{{ props.empty }}</span>
 </div>
 </template>
 
 <script setup lang="ts">
-import {onMounted, computed, ref} from "vue";
+import {onMounted, nextTick, computed, ref} from "vue";
 
-import VarValue from "../../../VarValue.vue";
+import DebugPanel from "./DebugPanel.vue";
 
-import {Execution} from "../../../../../stores/executions";
+import {Execution} from "../../../../../../stores/executions";
 
+import {verticalLayout} from "../../../utils/layout";
+
+import {useI18n} from "vue-i18n";
+const {t} = useI18n({useScope: "global"});
+
 import Magnify from "vue-material-design-icons/Magnify.vue";
 
+export interface Node {
+label: string;
+value: string;
+children?: Node[];
+}
+
 const props = defineProps<{
 title: string;
 empty: string;
 elements?: Record<string, any>;
+includeDebug?: "outputs" | "trigger";
 execution: Execution;
 }>();
 
-const isFile = (data: any) => {
-if (typeof data !== "string") return false;
-
-const prefixes = ["kestra:///", "file://", "nsfile://"];
-return prefixes.some((prefix) => data.startsWith(prefix));
-};
-
-interface Node {
-label: string;
-value: string;
-children?: Node[];
-}
+const path = ref<string>("");
 
 const formatted = ref<Node[]>([]);
 const format = (obj: Record<string, any>): Node[] => {
@@ -114,15 +130,25 @@
 });
 });
 
-const cascader = ref<any>(null);
-onMounted(() => {
+const itemsCount = (item: Node) => {
+const length = item.children?.length ?? 0;
+
+if (!length) return undefined;
+
+return `${length} ${length === 1 ? t("item") : t("items")}`;
+};
+
+const cascaderID = `cascader-${props.title.toLowerCase().replace(/\s+/g, "-")}`;
+onMounted(async () => {
 if (props.elements) formatted.value = format(props.elements);
 
-// Open first node by default on page mount
-if (cascader?.value) {
-const nodes = cascader.value.$el.querySelectorAll(".el-cascader-node");
+await nextTick(() => {
+// Open first node by default on page mount
+const selector = `#${cascaderID} .el-cascader-node`;
+const nodes = document.querySelectorAll(selector);
+
 if (nodes.length > 0) (nodes[0] as HTMLElement).click();
-}
+});
 });
 </script>
 
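For reference, a standalone sketch of the two helpers introduced above: the DOM id is derived from the panel title, and the item counter only reports children when there are any (the translation function is stubbed here; in the component it comes from vue-i18n):

    // Illustrative re-statement of the helpers added in this hunk.
    interface Node {
        label: string;
        value: string;
        children?: Node[];
    }

    const t = (key: string) => key; // stand-in for vue-i18n's t()

    const cascaderID = (title: string) =>
        `cascader-${title.toLowerCase().replace(/\s+/g, "-")}`;

    const itemsCount = (item: Node): string | undefined => {
        const length = item.children?.length ?? 0;
        if (!length) return undefined;
        return `${length} ${length === 1 ? t("item") : t("items")}`;
    };

    console.log(cascaderID("Flow outputs")); // "cascader-flow-outputs"
    console.log(itemsCount({label: "a", value: "a", children: [{label: "b", value: "b"}]})); // "1 item"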
@@ -154,6 +180,12 @@
 
 .el-cascader-panel {
 overflow: auto;
 
+&.debug {
+min-height: -webkit-fill-available;
+border-top-right-radius: 0;
+border-bottom-right-radius: 0;
+}
 }
 
 .empty {
@@ -0,0 +1,182 @@
+<template>
+<div id="debug">
+<Editor
+v-model="expression"
+:shouldFocus="false"
+:navbar="false"
+input
+class="expression"
+/>
+
+<div class="buttons">
+<el-button type="primary" :icon="Refresh" @click="onRender">
+{{ $t("eval.render") }}
+</el-button>
+<el-button
+:disabled="!result && !error"
+:icon="CloseCircleOutline"
+@click="clearAll"
+/>
+</div>
+
+<template v-if="result">
+<VarValue v-if="isFile" :value="result.value" :execution />
+
+<Editor
+v-else
+v-model="result.value"
+:shouldFocus="false"
+:navbar="false"
+input
+readOnly
+:lang="result.type"
+class="result"
+/>
+</template>
+
+<el-alert
+v-else-if="error"
+type="error"
+:title="error"
+showIcon
+:closable="false"
+/>
+</div>
+</template>
+
+<script setup lang="ts">
+import {watch, ref, computed} from "vue";
+
+import Editor from "../../../../../inputs/Editor.vue";
+import VarValue from "../../../../VarValue.vue";
+
+import {Execution} from "../../../../../../stores/executions";
+
+import Refresh from "vue-material-design-icons/Refresh.vue";
+import CloseCircleOutline from "vue-material-design-icons/CloseCircleOutline.vue";
+
+const props = defineProps<{
+property: "outputs" | "trigger";
+execution: Execution;
+path: string;
+}>();
+
+const result = ref<{ value: string; type: string } | undefined>(undefined);
+const error = ref<string | undefined>(undefined);
+
+const clearAll = () => {
+result.value = undefined;
+error.value = undefined;
+};
+
+const isFile = computed(() => {
+if (!result.value || typeof result.value.value !== "string") return false;
+
+const prefixes = ["kestra:///", "file://", "nsfile://"];
+return prefixes.some((prefix) => result.value!.value.startsWith(prefix));
+});
+
+const expression = ref<string>("");
+watch(
+() => props.path,
+(path?: string) => {
+result.value = undefined;
+expression.value = `{{ ${props.property}${path ? `.${path}` : ""} }}`;
+},
+{immediate: true},
+);
+
+const onRender = () => {
+if (!props.execution) return;
+
+result.value = undefined;
+error.value = undefined;
+
+const clean = expression.value
+.replace(/^\{\{\s*/, "")
+.replace(/\s*\}\}$/, "")
+.trim();
+
+if (clean === "outputs" || clean === "trigger") {
+result.value = {
+value: JSON.stringify(props.execution[props.property], null, 2),
+type: "json",
+};
+}
+
+if (!clean.startsWith("outputs.") && !clean.startsWith("trigger.")) {
+result.value = undefined;
+error.value = `Expression must start with "{{ ${props.property}. }}"`;
+return;
+}
+
+const parts = clean.substring(props.property.length + 1).split(".");
+let target: any = props.execution[props.property];
+
+for (const part of parts) {
+if (target && typeof target === "object" && part in target) {
+target = target[part];
+} else {
+result.value = undefined;
+error.value = `Property "${part}" does not exist on ${props.property}`;
+return;
+}
+}
+
+if (target && typeof target === "object") {
+result.value = {
+value: JSON.stringify(target, null, 2),
+type: "json",
+};
+} else {
+result.value = {value: String(target), type: "text"};
+}
+};
+</script>
+
+<style scoped lang="scss">
+@import "@kestra-io/ui-libs/src/scss/variables";
+
+#debug {
+display: flex;
+flex-direction: column;
+height: 100%;
+padding: calc($spacer / 2) $spacer;
+border: 1px solid var(--el-border-color-light);
+
+:deep(.ks-editor) {
+&.expression {
+height: calc($spacer * 2);
+margin-bottom: $spacer;
+}
+
+&.result {
+height: calc($spacer * 10);
+}
+}
+
+.buttons {
+display: inline-flex;
+
+& :deep(.el-button) {
+width: 100%;
+margin-bottom: $spacer;
+padding: $spacer;
+font-size: $font-size-sm;
+overflow: hidden;
+
+span:not(i span) {
+display: block;
+min-width: 0;
+white-space: nowrap;
+overflow: hidden;
+text-overflow: ellipsis;
+}
+}
+
+& :deep(.el-button:nth-of-type(2)) {
+width: calc($spacer * 4);
+}
+}
+}
+</style>
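The render logic above strips the Pebble braces and then walks the dotted path against the execution's outputs or trigger object. A condensed sketch of that resolution step, with error handling reduced to a return value (names here are illustrative, not part of the component):

    // Resolve "{{ outputs.a.b }}" or "{{ trigger.x }}" against a plain object, as the panel does.
    function resolveExpression(
        expression: string,
        property: "outputs" | "trigger",
        context: Record<string, any>,
    ): {value: string; type: "json" | "text"} | {error: string} {
        const clean = expression.replace(/^\{\{\s*/, "").replace(/\s*\}\}$/, "").trim();

        if (clean === property) {
            return {value: JSON.stringify(context, null, 2), type: "json"};
        }
        if (!clean.startsWith(`${property}.`)) {
            return {error: `Expression must start with "{{ ${property}. }}"`};
        }

        let target: any = context;
        for (const part of clean.substring(property.length + 1).split(".")) {
            if (target && typeof target === "object" && part in target) {
                target = target[part];
            } else {
                return {error: `Property "${part}" does not exist on ${property}`};
            }
        }

        return typeof target === "object" && target !== null
            ? {value: JSON.stringify(target, null, 2), type: "json"}
            : {value: String(target), type: "text"};
    }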
ui/src/components/executions/overview/utils/layout.ts (new file)
@@ -0,0 +1,3 @@
+import {useBreakpoints, breakpointsElement} from "@vueuse/core";
+
+export const verticalLayout = useBreakpoints(breakpointsElement).smallerOrEqual("md");
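This small module centralises the breakpoint check that several of the components above previously declared locally, so they can all import the same reactive flag. A hedged usage sketch (the consumer function is hypothetical):

    // Hypothetical consumer of the shared breakpoint flag.
    import {verticalLayout} from "./layout";

    export function splitterLayout(): "vertical" | "horizontal" {
        // verticalLayout is a reactive ref from @vueuse/core's useBreakpoints()
        return verticalLayout.value ? "vertical" : "horizontal";
    }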
@@ -108,6 +108,8 @@ export function useFilters(
 const query = {...route.query};
 clearFilterQueryParams(query);
+
+delete query.page;
 
 if (legacyQuery) {
 clearLegacyParams(query);
 buildLegacyQuery(query);

@@ -67,6 +67,7 @@ export const useDashboardFilter = (): ComputedRef<FilterConfiguration> => {
 const {VALUES} = useValues("executions");
 return VALUES.EXECUTION_STATES;
 },
+searchable: true,
 showComparatorSelection: true
 },
 {

@@ -21,6 +21,7 @@ export const useFlowExecutionFilter = (): ComputedRef<FilterConfiguration> => {
 const {VALUES} = useValues("executions");
 return VALUES.EXECUTION_STATES;
 },
+searchable: true,
 visibleByDefault: true
 },
 {

@@ -213,6 +213,8 @@
 :filters="chartFilters()"
 showDefault
 short
+:flow="scope.row.id"
+:namespace="scope.row.namespace"
 />
 </template>
 </el-table-column>

@@ -3,15 +3,17 @@
 <span v-for="trigger in triggers" :key="uid(trigger)" :id="uid(trigger)">
 <template v-if="trigger.disabled === undefined || trigger.disabled === false">
 <el-popover
+:ref="(el: any) => setPopoverRef(el, trigger)"
 placement="left"
 :persistent="true"
 :title="`${$t('trigger details')}: ${trigger ? trigger.id : ''}`"
 :width="500"
 transition=""
 :hideAfter="0"
+@show="handlePopoverShow"
 >
 <template #reference>
-<el-button @click="copyLink(trigger)" size="small">
+<el-button class="trigger-icon" @click="copyLink(trigger)" size="small">
 <TaskIcon :onlyIcon="true" :cls="trigger?.type" :icons="pluginsStore.icons" />
 </el-button>
 </template>
@@ -24,7 +26,7 @@
 </div>
 </template>
 <script setup lang="ts">
-import {computed} from "vue";
+import {computed, ref, nextTick} from "vue";
 import {useRoute} from "vue-router";
 import {usePluginsStore} from "../../stores/plugins";
 import Utils from "../../utils/utils";
@@ -61,6 +63,8 @@
 const pluginsStore = usePluginsStore();
 const route = useRoute();
 
+const popoverRefs = ref<Map<string, any>>(new Map());
+
 const triggers = computed<Trigger[]>(() => {
 if (props.flow && props.flow.triggers) {
 return props.flow.triggers.filter(
@@ -77,6 +81,22 @@
 return (props.flow ? props.flow.namespace + "-" + props.flow.id : props.execution?.id) + "-" + trigger.id;
 }
 
+function setPopoverRef(el: any, trigger: Trigger) {
+if (el) {
+popoverRefs.value.set(uid(trigger), el);
+}
+}
+
+function handlePopoverShow() {
+nextTick(() => {
+popoverRefs.value.forEach((popover) => {
+if (popover?.popperRef?.popperInstanceRef) {
+popover.popperRef.popperInstanceRef.update();
+}
+});
+});
+}
+
 const {t} = useI18n();
 const toast = useToast();
 
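The pattern added here keeps one ref per popover and forces Popper to recompute its position once a popover is shown. A minimal sketch of the same idea, with the Element Plus internals reduced to an interface (the popperRef.popperInstanceRef.update() chain is taken from the hunk; the rest is illustrative):

    import {nextTick, ref} from "vue";

    // Shape assumed for an Element Plus popover exposing its Popper instance.
    interface PopoverLike {
        popperRef?: {popperInstanceRef?: {update: () => void}};
    }

    const popoverRefs = ref<Map<string, PopoverLike>>(new Map());

    function registerPopover(id: string, el: PopoverLike | null) {
        if (el) popoverRefs.value.set(id, el);
    }

    function refreshPopovers() {
        // Wait for the DOM to settle, then ask every live Popper instance to reposition.
        nextTick(() => {
            popoverRefs.value.forEach((popover) => {
                popover?.popperRef?.popperInstanceRef?.update();
            });
        });
    }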
@@ -99,12 +119,18 @@
 <style scoped lang="scss">
 .trigger {
 max-width: 180px;
-overflow-x: auto;
+display: flex;
+justify-content: center;
 }
 
-.el-button {
+.trigger-icon {
 display: inline-flex !important;
+align-items: center;
 margin-right: .25rem;
+border: none;
+background-color: transparent;
+padding: 2px;
+cursor: default;
 }
 
 :deep(div.wrapper) {

@@ -16,12 +16,12 @@
 />
 <div v-else-if="invalidGraph">
 <el-alert
-:title="t('topology-graph.invalid')"
+:title="$t('topology-graph.invalid')"
 type="error"
 class="invalid-graph"
 :closable="false"
 >
-{{ t('topology-graph.invalid_description') }}
+{{ $t('topology-graph.invalid_description') }}
 </el-alert>
 </div>
 </div>
@@ -29,15 +29,12 @@
 
 <script setup lang="ts">
 import {computed, ref} from "vue";
-import {useI18n} from "vue-i18n";
 import {Utils} from "@kestra-io/ui-libs";
 import LowCodeEditor from "./LowCodeEditor.vue";
 import {useFlowStore} from "../../stores/flow";
 
 const flowStore = useFlowStore();
 
-const {t} = useI18n();
-
 const flowYaml = computed(() => flowStore.flowYaml);
 const flowGraph = computed(() => flowStore.flowGraph);
 const invalidGraph = computed(() => flowStore.invalidGraph);

@@ -7,14 +7,14 @@
 :disabled="!playgroundStore.readyToStart"
 >
 <el-icon><Play /></el-icon>
-<span>{{ t('playground.run_task') }}</span>
+<span>{{ $t('playground.run_task') }}</span>
 <template #dropdown>
 <el-dropdown-menu>
 <el-dropdown-item :icon="Play" @click="playgroundStore.runUntilTask(taskId)">
-{{ t('playground.run_this_task') }}
+{{ $t('playground.run_this_task') }}
 </el-dropdown-item>
 <el-dropdown-item :icon="PlayBoxMultiple" @click="playgroundStore.runUntilTask(taskId, true)">
-{{ t('playground.run_task_and_downstream') }}
+{{ $t('playground.run_task_and_downstream') }}
 </el-dropdown-item>
 </el-dropdown-menu>
 </template>
@@ -22,12 +22,10 @@
 </template>
 
 <script setup lang="ts">
-import {useI18n} from "vue-i18n";
 import {usePlaygroundStore} from "../../stores/playground";
 import Play from "vue-material-design-icons/Play.vue";
 import PlayBoxMultiple from "vue-material-design-icons/PlayBoxMultiple.vue";
 
-const {t} = useI18n();
 const playgroundStore = usePlaygroundStore();
 
 defineProps<{

@@ -12,6 +12,7 @@
 import {useCoreStore} from "../../stores/core";
 import {useMiscStore} from "override/stores/misc";
 import {computed, onMounted} from "vue";
+import {useLayoutStore} from "../../stores/layout";
 
 const coreStore = useCoreStore();
 const miscStore = useMiscStore();
@@ -22,7 +23,9 @@
 document.getElementsByTagName("html")[0].classList.remove(collapse ? "menu-not-collapsed" : "menu-collapsed");
 }
 
+const layoutStore = useLayoutStore();
+
 onMounted(() => {
-onMenuCollapse(localStorage.getItem("menuCollapsed") === "true")
+onMenuCollapse(Boolean(layoutStore.sideMenuCollapsed))
 });
 </script>
@@ -28,7 +28,7 @@
 </template>
 
 <script setup lang="ts">
-import {onUpdated, ref, computed, h, watch} from "vue";
+import {onUpdated, computed, h, watch} from "vue";
 import {useI18n} from "vue-i18n";
 import {useRoute} from "vue-router";
 import {useMediaQuery} from "@vueuse/core";
@@ -118,7 +118,10 @@
 ];
 });
 
-const collapsed = ref(localStorage.getItem("menuCollapsed") === "true")
+const collapsed = computed({
+get: () => layoutStore.sideMenuCollapsed,
+set: (v: boolean) => layoutStore.setSideMenuCollapsed(v),
+})
 
 const isSmallScreen = useMediaQuery("(max-width: 768px)")
 

@@ -60,7 +60,7 @@
 @click="activeFlow = flowIndex"
 >
 <p class="title mb-2">
-{{ flow.description }}
+{{ flow.labels?.find(l => l.key === 'name')?.value ?? flow.id }}
 </p>
 <div>
 <div

@@ -25,10 +25,6 @@ const handleAuthError = (error, to) => {
 
 initApp(app, routes, null, en).then(({router, piniaStore}) => {
 router.beforeEach(async (to, from, next) => {
-if (to.meta?.anonymous === true) {
-return next();
-}
-
 if(to.path === from.path && to.query === from.query) {
 return next(); // Prevent navigation if the path and query are the same
 }
@@ -45,13 +41,28 @@ initApp(app, routes, null, en).then(({router, piniaStore}) => {
 if (validationErrors?.length > 0) {
 // Creds exist in config but failed validation
 // Route to login to show errors
+if (to.name === "login") {
+return next();
+}
+
 return next({name: "login"})
 } else {
 // No creds in config - redirect to set it up
+if (to.name === "setup") {
+return next();
+}
+
 return next({name: "setup"})
 }
 }
 
+if (to.meta?.anonymous === true) {
+if (to.name === "setup") {
+return next({name: "login"});
+}
+return next();
+}
+
 const hasCredentials = BasicAuth.isLoggedIn()
 
 if (!hasCredentials) {
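After this change the guard decides in a fixed order: misconfigured credentials send the user to the login screen (unless they are already there), a missing configuration sends them to setup, anonymous routes pass through except that the setup page itself now redirects to login, and only then is the stored session checked. A compact sketch of that precedence, under simplified assumptions about the route and config shapes:

    // Simplified decision table for the basic-auth navigation guard above (illustrative).
    type Target = "login" | "setup" | "proceed";

    function guardDecision(opts: {
        toName: string;
        anonymousRoute: boolean;
        configHasCreds: boolean;
        configValidationErrors: number;
    }): Target {
        const {toName, anonymousRoute, configHasCreds, configValidationErrors} = opts;

        if (!configHasCreds || configValidationErrors > 0) {
            const target: Target = configValidationErrors > 0 ? "login" : "setup";
            return toName === target ? "proceed" : target; // avoid redirect loops
        }
        if (anonymousRoute) {
            return toName === "setup" ? "login" : "proceed"; // setup is unreachable once configured
        }
        return "proceed"; // fall through to the session check
    }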
@@ -92,6 +103,6 @@ initApp(app, routes, null, en).then(({router, piniaStore}) => {
 }, null, router, true);
 
 // mount
-app.mount("#app")
+router.isReady().then(() => app.mount("#app"))
 });
 

@@ -67,6 +67,7 @@
 </div>
 
 <div class="action-button">
+<slot name="buttons" :blueprint="blueprint" />
 <el-tooltip v-if="embed && !system" trigger="click" content="Copied" placement="left" :autoClose="2000" effect="light">
 <el-button
 type="primary"

@@ -12,7 +12,13 @@ export const useLayoutStore = defineStore("layout", {
 topNavbar: undefined,
 envName: localStorage.getItem("envName") || undefined,
 envColor: localStorage.getItem("envColor") || undefined,
-sideMenuCollapsed: localStorage.getItem("menuCollapsed") === "true",
+sideMenuCollapsed: (() => {
+if (typeof window === "undefined") {
+return false;
+}
+
+return localStorage.getItem("menuCollapsed") === "true" || window.matchMedia("(max-width: 768px)").matches;
+})(),
 }),
 getters: {},
 actions: {
|
|||||||
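The collapsed state is now computed eagerly through an IIFE so the first render already matches narrow screens. The same pattern as a stand-alone function, assuming the 768px breakpoint from the diff; the function name is only for illustration, the store inlines the logic:

```js
// Guard against non-browser environments (e.g. unit tests without a DOM),
// then honour either the persisted preference or a narrow viewport.
function initialSideMenuCollapsed() {
    if (typeof window === "undefined") {
        return false;
    }

    return localStorage.getItem("menuCollapsed") === "true"
        || window.matchMedia("(max-width: 768px)").matches;
}
```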
@@ -20,6 +20,17 @@
         url('../../src/assets/fonts/public-sans/public-sans-v21-latin-regular.woff2') format('woff2');
 }
 
+@font-face {
+    font-family: 'Public Sans';
+    font-style: normal;
+    font-weight: 600;
+    font-display: swap;
+    src:
+        local('Public Sans SemiBold'),
+        local('PublicSans-SemiBold'),
+        url('../../src/assets/fonts/public-sans/public-sans-v21-latin-600.woff2') format('woff2');
+}
+
 @font-face {
     font-family: 'Public Sans';
     font-style: normal;
@@ -1612,7 +1612,8 @@
         "email": "E-Mail",
         "firstName": "Vorname",
         "lastName": "Nachname",
-        "password": "Passwort"
+        "password": "Passwort",
+        "password_requirements": "Das Passwort muss mindestens 8 Zeichen lang sein und mindestens 1 Großbuchstaben und 1 Zahl enthalten."
     },
     "login": "Anmelden",
     "logout": "Abmelden",
@@ -262,7 +262,7 @@
     "output": "Output",
     "eval": {
         "title": "Debug Expression",
-        "render": "Render Expression",
+        "render": "Render",
         "tooltip": "Render any Pebble expression and inspect the Execution context."
     },
     "attempt": "Attempt",
@@ -1477,7 +1477,8 @@
         "email": "Email",
         "firstName": "First Name",
         "lastName": "Last Name",
-        "password": "Password"
+        "password": "Password",
+        "password_requirements": "Password must be at least 8 characters long and include at least 1 uppercase letter and 1 number."
     },
     "validation": {
         "email_required": "Email is required",
@@ -1612,7 +1612,8 @@
         "email": "Correo electrónico",
         "firstName": "Nombre",
         "lastName": "Apellido",
-        "password": "Contraseña"
+        "password": "Contraseña",
+        "password_requirements": "La contraseña debe tener al menos 8 caracteres y contener al menos 1 letra mayúscula y 1 número."
     },
     "login": "Iniciar sesión",
     "logout": "Cerrar sesión",
@@ -1612,7 +1612,8 @@
         "email": "E-mail",
         "firstName": "Prénom",
         "lastName": "Nom de famille",
-        "password": "Mot de passe"
+        "password": "Mot de passe",
+        "password_requirements": "Le mot de passe doit comporter au moins 8 caractères, inclure au moins 1 lettre majuscule et 1 chiffre."
     },
     "login": "Connexion",
     "logout": "Déconnexion",
@@ -1612,7 +1612,8 @@
         "email": "ईमेल",
         "firstName": "पहला नाम",
         "lastName": "अंतिम नाम",
-        "password": "पासवर्ड"
+        "password": "पासवर्ड",
+        "password_requirements": "पासवर्ड कम से कम 8 अक्षरों का होना चाहिए और इसमें कम से कम 1 बड़ा अक्षर और 1 संख्या शामिल होनी चाहिए।"
     },
     "login": "लॉगिन",
     "logout": "लॉगआउट",
@@ -1612,7 +1612,8 @@
         "email": "Email",
         "firstName": "Nome",
         "lastName": "Cognome",
-        "password": "Password"
+        "password": "Password",
+        "password_requirements": "La password deve essere lunga almeno 8 caratteri e includere almeno 1 lettera maiuscola e 1 numero."
     },
     "login": "Accedi",
     "logout": "Logout",
@@ -1612,7 +1612,8 @@
         "email": "メール",
         "firstName": "名",
         "lastName": "姓",
-        "password": "パスワード"
+        "password": "パスワード",
+        "password_requirements": "パスワードは8文字以上で、少なくとも1つの大文字と1つの数字を含める必要があります。"
     },
     "login": "ログイン",
     "logout": "ログアウト",
@@ -1612,7 +1612,8 @@
         "email": "이메일",
         "firstName": "이름",
         "lastName": "성씨",
-        "password": "비밀번호"
+        "password": "비밀번호",
+        "password_requirements": "비밀번호는 최소 8자 이상이어야 하며, 최소 1개의 대문자와 1개의 숫자를 포함해야 합니다."
     },
     "login": "로그인",
     "logout": "로그아웃",
@@ -1612,7 +1612,8 @@
         "email": "Email",
         "firstName": "Imię",
         "lastName": "Nazwisko",
-        "password": "Hasło"
+        "password": "Hasło",
+        "password_requirements": "Hasło musi mieć co najmniej 8 znaków i zawierać co najmniej 1 wielką literę oraz 1 cyfrę."
     },
     "login": "Zaloguj się",
     "logout": "Wyloguj się",
@@ -1612,7 +1612,8 @@
         "email": "Email",
         "firstName": "Nome",
         "lastName": "Sobrenome",
-        "password": "Senha"
+        "password": "Senha",
+        "password_requirements": "A senha deve ter pelo menos 8 caracteres e incluir pelo menos 1 letra maiúscula e 1 número."
     },
     "login": "Login",
     "logout": "Sair",
@@ -1612,7 +1612,8 @@
         "email": "Email",
         "firstName": "Nome",
         "lastName": "Sobrenome",
-        "password": "Senha"
+        "password": "Senha",
+        "password_requirements": "A senha deve ter pelo menos 8 caracteres e incluir pelo menos 1 letra maiúscula e 1 número."
     },
     "login": "Login",
     "logout": "Sair",
@@ -1612,7 +1612,8 @@
         "email": "Электронная почта",
         "firstName": "Имя",
         "lastName": "Фамилия",
-        "password": "Пароль"
+        "password": "Пароль",
+        "password_requirements": "Пароль должен содержать не менее 8 символов, включая как минимум 1 заглавную букву и 1 цифру."
     },
     "login": "Войти",
     "logout": "Выход",
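Every locale above adds the same `password_requirements` message: at least 8 characters, at least one uppercase letter, and at least one digit. A minimal client-side check matching that wording (illustrative only; the diff adds the translation strings, not a validator, and the tests below are ASCII-only):

```js
function meetsPasswordRequirements(password) {
    return password.length >= 8
        && /[A-Z]/.test(password)   // at least one uppercase letter
        && /[0-9]/.test(password);  // at least one digit
}

meetsPasswordRequirements("Kestra123"); // true
meetsPasswordRequirements("short1A");   // false: only 7 characters
```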
Some files were not shown because too many files have changed in this diff.