Mirror of https://github.com/kestra-io/kestra.git, synced 2025-12-26 14:00:23 -05:00.

Compare commits: run-develo… → v1.1.5 (74 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 5f6a1cf377 | |
| | 420e081c69 | |
| | 0a7fffe1c5 | |
| | 48d14c9ed9 | |
| | 21a42a072a | |
| | 4f48ea0c21 | |
| | 890fa791e8 | |
| | 5e57de5cdf | |
| | cf2c6cd2b1 | |
| | b688dbc30b | |
| | 40877cc1cc | |
| | c0f178a159 | |
| | c64a083ac7 | |
| | ccf9d9b303 | |
| | 25dbdbd713 | |
| | d54477051f | |
| | 54a63d1b04 | |
| | 6f271e5694 | |
| | 0a718dab30 | |
| | ec522a6d44 | |
| | ad73a46b0c | |
| | ca56559c49 | |
| | ed739ec257 | |
| | 9effef9fcd | |
| | ffc61b2482 | |
| | fbbc0824ff | |
| | 842b8d604b | |
| | bd5ac06c5b | |
| | 335fe1e88c | |
| | 5c52ab300a | |
| | 756069f1a6 | |
| | faba958f08 | |
| | a772a61d62 | |
| | f2cb79cb98 | |
| | 9ea0b1cebb | |
| | 867dc20d47 | |
| | c669759afb | |
| | 7e3cd8a2cb | |
| | f203c5f43a | |
| | f4e90cc540 | |
| | ce0fd58c94 | |
| | f1b950941c | |
| | 559f3f2634 | |
| | 9bc65b84f1 | |
| | 223b137381 | |
| | 80d1df6eeb | |
| | a87e7f3b8d | |
| | 710862ef33 | |
| | d74f535ea1 | |
| | 1673f24356 | |
| | 2ad90625b8 | |
| | e77b80a1a8 | |
| | 6223b1f672 | |
| | 23329f4d48 | |
| | ed60cb6670 | |
| | f6306883b4 | |
| | 89433dc04c | |
| | 4837408c59 | |
| | 5a8c36caa5 | |
| | a2335abc0c | |
| | 310a7bbbe9 | |
| | 162feaf38c | |
| | 94050be49c | |
| | 848a5ac9d7 | |
| | 9ac7a9ce9a | |
| | c42838f3e1 | |
| | c499d62b63 | |
| | 8fbc62e12c | |
| | ae143f29f4 | |
| | e4a11fc9ce | |
| | ebacfc70b9 | |
| | 5bf67180a3 | |
| | 1e670b5e7e | |
| | 0dacad5ee1 | |
```diff
@@ -2,6 +2,7 @@ package io.kestra.cli.commands.migrations.metadata;
 
 import io.kestra.cli.AbstractCommand;
 import jakarta.inject.Inject;
+import jakarta.inject.Provider;
 import lombok.extern.slf4j.Slf4j;
 import picocli.CommandLine;
 
@@ -12,13 +13,13 @@ import picocli.CommandLine;
 @Slf4j
 public class KvMetadataMigrationCommand extends AbstractCommand {
     @Inject
-    private MetadataMigrationService metadataMigrationService;
+    private Provider<MetadataMigrationService> metadataMigrationServiceProvider;
 
     @Override
     public Integer call() throws Exception {
         super.call();
         try {
-            metadataMigrationService.kvMigration();
+            metadataMigrationServiceProvider.get().kvMigration();
         } catch (Exception e) {
             System.err.println("❌ KV Metadata migration failed: " + e.getMessage());
             e.printStackTrace();
```
```diff
@@ -2,6 +2,7 @@ package io.kestra.cli.commands.migrations.metadata;
 
 import io.kestra.cli.AbstractCommand;
 import jakarta.inject.Inject;
+import jakarta.inject.Provider;
 import lombok.extern.slf4j.Slf4j;
 import picocli.CommandLine;
 
@@ -12,13 +13,13 @@ import picocli.CommandLine;
 @Slf4j
 public class SecretsMetadataMigrationCommand extends AbstractCommand {
     @Inject
-    private MetadataMigrationService metadataMigrationService;
+    private Provider<MetadataMigrationService> metadataMigrationServiceProvider;
 
     @Override
     public Integer call() throws Exception {
         super.call();
         try {
-            metadataMigrationService.secretMigration();
+            metadataMigrationServiceProvider.get().secretMigration();
         } catch (Exception e) {
             System.err.println("❌ Secrets Metadata migration failed: " + e.getMessage());
             e.printStackTrace();
```
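Both migration commands above make the same change: `MetadataMigrationService` is no longer injected directly but obtained through a `jakarta.inject.Provider`, so the service bean is only instantiated when the command actually runs. A minimal sketch of that lazy-injection pattern, using hypothetical class names that are not part of the Kestra codebase:

```java
import jakarta.inject.Inject;
import jakarta.inject.Provider;

// Hypothetical service that is expensive to construct (for example it opens
// database connections in its constructor).
class ExpensiveMigrationService {
    void migrate() {
        System.out.println("migrating metadata");
    }
}

// Hypothetical command: the Provider defers bean creation until get() is
// called inside call(), instead of at command instantiation time.
class ExampleMigrationCommand {
    @Inject
    private Provider<ExpensiveMigrationService> serviceProvider;

    public Integer call() {
        // the bean (and its dependencies) is created here, on demand
        serviceProvider.get().migrate();
        return 0;
    }
}
```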
```diff
@@ -1,7 +1,9 @@
 package io.kestra.cli.commands.servers;
 
 import com.google.common.collect.ImmutableMap;
+import io.kestra.cli.services.TenantIdSelectorService;
 import io.kestra.core.models.ServerType;
+import io.kestra.core.repositories.LocalFlowRepositoryLoader;
 import io.kestra.core.runners.ExecutorInterface;
 import io.kestra.core.services.SkipExecutionService;
 import io.kestra.core.services.StartExecutorService;
@@ -10,6 +12,8 @@ import io.micronaut.context.ApplicationContext;
 import jakarta.inject.Inject;
 import picocli.CommandLine;
 
+import java.io.File;
+import java.io.IOException;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
@@ -19,6 +23,9 @@ import java.util.Map;
     description = "Start the Kestra executor"
 )
 public class ExecutorCommand extends AbstractServerCommand {
+    @CommandLine.Spec
+    CommandLine.Model.CommandSpec spec;
+
     @Inject
     private ApplicationContext applicationContext;
 
@@ -28,22 +35,28 @@ public class ExecutorCommand extends AbstractServerCommand {
     @Inject
     private StartExecutorService startExecutorService;
 
-    @CommandLine.Option(names = {"--skip-executions"}, split=",", description = "The list of execution identifiers to skip, separated by a coma; for troubleshooting purpose only")
+    @CommandLine.Option(names = {"-f", "--flow-path"}, description = "Tenant identifier required to load flows from the specified path")
+    private File flowPath;
+
+    @CommandLine.Option(names = "--tenant", description = "Tenant identifier, Required to load flows from path")
+    private String tenantId;
+
+    @CommandLine.Option(names = {"--skip-executions"}, split=",", description = "List of execution IDs to skip, separated by commas; for troubleshooting only")
     private List<String> skipExecutions = Collections.emptyList();
 
-    @CommandLine.Option(names = {"--skip-flows"}, split=",", description = "The list of flow identifiers (tenant|namespace|flowId) to skip, separated by a coma; for troubleshooting purpose only")
+    @CommandLine.Option(names = {"--skip-flows"}, split=",", description = "List of flow identifiers (tenant|namespace|flowId) to skip, separated by a coma; for troubleshooting only")
    private List<String> skipFlows = Collections.emptyList();
 
-    @CommandLine.Option(names = {"--skip-namespaces"}, split=",", description = "The list of namespace identifiers (tenant|namespace) to skip, separated by a coma; for troubleshooting purpose only")
+    @CommandLine.Option(names = {"--skip-namespaces"}, split=",", description = "List of namespace identifiers (tenant|namespace) to skip, separated by a coma; for troubleshooting only")
     private List<String> skipNamespaces = Collections.emptyList();
 
-    @CommandLine.Option(names = {"--skip-tenants"}, split=",", description = "The list of tenants to skip, separated by a coma; for troubleshooting purpose only")
+    @CommandLine.Option(names = {"--skip-tenants"}, split=",", description = "List of tenants to skip, separated by a coma; for troubleshooting only")
     private List<String> skipTenants = Collections.emptyList();
 
-    @CommandLine.Option(names = {"--start-executors"}, split=",", description = "The list of Kafka Stream executors to start, separated by a command. Use it only with the Kafka queue, for debugging purpose.")
+    @CommandLine.Option(names = {"--start-executors"}, split=",", description = "List of Kafka Stream executors to start, separated by a command. Use it only with the Kafka queue; for debugging only")
     private List<String> startExecutors = Collections.emptyList();
 
-    @CommandLine.Option(names = {"--not-start-executors"}, split=",", description = "The list of Kafka Stream executors to not start, separated by a command. Use it only with the Kafka queue, for debugging purpose.")
+    @CommandLine.Option(names = {"--not-start-executors"}, split=",", description = "Lst of Kafka Stream executors to not start, separated by a command. Use it only with the Kafka queue; for debugging only")
     private List<String> notStartExecutors = Collections.emptyList();
 
     @SuppressWarnings("unused")
@@ -64,6 +77,16 @@ public class ExecutorCommand extends AbstractServerCommand {
 
         super.call();
 
+        if (flowPath != null) {
+            try {
+                LocalFlowRepositoryLoader localFlowRepositoryLoader = applicationContext.getBean(LocalFlowRepositoryLoader.class);
+                TenantIdSelectorService tenantIdSelectorService = applicationContext.getBean(TenantIdSelectorService.class);
+                localFlowRepositoryLoader.load(tenantIdSelectorService.getTenantId(this.tenantId), this.flowPath);
+            } catch (IOException e) {
+                throw new CommandLine.ParameterException(this.spec.commandLine(), "Invalid flow path", e);
+            }
+        }
+
         ExecutorInterface executorService = applicationContext.getBean(ExecutorInterface.class);
         executorService.run();
```
```diff
@@ -23,7 +23,7 @@ public class IndexerCommand extends AbstractServerCommand {
     @Inject
     private SkipExecutionService skipExecutionService;
 
-    @CommandLine.Option(names = {"--skip-indexer-records"}, split=",", description = "a list of indexer record keys, separated by a coma; for troubleshooting purpose only")
+    @CommandLine.Option(names = {"--skip-indexer-records"}, split=",", description = "a list of indexer record keys, separated by a coma; for troubleshooting only")
     private List<String> skipIndexerRecords = Collections.emptyList();
 
     @SuppressWarnings("unused")
```
```diff
@@ -42,7 +42,7 @@ public class StandAloneCommand extends AbstractServerCommand {
     @Nullable
     private FileChangedEventListener fileWatcher;
 
-    @CommandLine.Option(names = {"-f", "--flow-path"}, description = "the flow path containing flow to inject at startup (when running with a memory flow repository)")
+    @CommandLine.Option(names = {"-f", "--flow-path"}, description = "Tenant identifier required to load flows from the specified path")
     private File flowPath;
 
     @CommandLine.Option(names = "--tenant", description = "Tenant identifier, Required to load flows from path with the enterprise edition")
@@ -51,19 +51,19 @@ public class StandAloneCommand extends AbstractServerCommand {
     @CommandLine.Option(names = {"--worker-thread"}, description = "the number of worker threads, defaults to eight times the number of available processors. Set it to 0 to avoid starting a worker.")
     private int workerThread = defaultWorkerThread();
 
-    @CommandLine.Option(names = {"--skip-executions"}, split=",", description = "a list of execution identifiers to skip, separated by a coma; for troubleshooting purpose only")
+    @CommandLine.Option(names = {"--skip-executions"}, split=",", description = "a list of execution identifiers to skip, separated by a coma; for troubleshooting only")
     private List<String> skipExecutions = Collections.emptyList();
 
-    @CommandLine.Option(names = {"--skip-flows"}, split=",", description = "a list of flow identifiers (namespace.flowId) to skip, separated by a coma; for troubleshooting purpose only")
+    @CommandLine.Option(names = {"--skip-flows"}, split=",", description = "a list of flow identifiers (namespace.flowId) to skip, separated by a coma; for troubleshooting only")
     private List<String> skipFlows = Collections.emptyList();
 
-    @CommandLine.Option(names = {"--skip-namespaces"}, split=",", description = "a list of namespace identifiers (tenant|namespace) to skip, separated by a coma; for troubleshooting purpose only")
+    @CommandLine.Option(names = {"--skip-namespaces"}, split=",", description = "a list of namespace identifiers (tenant|namespace) to skip, separated by a coma; for troubleshooting only")
     private List<String> skipNamespaces = Collections.emptyList();
 
-    @CommandLine.Option(names = {"--skip-tenants"}, split=",", description = "a list of tenants to skip, separated by a coma; for troubleshooting purpose only")
+    @CommandLine.Option(names = {"--skip-tenants"}, split=",", description = "a list of tenants to skip, separated by a coma; for troubleshooting only")
     private List<String> skipTenants = Collections.emptyList();
 
-    @CommandLine.Option(names = {"--skip-indexer-records"}, split=",", description = "a list of indexer record keys, separated by a coma; for troubleshooting purpose only")
+    @CommandLine.Option(names = {"--skip-indexer-records"}, split=",", description = "a list of indexer record keys, separated by a coma; for troubleshooting only")
     private List<String> skipIndexerRecords = Collections.emptyList();
 
     @CommandLine.Option(names = {"--no-tutorials"}, description = "Flag to disable auto-loading of tutorial flows.")
```
```diff
@@ -40,7 +40,7 @@ public class WebServerCommand extends AbstractServerCommand {
     @Option(names = {"--no-indexer"}, description = "Flag to disable starting an embedded indexer.")
     private boolean indexerDisabled = false;
 
-    @CommandLine.Option(names = {"--skip-indexer-records"}, split=",", description = "a list of indexer record keys, separated by a coma; for troubleshooting purpose only")
+    @CommandLine.Option(names = {"--skip-indexer-records"}, split=",", description = "a list of indexer record keys, separated by a coma; for troubleshooting only")
     private List<String> skipIndexerRecords = Collections.emptyList();
 
     @Override
```
```diff
@@ -30,15 +30,15 @@ micronaut:
     read-idle-timeout: 60m
     write-idle-timeout: 60m
     idle-timeout: 60m
-    netty:
-      max-zstd-encode-size: 67108864 # increased to 64MB from the default of 32MB
-      max-chunk-size: 10MB
-      max-header-size: 32768 # increased from the default of 8k
     responses:
       file:
         cache-seconds: 86400
         cache-control:
           public: true
+    netty:
+      max-zstd-encode-size: 67108864 # increased to 64MB from the default of 32MB
+      max-chunk-size: 10MB
+      max-header-size: 32768 # increased from the default of 8k
 
     # Access log configuration, see https://docs.micronaut.io/latest/guide/index.html#accessLogger
     access-logger:
```
```diff
@@ -68,7 +68,8 @@ class NoConfigCommandTest {
 
         assertThat(exitCode).isNotZero();
-        assertThat(out.toString()).isEmpty();
+        // check that the only log is an access log: this has the advantage to also check that access log is working!
+        assertThat(out.toString()).contains("POST /api/v1/main/flows HTTP/1.1 | status: 500");
         assertThat(err.toString()).contains("No bean of type [io.kestra.core.repositories.FlowRepositoryInterface] exists");
     }
 }
```
```diff
@@ -5,6 +5,8 @@ import io.kestra.core.models.annotations.Plugin;
 import io.kestra.core.models.dashboards.filters.AbstractFilter;
 import io.kestra.core.repositories.QueryBuilderInterface;
 import io.kestra.plugin.core.dashboard.data.IData;
+import jakarta.annotation.Nullable;
+import jakarta.validation.Valid;
 import jakarta.validation.constraints.NotBlank;
 import jakarta.validation.constraints.NotNull;
 import jakarta.validation.constraints.Pattern;
@@ -33,9 +35,12 @@ public abstract class DataFilter<F extends Enum<F>, C extends ColumnDescriptor<F
     @Pattern(regexp = JAVA_IDENTIFIER_REGEX)
     private String type;
 
+    @Valid
     private Map<String, C> columns;
 
     @Setter
+    @Valid
+    @Nullable
     private List<AbstractFilter<F>> where;
 
     private List<OrderBy> orderBy;
```
```diff
@@ -5,6 +5,7 @@ import io.kestra.core.models.annotations.Plugin;
 import io.kestra.core.models.dashboards.ChartOption;
 import io.kestra.core.models.dashboards.DataFilter;
 import io.kestra.core.validations.DataChartValidation;
+import jakarta.validation.Valid;
 import jakarta.validation.constraints.NotNull;
 import lombok.EqualsAndHashCode;
 import lombok.Getter;
@@ -20,6 +21,7 @@ import lombok.experimental.SuperBuilder;
 @DataChartValidation
 public abstract class DataChart<P extends ChartOption, D extends DataFilter<?, ?>> extends Chart<P> implements io.kestra.core.models.Plugin {
     @NotNull
+    @Valid
     private D data;
 
     public Integer minNumberOfAggregations() {
```
```diff
@@ -1,8 +1,11 @@
 package io.kestra.core.models.dashboards.filters;
 
+import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.annotation.JsonSubTypes;
 import com.fasterxml.jackson.annotation.JsonTypeInfo;
+import io.micronaut.core.annotation.Introspected;
+import jakarta.validation.Valid;
 import jakarta.validation.constraints.NotNull;
 import lombok.Getter;
 import lombok.NoArgsConstructor;
 import lombok.experimental.SuperBuilder;
@@ -32,6 +35,9 @@ import lombok.experimental.SuperBuilder;
 @SuperBuilder
+@Introspected
 public abstract class AbstractFilter<F extends Enum<F>> {
     @NotNull
+    @JsonProperty(value = "field", required = true)
+    @Valid
     private F field;
     private String labelKey;
 
```
```diff
@@ -1,15 +1,16 @@
 package io.kestra.core.models.executions;
 
-import io.micronaut.core.annotation.Introspected;
-import lombok.Builder;
-import lombok.Value;
 import io.kestra.core.models.tasks.Output;
 import io.kestra.core.models.triggers.AbstractTrigger;
+import io.micronaut.core.annotation.Introspected;
+import io.swagger.v3.oas.annotations.media.Schema;
+import jakarta.validation.constraints.NotNull;
+import lombok.Builder;
+import lombok.Value;
 
 import java.net.URI;
 import java.util.Collections;
 import java.util.Map;
-import jakarta.validation.constraints.NotNull;
 
 @Value
 @Builder
@@ -21,6 +22,7 @@ public class ExecutionTrigger {
     @NotNull
     String type;
 
+    @Schema(type = "object", additionalProperties = Schema.AdditionalPropertiesValue.TRUE)
     Map<String, Object> variables;
 
     URI logFile;
```
```diff
@@ -35,7 +35,6 @@ import static io.kestra.core.utils.Rethrow.throwFunction;
 @JsonDeserialize(using = Property.PropertyDeserializer.class)
 @JsonSerialize(using = Property.PropertySerializer.class)
 @Builder
-@NoArgsConstructor
 @AllArgsConstructor(access = AccessLevel.PACKAGE)
 @Schema(
     oneOf = {
@@ -51,6 +50,7 @@ public class Property<T> {
         .copy()
         .configure(SerializationFeature.WRITE_DURATIONS_AS_TIMESTAMPS, false);
 
+    private final boolean skipCache;
     private String expression;
     private T value;
 
@@ -60,13 +60,23 @@ public class Property<T> {
     @Deprecated
     // Note: when not used, this constructor would not be deleted but made private so it can only be used by ofExpression(String) and the deserializer
     public Property(String expression) {
-        this.expression = expression;
+        this(expression, false);
     }
 
+    private Property(String expression, boolean skipCache) {
+        this.expression = expression;
+        this.skipCache = skipCache;
+    }
+
     /**
      * @deprecated use {@link #ofValue(Object)} instead.
      */
     @VisibleForTesting
     @Deprecated
     public Property(Map<?, ?> map) {
         try {
             expression = MAPPER.writeValueAsString(map);
+            this.skipCache = false;
         } catch (JsonProcessingException e) {
             throw new IllegalArgumentException(e);
         }
@@ -79,9 +89,6 @@ public class Property<T> {
     /**
      * Returns a new {@link Property} with no cached rendered value,
      * so that the next render will evaluate its original Pebble expression.
-     * <p>
-     * The returned property will still cache its rendered result.
-     * To re-evaluate on a subsequent render, call {@code skipCache()} again.
      *
      * @return a new {@link Property} without a pre-rendered value
      */
@@ -133,6 +140,7 @@ public class Property<T> {
 
     /**
      * Build a new Property object with a Pebble expression.<br>
+     * This property object will not cache its rendered value.
      * <p>
      * Use {@link #ofValue(Object)} to build a property with a value instead.
      */
@@ -142,11 +150,11 @@ public class Property<T> {
             throw new IllegalArgumentException("'expression' must be a valid Pebble expression");
         }
 
-        return new Property<>(expression);
+        return new Property<>(expression, true);
     }
 
     /**
-     * Render a property then convert it to its target type.<br>
+     * Render a property, then convert it to its target type.<br>
      * <p>
      * This method is designed to be used only by the {@link io.kestra.core.runners.RunContextProperty}.
     *
@@ -164,7 +172,7 @@ public class Property<T> {
     * @see io.kestra.core.runners.RunContextProperty#as(Class, Map)
     */
    public static <T> T as(Property<T> property, PropertyContext context, Class<T> clazz, Map<String, Object> variables) throws IllegalVariableEvaluationException {
-        if (property.value == null) {
+        if (property.skipCache || property.value == null) {
            String rendered = context.render(property.expression, variables);
            property.value = MAPPER.convertValue(rendered, clazz);
        }
@@ -192,7 +200,7 @@ public class Property<T> {
     */
    @SuppressWarnings("unchecked")
    public static <T, I> T asList(Property<T> property, PropertyContext context, Class<I> itemClazz, Map<String, Object> variables) throws IllegalVariableEvaluationException {
-        if (property.value == null) {
+        if (property.skipCache || property.value == null) {
            JavaType type = MAPPER.getTypeFactory().constructCollectionLikeType(List.class, itemClazz);
            try {
                String trimmedExpression = property.expression.trim();
@@ -244,7 +252,7 @@ public class Property<T> {
     */
    @SuppressWarnings({"rawtypes", "unchecked"})
    public static <T, K, V> T asMap(Property<T> property, RunContext runContext, Class<K> keyClass, Class<V> valueClass, Map<String, Object> variables) throws IllegalVariableEvaluationException {
-        if (property.value == null) {
+        if (property.skipCache || property.value == null) {
            JavaType targetMapType = MAPPER.getTypeFactory().constructMapType(Map.class, keyClass, valueClass);

            try {
```
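The recurring edit in `as`, `asList`, and `asMap` above is the same guard: re-render the expression whenever `skipCache` is set, otherwise reuse the first rendered value. A small, self-contained sketch of that memoization-with-bypass pattern (the class below is illustrative only, not a Kestra API):

```java
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Supplier;

// Hypothetical holder mirroring the `skipCache || value == null` guard used
// by Property: the supplier is re-invoked when skipCache is true, otherwise
// the first computed value is reused.
final class CachedValue<T> {
    private final boolean skipCache;
    private final Supplier<T> renderer;
    private T value;

    CachedValue(Supplier<T> renderer, boolean skipCache) {
        this.renderer = renderer;
        this.skipCache = skipCache;
    }

    T get() {
        if (skipCache || value == null) {
            value = renderer.get();
        }
        return value;
    }

    public static void main(String[] args) {
        AtomicInteger counter = new AtomicInteger();
        CachedValue<Integer> cached = new CachedValue<>(counter::incrementAndGet, false);
        System.out.println(cached.get() + " " + cached.get()); // "1 1" -> rendered once, then cached
        counter.set(0);
        CachedValue<Integer> fresh = new CachedValue<>(counter::incrementAndGet, true);
        System.out.println(fresh.get() + " " + fresh.get());   // "1 2" -> re-rendered on every access
    }
}
```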
```diff
@@ -82,8 +82,7 @@ public abstract class FilesService {
     }
 
     private static String resolveUniqueNameForFile(final Path path) {
-        String filename = path.getFileName().toString();
-        String encodedFilename = java.net.URLEncoder.encode(filename, java.nio.charset.StandardCharsets.UTF_8);
-        return IdUtils.from(path.toString()) + "-" + encodedFilename;
+        String filename = path.getFileName().toString().replace(' ', '+');
+        return IdUtils.from(path.toString()) + "-" + filename;
     }
 }
```
```diff
@@ -151,10 +151,7 @@ abstract class AbstractFileFunction implements Function {
             // if there is a trigger of type execution, we also allow accessing a file from the parent execution
             Map<String, String> trigger = (Map<String, String>) context.getVariable(TRIGGER);
 
-            if (!isFileUriValid(trigger.get(NAMESPACE), trigger.get("flowId"), trigger.get("executionId"), path)) {
-                throw new IllegalArgumentException("Unable to read the file '" + path + "' as it didn't belong to the parent execution");
-            }
-            return true;
+            return isFileUriValid(trigger.get(NAMESPACE), trigger.get("flowId"), trigger.get("executionId"), path);
         }
         return false;
     }
```
```diff
@@ -383,6 +383,7 @@ public class ExecutionService {
             if (!isFlowable || s.equals(taskRunId)) {
                 TaskRun newTaskRun;
 
+                State.Type targetState = newState;
                 if (task instanceof Pause pauseTask) {
                     State.Type terminalState = newState == State.Type.RUNNING ? State.Type.SUCCESS : newState;
                     Pause.Resumed _resumed = resumed != null ? resumed : Pause.Resumed.now(terminalState);
@@ -392,23 +393,23 @@
                     // if it's a Pause task with no subtask, we terminate the task
                     if (ListUtils.isEmpty(pauseTask.getTasks()) && ListUtils.isEmpty(pauseTask.getErrors()) && ListUtils.isEmpty(pauseTask.getFinally())) {
                         if (newState == State.Type.RUNNING) {
-                            newTaskRun = newTaskRun.withState(State.Type.SUCCESS);
+                            targetState = State.Type.SUCCESS;
                         } else if (newState == State.Type.KILLING) {
-                            newTaskRun = newTaskRun.withState(State.Type.KILLED);
-                        } else {
-                            newTaskRun = newTaskRun.withState(newState);
+                            targetState = State.Type.KILLED;
                         }
                     } else {
                         // we should set the state to RUNNING so that subtasks are executed
-                        newTaskRun = newTaskRun.withState(State.Type.RUNNING);
+                        targetState = State.Type.RUNNING;
                     }
+                    newTaskRun = newTaskRun.withState(targetState);
                 } else {
-                    newTaskRun = originalTaskRun.withState(newState);
+                    newTaskRun = originalTaskRun.withState(targetState);
                 }
 
 
                 if (originalTaskRun.getAttempts() != null && !originalTaskRun.getAttempts().isEmpty()) {
                     ArrayList<TaskRunAttempt> attempts = new ArrayList<>(originalTaskRun.getAttempts());
-                    attempts.set(attempts.size() - 1, attempts.getLast().withState(newState));
+                    attempts.set(attempts.size() - 1, attempts.getLast().withState(targetState));
                     newTaskRun = newTaskRun.withAttempts(attempts);
                 }
```
```diff
@@ -33,11 +33,13 @@ public class ExecutionsDataFilterValidator implements ConstraintValidator<Execut
             }
         });
 
-        executionsDataFilter.getWhere().forEach(filter -> {
-            if (filter.getField() == Executions.Fields.LABELS && filter.getLabelKey() == null) {
-                violations.add("Label filters must have a `labelKey`.");
-            }
-        });
+        if (executionsDataFilter.getWhere() != null) {
+            executionsDataFilter.getWhere().forEach(filter -> {
+                if (filter.getField() == Executions.Fields.LABELS && filter.getLabelKey() == null) {
+                    violations.add("Label filters must have a `labelKey`.");
+                }
+            });
+        }
 
         if (!violations.isEmpty()) {
             context.disableDefaultConstraintViolation();
```
```diff
@@ -20,8 +20,6 @@ import java.io.BufferedOutputStream;
 import java.io.File;
 import java.io.FileOutputStream;
 import java.net.URI;
-import java.net.URLEncoder;
-import java.nio.charset.StandardCharsets;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.atomic.AtomicReference;
@@ -60,7 +58,15 @@ import static io.kestra.core.utils.Rethrow.throwConsumer;
 public class Download extends AbstractHttp implements RunnableTask<Download.Output> {
     @Schema(title = "Should the task fail when downloading an empty file.")
     @Builder.Default
-    private final Property<Boolean> failOnEmptyResponse = Property.ofValue(true);
+    private Property<Boolean> failOnEmptyResponse = Property.ofValue(true);
 
+    @Schema(
+        title = "Name of the file inside the output.",
+        description = """
+            If not provided, the filename will be extracted from the `Content-Disposition` header.
+            If no `Content-Disposition` header, a name would be generated."""
+    )
+    private Property<String> saveAs;
+
     public Output run(RunContext runContext) throws Exception {
         Logger logger = runContext.logger();
@@ -111,20 +117,22 @@ public class Download extends AbstractHttp implements RunnableTask<Download.Outp
             }
         }
 
-        String filename = null;
-        if (response.getHeaders().firstValue("Content-Disposition").isPresent()) {
-            String contentDisposition = response.getHeaders().firstValue("Content-Disposition").orElseThrow();
-            filename = filenameFromHeader(runContext, contentDisposition);
-        }
-        if (filename != null) {
-            filename = URLEncoder.encode(filename, StandardCharsets.UTF_8);
-        }
+        String rFilename = runContext.render(this.saveAs).as(String.class).orElse(null);
+        if (rFilename == null) {
+            if (response.getHeaders().firstValue("Content-Disposition").isPresent()) {
+                String contentDisposition = response.getHeaders().firstValue("Content-Disposition").orElseThrow();
+                rFilename = filenameFromHeader(runContext, contentDisposition);
+                if (rFilename != null) {
+                    rFilename = rFilename.replace(' ', '+');
+                }
+            }
+        }
 
         logger.debug("File '{}' downloaded with size '{}'", from, size);
 
         return Output.builder()
             .code(response.getStatus().getCode())
-            .uri(runContext.storage().putFile(tempFile, filename))
+            .uri(runContext.storage().putFile(tempFile, rFilename))
             .headers(response.getHeaders().map())
             .length(size.get())
             .build();
```
```diff
@@ -267,6 +267,18 @@ public abstract class AbstractRunnerTest {
         multipleConditionTriggerCaseTest.flowTriggerMultiplePreconditions();
     }
 
+    @Test
+    @LoadFlows({"flows/valids/flow-trigger-multiple-conditions-flow-a.yaml", "flows/valids/flow-trigger-multiple-conditions-flow-listen.yaml"})
+    void flowTriggerMultipleConditions() throws Exception {
+        multipleConditionTriggerCaseTest.flowTriggerMultipleConditions();
+    }
+
+    @Test
+    @LoadFlows({"flows/valids/flow-trigger-mixed-conditions-flow-a.yaml", "flows/valids/flow-trigger-mixed-conditions-flow-listen.yaml"})
+    void flowTriggerMixedConditions() throws Exception {
+        multipleConditionTriggerCaseTest.flowTriggerMixedConditions();
+    }
+
     @Test
     @LoadFlows({"flows/valids/each-null.yaml"})
     void eachWithNull() throws Exception {
```
```diff
@@ -445,6 +445,7 @@ class ExecutionServiceTest {
 
         assertThat(killed.getState().getCurrent()).isEqualTo(State.Type.CANCELLED);
         assertThat(killed.findTaskRunsByTaskId("pause").getFirst().getState().getCurrent()).isEqualTo(State.Type.KILLED);
+        assertThat(killed.findTaskRunsByTaskId("pause").getFirst().getAttempts().getFirst().getState().getCurrent()).isEqualTo(State.Type.KILLED);
         assertThat(killed.getState().getHistories()).hasSize(5);
     }
 
```
```diff
@@ -106,28 +106,28 @@ class FilesServiceTest {
         var runContext = runContextFactory.of();
 
         Path fileWithSpace = tempDir.resolve("with space.txt");
-        Path fileWithUnicode = tempDir.resolve("สวัสดี.txt");
+        Path fileWithUnicode = tempDir.resolve("สวัสดี&.txt");
 
         Files.writeString(fileWithSpace, "content");
         Files.writeString(fileWithUnicode, "content");
 
         Path targetFileWithSpace = runContext.workingDir().path().resolve("with space.txt");
-        Path targetFileWithUnicode = runContext.workingDir().path().resolve("สวัสดี.txt");
+        Path targetFileWithUnicode = runContext.workingDir().path().resolve("สวัสดี&.txt");
 
         Files.copy(fileWithSpace, targetFileWithSpace);
         Files.copy(fileWithUnicode, targetFileWithUnicode);
 
         Map<String, URI> outputFiles = FilesService.outputFiles(
             runContext,
-            List.of("with space.txt", "สวัสดี.txt")
+            List.of("with space.txt", "สวัสดี&.txt")
         );
 
         assertThat(outputFiles).hasSize(2);
         assertThat(outputFiles).containsKey("with space.txt");
-        assertThat(outputFiles).containsKey("สวัสดี.txt");
+        assertThat(outputFiles).containsKey("สวัสดี&.txt");
 
         assertThat(runContext.storage().getFile(outputFiles.get("with space.txt"))).isNotNull();
-        assertThat(runContext.storage().getFile(outputFiles.get("สวัสดี.txt"))).isNotNull();
+        assertThat(runContext.storage().getFile(outputFiles.get("สวัสดี&.txt"))).isNotNull();
     }
 
     private URI createFile() throws IOException {
```
```diff
@@ -212,4 +212,44 @@ public class MultipleConditionTriggerCaseTest {
             e -> e.getState().getCurrent().equals(Type.SUCCESS),
             MAIN_TENANT, "io.kestra.tests.trigger.multiple.preconditions", "flow-trigger-multiple-preconditions-flow-listen", Duration.ofSeconds(1)));
     }
+
+    public void flowTriggerMultipleConditions() throws TimeoutException, QueueException {
+        Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests.trigger.multiple.conditions",
+            "flow-trigger-multiple-conditions-flow-a");
+        assertThat(execution.getTaskRunList().size()).isEqualTo(1);
+        assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
+
+        // trigger is done
+        Execution triggerExecution = runnerUtils.awaitFlowExecution(
+            e -> e.getState().getCurrent().equals(Type.SUCCESS),
+            MAIN_TENANT, "io.kestra.tests.trigger.multiple.conditions", "flow-trigger-multiple-conditions-flow-listen");
+        executionRepository.delete(triggerExecution);
+        assertThat(triggerExecution.getTaskRunList().size()).isEqualTo(1);
+        assertThat(triggerExecution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
+
+        // we assert that we didn't have any other flow triggered
+        assertThrows(RuntimeException.class, () -> runnerUtils.awaitFlowExecution(
+            e -> e.getState().getCurrent().equals(Type.SUCCESS),
+            MAIN_TENANT, "io.kestra.tests.trigger.multiple.conditions", "flow-trigger-multiple-conditions-flow-listen", Duration.ofSeconds(1)));
+    }
+
+    public void flowTriggerMixedConditions() throws TimeoutException, QueueException {
+        Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests.trigger.mixed.conditions",
+            "flow-trigger-mixed-conditions-flow-a");
+        assertThat(execution.getTaskRunList().size()).isEqualTo(1);
+        assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
+
+        // trigger is done
+        Execution triggerExecution = runnerUtils.awaitFlowExecution(
+            e -> e.getState().getCurrent().equals(Type.SUCCESS),
+            MAIN_TENANT, "io.kestra.tests.trigger.mixed.conditions", "flow-trigger-mixed-conditions-flow-listen");
+        executionRepository.delete(triggerExecution);
+        assertThat(triggerExecution.getTaskRunList().size()).isEqualTo(1);
+        assertThat(triggerExecution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
+
+        // we assert that we didn't have any other flow triggered
+        assertThrows(RuntimeException.class, () -> runnerUtils.awaitFlowExecution(
+            e -> e.getState().getCurrent().equals(Type.SUCCESS),
+            MAIN_TENANT, "io.kestra.tests.trigger.mixed.conditions", "flow-trigger-mixed-conditions-flow-listen", Duration.ofSeconds(1)));
+    }
 }
```
```diff
@@ -83,24 +83,37 @@ class RunContextPropertyTest {
         runContextProperty = new RunContextProperty<>(Property.<Map<String, String>>builder().expression("{ \"key\": \"{{ key }}\"}").build(), runContext);
         assertThat(runContextProperty.asMap(String.class, String.class, Map.of("key", "value"))).containsEntry("key", "value");
     }
 
     @Test
     void asShouldReturnCachedRenderedProperty() throws IllegalVariableEvaluationException {
         var runContext = runContextFactory.of();
 
         var runContextProperty = new RunContextProperty<>(Property.<String>builder().expression("{{ variable }}").build(), runContext);
 
         assertThat(runContextProperty.as(String.class, Map.of("variable", "value1"))).isEqualTo(Optional.of("value1"));
         assertThat(runContextProperty.as(String.class, Map.of("variable", "value2"))).isEqualTo(Optional.of("value1"));
     }
 
     @Test
     void asShouldNotReturnCachedRenderedPropertyWithSkipCache() throws IllegalVariableEvaluationException {
         var runContext = runContextFactory.of();
 
         var runContextProperty = new RunContextProperty<>(Property.<String>builder().expression("{{ variable }}").build(), runContext);
 
         assertThat(runContextProperty.as(String.class, Map.of("variable", "value1"))).isEqualTo(Optional.of("value1"));
-        assertThat(runContextProperty.skipCache().as(String.class, Map.of("variable", "value2"))).isEqualTo(Optional.of("value2"));
+        var skippedCache = runContextProperty.skipCache();
+        assertThat(skippedCache.as(String.class, Map.of("variable", "value2"))).isEqualTo(Optional.of("value2"));
+        // assure skipCache is preserved across calls
+        assertThat(skippedCache.as(String.class, Map.of("variable", "value3"))).isEqualTo(Optional.of("value3"));
     }
+
+    @Test
+    void asShouldNotReturnCachedRenderedPropertyWithOfExpression() throws IllegalVariableEvaluationException {
+        var runContext = runContextFactory.of();
+
+        var runContextProperty = new RunContextProperty<String>(Property.ofExpression("{{ variable }}"), runContext);
+
+        assertThat(runContextProperty.as(String.class, Map.of("variable", "value1"))).isEqualTo(Optional.of("value1"));
+        assertThat(runContextProperty.as(String.class, Map.of("variable", "value2"))).isEqualTo(Optional.of("value2"));
+    }
 }
```
```diff
@@ -112,33 +112,6 @@ public class FileSizeFunctionTest {
         assertThat(size).isEqualTo(FILE_SIZE);
     }
 
-    @Test
-    void shouldThrowIllegalArgumentException_givenTrigger_andParentExecution_andMissingNamespace() throws IOException {
-        String executionId = IdUtils.create();
-        URI internalStorageURI = getInternalStorageURI(executionId);
-        URI internalStorageFile = getInternalStorageFile(internalStorageURI);
-
-        Map<String, Object> variables = Map.of(
-            "flow", Map.of(
-                "id", "subflow",
-                "namespace", NAMESPACE,
-                "tenantId", MAIN_TENANT),
-            "execution", Map.of("id", IdUtils.create()),
-            "trigger", Map.of(
-                "flowId", FLOW,
-                "executionId", executionId,
-                "tenantId", MAIN_TENANT
-            )
-        );
-
-        Exception ex = assertThrows(
-            IllegalArgumentException.class,
-            () -> variableRenderer.render("{{ fileSize('" + internalStorageFile + "') }}", variables)
-        );
-
-        assertTrue(ex.getMessage().startsWith("Unable to read the file"), "Exception message doesn't match expected one");
-    }
-
     @Test
     void returnsCorrectSize_givenUri_andCurrentExecution() throws IOException, IllegalVariableEvaluationException {
         String executionId = IdUtils.create();
```
```diff
@@ -259,6 +259,27 @@ class ReadFileFunctionTest {
         assertThat(variableRenderer.render("{{ read(nsfile) }}", variables)).isEqualTo("Hello World");
     }
 
+    @Test
+    void shouldReadChildFileEvenIfTrigger() throws IOException, IllegalVariableEvaluationException {
+        String namespace = "my.namespace";
+        String flowId = "flow";
+        String executionId = IdUtils.create();
+        URI internalStorageURI = URI.create("/" + namespace.replace(".", "/") + "/" + flowId + "/executions/" + executionId + "/tasks/task/" + IdUtils.create() + "/123456.ion");
+        URI internalStorageFile = storageInterface.put(MAIN_TENANT, namespace, internalStorageURI, new ByteArrayInputStream("Hello from a task output".getBytes()));
+
+        Map<String, Object> variables = Map.of(
+            "flow", Map.of(
+                "id", "flow",
+                "namespace", "notme",
+                "tenantId", MAIN_TENANT),
+            "execution", Map.of("id", "notme"),
+            "trigger", Map.of("namespace", "notme", "flowId", "parent", "executionId", "parent")
+        );
+
+        String render = variableRenderer.render("{{ read('" + internalStorageFile + "') }}", variables);
+        assertThat(render).isEqualTo("Hello from a task output");
+    }
+
     private URI createFile() throws IOException {
         File tempFile = File.createTempFile("file", ".txt");
         Files.write(tempFile.toPath(), "Hello World".getBytes());
```
```diff
@@ -12,20 +12,24 @@ import io.kestra.core.queues.QueueInterface;
 import io.kestra.core.repositories.FlowRepositoryInterface;
 import io.kestra.core.runners.ConcurrencyLimit;
 import io.kestra.core.runners.RunnerUtils;
+import io.kestra.core.utils.TestsUtils;
 import jakarta.inject.Inject;
 import jakarta.inject.Named;
 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.TestInstance;
+import reactor.core.publisher.Flux;
 
 import java.time.Duration;
 import java.util.List;
 import java.util.Optional;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
 
 import static io.kestra.core.utils.Rethrow.throwRunnable;
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 
 @KestraTest(startRunner = true)
 @TestInstance(TestInstance.Lifecycle.PER_CLASS)
@@ -54,14 +58,29 @@ class ConcurrencyLimitServiceTest {
 
     @Test
     @LoadFlows("flows/valids/flow-concurrency-queue.yml")
-    void unqueueExecution() throws QueueException, TimeoutException {
+    void unqueueExecution() throws QueueException, TimeoutException, InterruptedException {
         // run a first flow so the second is queued
-        runnerUtils.runOneUntilRunning(TENANT_ID, TESTS_FLOW_NS, "flow-concurrency-queue");
+        Execution first = runnerUtils.runOneUntilRunning(TENANT_ID, TESTS_FLOW_NS, "flow-concurrency-queue");
         Execution result = runUntilQueued(TESTS_FLOW_NS, "flow-concurrency-queue");
         assertThat(result.getState().isQueued()).isTrue();
 
+        // await for the execution to be terminated
+        CountDownLatch terminated = new CountDownLatch(2);
+        Flux<Execution> receive = TestsUtils.receive(executionQueue, (either) -> {
+            if (either.getLeft().getId().equals(first.getId()) && either.getLeft().getState().isTerminated()) {
+                terminated.countDown();
+            }
+            if (either.getLeft().getId().equals(result.getId()) && either.getLeft().getState().isTerminated()) {
+                terminated.countDown();
+            }
+        });
+
         Execution unqueued = concurrencyLimitService.unqueue(result, State.Type.RUNNING);
         assertThat(unqueued.getState().isRunning()).isTrue();
+        executionQueue.emit(unqueued);
+
+        assertTrue(terminated.await(10, TimeUnit.SECONDS));
+        receive.blockLast();
     }
 
     @Test
@@ -73,7 +92,6 @@ class ConcurrencyLimitServiceTest {
         assertThat(limit.get().getTenantId()).isEqualTo(execution.getTenantId());
         assertThat(limit.get().getNamespace()).isEqualTo(execution.getNamespace());
         assertThat(limit.get().getFlowId()).isEqualTo(execution.getFlowId());
-        assertThat(limit.get().getRunning()).isEqualTo(0);
     }
 
     @Test
```
```diff
@@ -156,6 +156,26 @@ class DownloadTest {
         assertThat(output.getUri().toString()).endsWith("filename.jpg");
     }
 
+    @Test
+    void fileNameShouldOverrideContentDisposition() throws Exception {
+        EmbeddedServer embeddedServer = applicationContext.getBean(EmbeddedServer.class);
+        embeddedServer.start();
+
+        Download task = Download.builder()
+            .id(DownloadTest.class.getSimpleName())
+            .type(DownloadTest.class.getName())
+            .uri(Property.ofValue(embeddedServer.getURI() + "/content-disposition"))
+            .saveAs(Property.ofValue("hardcoded-filename.jpg"))
+            .build();
+
+        RunContext runContext = TestsUtils.mockRunContext(this.runContextFactory, task, ImmutableMap.of());
+
+        Download.Output output = task.run(runContext);
+
+        assertThat(output.getUri().toString()).endsWith("hardcoded-filename.jpg");
+    }
+
     @Test
     void contentDispositionWithPath() throws Exception {
         EmbeddedServer embeddedServer = applicationContext.getBean(EmbeddedServer.class);
```
```diff
@@ -0,0 +1,10 @@
+id: flow-trigger-mixed-conditions-flow-a
+namespace: io.kestra.tests.trigger.mixed.conditions
+
+labels:
+  some: label
+
+tasks:
+  - id: only
+    type: io.kestra.plugin.core.debug.Return
+    format: "from parents: {{execution.id}}"
```

```diff
@@ -0,0 +1,25 @@
+id: flow-trigger-mixed-conditions-flow-listen
+namespace: io.kestra.tests.trigger.mixed.conditions
+
+triggers:
+  - id: on_completion
+    type: io.kestra.plugin.core.trigger.Flow
+    states: [ SUCCESS ]
+    conditions:
+      - type: io.kestra.plugin.core.condition.ExecutionFlow
+        namespace: io.kestra.tests.trigger.mixed.conditions
+        flowId: flow-trigger-mixed-conditions-flow-a
+  - id: on_failure
+    type: io.kestra.plugin.core.trigger.Flow
+    states: [ FAILED ]
+    preconditions:
+      id: flowsFailure
+      flows:
+        - namespace: io.kestra.tests.trigger.multiple.conditions
+          flowId: flow-trigger-multiple-conditions-flow-a
+          states: [FAILED]
+
+tasks:
+  - id: only
+    type: io.kestra.plugin.core.debug.Return
+    format: "It works"
```

```diff
@@ -0,0 +1,10 @@
+id: flow-trigger-multiple-conditions-flow-a
+namespace: io.kestra.tests.trigger.multiple.conditions
+
+labels:
+  some: label
+
+tasks:
+  - id: only
+    type: io.kestra.plugin.core.debug.Return
+    format: "from parents: {{execution.id}}"
```

```diff
@@ -0,0 +1,23 @@
+id: flow-trigger-multiple-conditions-flow-listen
+namespace: io.kestra.tests.trigger.multiple.conditions
+
+triggers:
+  - id: on_completion
+    type: io.kestra.plugin.core.trigger.Flow
+    states: [ SUCCESS ]
+    conditions:
+      - type: io.kestra.plugin.core.condition.ExecutionFlow
+        namespace: io.kestra.tests.trigger.multiple.conditions
+        flowId: flow-trigger-multiple-conditions-flow-a
+  - id: on_failure
+    type: io.kestra.plugin.core.trigger.Flow
+    states: [ FAILED ]
+    conditions:
+      - type: io.kestra.plugin.core.condition.ExecutionFlow
+        namespace: io.kestra.tests.trigger.multiple.conditions
+        flowId: flow-trigger-multiple-conditions-flow-a
+
+tasks:
+  - id: only
+    type: io.kestra.plugin.core.debug.Return
+    format: "It works"
```
@@ -50,16 +50,147 @@ public class FlowTriggerService {
|
||||
.map(io.kestra.plugin.core.trigger.Flow.class::cast);
|
||||
}
|
||||
|
||||
public List<Execution> computeExecutionsFromFlowTriggers(Execution execution, List<? extends Flow> allFlows, Optional<MultipleConditionStorageInterface> multipleConditionStorage) {
|
||||
List<FlowWithFlowTrigger> validTriggersBeforeMultipleConditionEval = allFlows.stream()
|
||||
/**
|
||||
* This method computes executions to trigger from flow triggers from a given execution.
|
||||
* It only computes those depending on standard (non-multiple / non-preconditions) conditions, so it must be used
|
||||
* in conjunction with {@link #computeExecutionsFromFlowTriggerPreconditions(Execution, Flow, MultipleConditionStorageInterface)}.
|
||||
*/
|
||||
public List<Execution> computeExecutionsFromFlowTriggerConditions(Execution execution, Flow flow) {
|
||||
List<FlowWithFlowTrigger> flowWithFlowTriggers = computeFlowTriggers(execution, flow)
|
||||
.stream()
|
||||
// we must filter on no multiple conditions and no preconditions to avoid evaluating two times triggers that have standard conditions and multiple conditions
|
||||
.filter(it -> it.getTrigger().getPreconditions() == null && ListUtils.emptyOnNull(it.getTrigger().getConditions()).stream().noneMatch(MultipleCondition.class::isInstance))
|
||||
.toList();
|
||||
|
||||
// short-circuit empty triggers to evaluate
|
||||
if (flowWithFlowTriggers.isEmpty()) {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
// compute all executions to create from flow triggers without taken into account multiple conditions
|
||||
return flowWithFlowTriggers.stream()
|
||||
.map(f -> f.getTrigger().evaluate(
|
||||
Optional.empty(),
|
||||
runContextFactory.of(f.getFlow(), execution),
|
||||
f.getFlow(),
|
||||
execution
|
||||
))
|
||||
.filter(Optional::isPresent)
|
||||
.map(Optional::get)
|
||||
.toList();
|
||||
}
|
||||
|
||||
/**
|
||||
* This method computes executions to trigger from flow triggers from a given execution.
|
||||
* It only computes those depending on multiple conditions and preconditions, so it must be used
|
||||
* in conjunction with {@link #computeExecutionsFromFlowTriggerConditions(Execution, Flow)}.
|
||||
*/
|
||||
public List<Execution> computeExecutionsFromFlowTriggerPreconditions(Execution execution, Flow flow, MultipleConditionStorageInterface multipleConditionStorage) {
|
||||
List<FlowWithFlowTrigger> flowWithFlowTriggers = computeFlowTriggers(execution, flow)
|
||||
.stream()
|
||||
// we must filter on multiple conditions or preconditions to avoid evaluating two times triggers that only have standard conditions
|
||||
.filter(flowWithFlowTrigger -> flowWithFlowTrigger.getTrigger().getPreconditions() != null || ListUtils.emptyOnNull(flowWithFlowTrigger.getTrigger().getConditions()).stream().anyMatch(MultipleCondition.class::isInstance))
|
||||
.toList();
|
||||
|
||||
// short-circuit empty triggers to evaluate
|
||||
if (flowWithFlowTriggers.isEmpty()) {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
List<FlowWithFlowTriggerAndMultipleCondition> flowWithMultipleConditionsToEvaluate = flowWithFlowTriggers.stream()
|
||||
.flatMap(flowWithFlowTrigger -> flowTriggerMultipleConditions(flowWithFlowTrigger)
|
||||
.map(multipleCondition -> new FlowWithFlowTriggerAndMultipleCondition(
|
||||
flowWithFlowTrigger.getFlow(),
|
||||
multipleConditionStorage.getOrCreate(flowWithFlowTrigger.getFlow(), multipleCondition, execution.getOutputs()),
|
||||
flowWithFlowTrigger.getTrigger(),
|
||||
multipleCondition
|
||||
)
|
||||
)
|
||||
)
|
||||
// avoid evaluating expired windows (for ex for daily time window or deadline)
|
||||
.filter(flowWithFlowTriggerAndMultipleCondition -> flowWithFlowTriggerAndMultipleCondition.getMultipleConditionWindow().isValid(ZonedDateTime.now()))
|
||||
.toList();
|
||||
|
||||
// evaluate multiple conditions
|
||||
Map<FlowWithFlowTriggerAndMultipleCondition, MultipleConditionWindow> multipleConditionWindowsByFlow = flowWithMultipleConditionsToEvaluate.stream().map(f -> {
|
||||
Map<String, Boolean> results = f.getMultipleCondition()
|
||||
.getConditions()
|
||||
.entrySet()
|
||||
.stream()
|
||||
.map(e -> new AbstractMap.SimpleEntry<>(
|
||||
e.getKey(),
|
||||
conditionService.isValid(e.getValue(), f.getFlow(), execution)
|
||||
))
|
||||
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
|
||||
|
||||
return Map.entry(f, f.getMultipleConditionWindow().with(results));
|
||||
})
|
||||
.filter(e -> !e.getValue().getResults().isEmpty())
|
||||
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
|
||||
|
||||
// persist results
|
||||
multipleConditionStorage.save(new ArrayList<>(multipleConditionWindowsByFlow.values()));
|
||||
|
||||
// compute all executions to create from flow triggers now that multiple conditions storage is populated
|
||||
List<Execution> executions = flowWithFlowTriggers.stream()
|
||||
// will evaluate conditions
|
||||
.filter(flowWithFlowTrigger ->
|
||||
conditionService.isValid(
|
||||
flowWithFlowTrigger.getTrigger(),
|
||||
flowWithFlowTrigger.getFlow(),
|
||||
execution,
|
||||
multipleConditionStorage
|
||||
)
|
||||
)
|
||||
// will evaluate preconditions
|
||||
.filter(flowWithFlowTrigger ->
|
||||
conditionService.isValid(
|
||||
flowWithFlowTrigger.getTrigger().getPreconditions(),
|
||||
flowWithFlowTrigger.getFlow(),
|
||||
execution,
|
||||
multipleConditionStorage
|
||||
)
|
||||
)
|
||||
.map(f -> f.getTrigger().evaluate(
|
||||
Optional.of(multipleConditionStorage),
|
||||
runContextFactory.of(f.getFlow(), execution),
|
||||
f.getFlow(),
|
||||
execution
|
||||
))
|
||||
.filter(Optional::isPresent)
|
||||
.map(Optional::get)
|
||||
.toList();
|
||||
|
||||
// purge fulfilled or expired multiple condition windows
|
||||
Stream.concat(
|
||||
multipleConditionWindowsByFlow.entrySet().stream()
|
||||
.map(e -> Map.entry(
|
||||
e.getKey().getMultipleCondition(),
|
||||
e.getValue()
|
||||
))
|
||||
.filter(e -> !Boolean.FALSE.equals(e.getKey().getResetOnSuccess()) &&
|
||||
e.getKey().getConditions().size() == Optional.ofNullable(e.getValue().getResults()).map(Map::size).orElse(0)
|
||||
)
|
||||
.map(Map.Entry::getValue),
|
||||
multipleConditionStorage.expired(execution.getTenantId()).stream()
|
||||
).forEach(multipleConditionStorage::delete);
|
||||
|
||||
return executions;
|
||||
}
|
||||
|
||||
private List<FlowWithFlowTrigger> computeFlowTriggers(Execution execution, Flow flow) {
|
||||
if (
|
||||
// prevent recursive flow triggers
.filter(flow -> flowService.removeUnwanted(flow, execution))
// filter out Test Executions
.filter(flow -> execution.getKind() == null)
// ensure flow & triggers are enabled
.filter(flow -> !flow.isDisabled() && !(flow instanceof FlowWithException))
.filter(flow -> flow.getTriggers() != null && !flow.getTriggers().isEmpty())
.flatMap(flow -> flowTriggers(flow).map(trigger -> new FlowWithFlowTrigger(flow, trigger)))
!flowService.removeUnwanted(flow, execution) ||
// filter out Test Executions
execution.getKind() != null ||
// ensure flow & triggers are enabled
flow.isDisabled() || flow instanceof FlowWithException ||
flow.getTriggers() == null || flow.getTriggers().isEmpty()) {
return Collections.emptyList();
}

return flowTriggers(flow).map(trigger -> new FlowWithFlowTrigger(flow, trigger))
// filter on the execution state the flow listen to
.filter(flowWithFlowTrigger -> flowWithFlowTrigger.getTrigger().getStates().contains(execution.getState().getCurrent()))
// validate flow triggers conditions excluding multiple conditions

@@ -74,96 +205,6 @@ public class FlowTriggerService {
execution
)
)).toList();

// short-circuit empty triggers to evaluate
if (validTriggersBeforeMultipleConditionEval.isEmpty()) {
return Collections.emptyList();
}

Map<FlowWithFlowTriggerAndMultipleCondition, MultipleConditionWindow> multipleConditionWindowsByFlow = null;
if (multipleConditionStorage.isPresent()) {
List<FlowWithFlowTriggerAndMultipleCondition> flowWithMultipleConditionsToEvaluate = validTriggersBeforeMultipleConditionEval.stream()
.flatMap(flowWithFlowTrigger -> flowTriggerMultipleConditions(flowWithFlowTrigger)
.map(multipleCondition -> new FlowWithFlowTriggerAndMultipleCondition(
flowWithFlowTrigger.getFlow(),
multipleConditionStorage.get().getOrCreate(flowWithFlowTrigger.getFlow(), multipleCondition, execution.getOutputs()),
flowWithFlowTrigger.getTrigger(),
multipleCondition
)
)
)
// avoid evaluating expired windows (for ex for daily time window or deadline)
.filter(flowWithFlowTriggerAndMultipleCondition -> flowWithFlowTriggerAndMultipleCondition.getMultipleConditionWindow().isValid(ZonedDateTime.now()))
.toList();

// evaluate multiple conditions
multipleConditionWindowsByFlow = flowWithMultipleConditionsToEvaluate.stream().map(f -> {
Map<String, Boolean> results = f.getMultipleCondition()
.getConditions()
.entrySet()
.stream()
.map(e -> new AbstractMap.SimpleEntry<>(
e.getKey(),
conditionService.isValid(e.getValue(), f.getFlow(), execution)
))
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));

return Map.entry(f, f.getMultipleConditionWindow().with(results));
})
.filter(e -> !e.getValue().getResults().isEmpty())
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));

// persist results
multipleConditionStorage.get().save(new ArrayList<>(multipleConditionWindowsByFlow.values()));
}

// compute all executions to create from flow triggers now that multiple conditions storage is populated
List<Execution> executions = validTriggersBeforeMultipleConditionEval.stream()
// will evaluate conditions
.filter(flowWithFlowTrigger ->
conditionService.isValid(
flowWithFlowTrigger.getTrigger(),
flowWithFlowTrigger.getFlow(),
execution,
multipleConditionStorage.orElse(null)
)
)
// will evaluate preconditions
.filter(flowWithFlowTrigger ->
conditionService.isValid(
flowWithFlowTrigger.getTrigger().getPreconditions(),
flowWithFlowTrigger.getFlow(),
execution,
multipleConditionStorage.orElse(null)
)
)
.map(f -> f.getTrigger().evaluate(
multipleConditionStorage,
runContextFactory.of(f.getFlow(), execution),
f.getFlow(),
execution
))
.filter(Optional::isPresent)
.map(Optional::get)
.toList();

if (multipleConditionStorage.isPresent()) {
// purge fulfilled or expired multiple condition windows
Stream.concat(
multipleConditionWindowsByFlow.entrySet().stream()
.map(e -> Map.entry(
e.getKey().getMultipleCondition(),
e.getValue()
))
.filter(e -> !Boolean.FALSE.equals(e.getKey().getResetOnSuccess()) &&
e.getKey().getConditions().size() == Optional.ofNullable(e.getValue().getResults()).map(Map::size).orElse(0)
)
.map(Map.Entry::getValue),
multipleConditionStorage.get().expired(execution.getTenantId()).stream()
).forEach(multipleConditionStorage.get()::delete);
}

return executions;
}

private Stream<MultipleCondition> flowTriggerMultipleConditions(FlowWithFlowTrigger flowWithFlowTrigger) {
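For readers following the hunk above: the multiple-condition handling boils down to evaluating each named sub-condition against the incoming execution, merging the boolean results into a persisted window, and firing the trigger once every sub-condition has been satisfied at least once. Below is a minimal, self-contained Java sketch of that merge step only; the types are simplified stand-ins for illustration, not Kestra's actual MultipleConditionWindow or Execution classes.

    import java.util.HashMap;
    import java.util.Map;
    import java.util.function.Predicate;

    // Simplified stand-in for a multiple-condition window (illustrative only, not Kestra's class).
    class ConditionWindowSketch {
        private final Map<String, Boolean> results = new HashMap<>();

        // Merge newly evaluated sub-condition results; a true value persists across executions.
        ConditionWindowSketch with(Map<String, Boolean> evaluated) {
            evaluated.forEach((key, ok) -> results.merge(key, ok, Boolean::logicalOr));
            return this;
        }

        // The trigger would fire once every declared sub-condition has been seen as true.
        boolean isFulfilled(int declaredConditionCount) {
            return results.values().stream().filter(Boolean::booleanValue).count() == declaredConditionCount;
        }

        public static void main(String[] args) {
            Map<String, Predicate<String>> conditions = Map.of(
                "flow-a-succeeded", execution -> execution.contains("flow-a"),
                "flow-b-succeeded", execution -> execution.contains("flow-b")
            );

            ConditionWindowSketch window = new ConditionWindowSketch();

            // First execution only satisfies the first sub-condition...
            Map<String, Boolean> firstPass = new HashMap<>();
            conditions.forEach((key, predicate) -> firstPass.put(key, predicate.test("flow-a:SUCCESS")));
            window.with(firstPass);
            System.out.println(window.isFulfilled(conditions.size())); // false

            // ...the second execution completes the window, so the flow trigger would fire.
            Map<String, Boolean> secondPass = new HashMap<>();
            conditions.forEach((key, predicate) -> secondPass.put(key, predicate.test("flow-b:SUCCESS")));
            window.with(secondPass);
            System.out.println(window.isFulfilled(conditions.size())); // true
        }
    }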
@@ -25,8 +25,7 @@ import static org.assertj.core.api.Assertions.assertThat;

@KestraTest
class FlowTriggerServiceTest {
public static final List<Label> EMPTY_LABELS = List.of();
public static final Optional<MultipleConditionStorageInterface> EMPTY_MULTIPLE_CONDITION_STORAGE = Optional.empty();
private static final List<Label> EMPTY_LABELS = List.of();

@Inject
private TestRunContextFactory runContextFactory;

@@ -56,14 +55,27 @@ class FlowTriggerServiceTest {

var simpleFlowExecution = Execution.newExecution(simpleFlow, EMPTY_LABELS).withState(State.Type.SUCCESS);

var resultingExecutionsToRun = flowTriggerService.computeExecutionsFromFlowTriggers(
var resultingExecutionsToRun = flowTriggerService.computeExecutionsFromFlowTriggerConditions(
simpleFlowExecution,
List.of(simpleFlow, flowWithFlowTrigger),
EMPTY_MULTIPLE_CONDITION_STORAGE
flowWithFlowTrigger
);

assertThat(resultingExecutionsToRun).size().isEqualTo(1);
assertThat(resultingExecutionsToRun.get(0).getFlowId()).isEqualTo(flowWithFlowTrigger.getId());
assertThat(resultingExecutionsToRun.getFirst().getFlowId()).isEqualTo(flowWithFlowTrigger.getId());
}

@Test
void computeExecutionsFromFlowTriggers_none() {
var simpleFlow = aSimpleFlow();

var simpleFlowExecution = Execution.newExecution(simpleFlow, EMPTY_LABELS).withState(State.Type.SUCCESS);

var resultingExecutionsToRun = flowTriggerService.computeExecutionsFromFlowTriggerConditions(
simpleFlowExecution,
simpleFlow
);

assertThat(resultingExecutionsToRun).isEmpty();
}

@Test

@@ -81,10 +93,9 @@ class FlowTriggerServiceTest {

var simpleFlowExecution = Execution.newExecution(simpleFlow, EMPTY_LABELS).withState(State.Type.CREATED);

var resultingExecutionsToRun = flowTriggerService.computeExecutionsFromFlowTriggers(
var resultingExecutionsToRun = flowTriggerService.computeExecutionsFromFlowTriggerConditions(
simpleFlowExecution,
List.of(simpleFlow, flowWithFlowTrigger),
EMPTY_MULTIPLE_CONDITION_STORAGE
flowWithFlowTrigger
);

assertThat(resultingExecutionsToRun).size().isEqualTo(0);

@@ -109,10 +120,9 @@ class FlowTriggerServiceTest {
.kind(ExecutionKind.TEST)
.build();

var resultingExecutionsToRun = flowTriggerService.computeExecutionsFromFlowTriggers(
var resultingExecutionsToRun = flowTriggerService.computeExecutionsFromFlowTriggerConditions(
simpleFlowExecutionComingFromATest,
List.of(simpleFlow, flowWithFlowTrigger),
EMPTY_MULTIPLE_CONDITION_STORAGE
flowWithFlowTrigger
);

assertThat(resultingExecutionsToRun).size().isEqualTo(0);
@@ -1,4 +1,4 @@
version=1.1.0-SNAPSHOT
version=1.1.5

org.gradle.jvmargs=-Xmx2g -XX:MaxMetaspaceSize=512m -XX:+HeapDumpOnOutOfMemoryError
org.gradle.parallel=true
@@ -1,8 +1,10 @@
package io.kestra.repository.mysql;

import io.kestra.core.models.triggers.Trigger;
import io.kestra.core.runners.ScheduleContextInterface;
import io.kestra.core.utils.DateUtils;
import io.kestra.jdbc.repository.AbstractJdbcTriggerRepository;
import io.kestra.jdbc.runner.JdbcSchedulerContext;
import io.kestra.jdbc.services.JdbcFilterService;
import jakarta.inject.Inject;
import jakarta.inject.Named;

@@ -11,6 +13,10 @@ import org.jooq.Condition;
import org.jooq.Field;
import org.jooq.impl.DSL;

import java.time.LocalDateTime;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.temporal.Temporal;
import java.util.Date;
import java.util.List;

@@ -45,4 +51,11 @@ public class MysqlTriggerRepository extends AbstractJdbcTriggerRepository {
throw new IllegalArgumentException("Unsupported GroupType: " + groupType);
}
}

@Override
protected Temporal toNextExecutionTime(ZonedDateTime now) {
// next_execution_date in the table is stored in UTC
// convert 'now' to UTC LocalDateTime to avoid any timezone/offset interpretation by the database.
return now.withZoneSameInstant(ZoneOffset.UTC).toLocalDateTime();
}
}
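The MySQL override above converts the scheduler's ZonedDateTime to a UTC LocalDateTime so the driver cannot re-interpret the offset when comparing against the UTC-stored next_execution_date column. A small demonstration of what that conversion does (the class name and sample instant are illustrative, not taken from the repository):

    import java.time.LocalDateTime;
    import java.time.ZoneId;
    import java.time.ZoneOffset;
    import java.time.ZonedDateTime;

    public class UtcConversionDemo {
        public static void main(String[] args) {
            ZonedDateTime parisNow = ZonedDateTime.of(2025, 1, 15, 10, 30, 0, 0, ZoneId.of("Europe/Paris"));

            // Same instant, expressed in UTC, with the zone information dropped:
            LocalDateTime utcWallClock = parisNow.withZoneSameInstant(ZoneOffset.UTC).toLocalDateTime();

            System.out.println(parisNow);     // 2025-01-15T10:30+01:00[Europe/Paris]
            System.out.println(utcWallClock); // 2025-01-15T09:30
        }
    }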
@@ -32,6 +32,7 @@ import reactor.core.publisher.Flux;
import reactor.core.publisher.FluxSink;

import java.time.ZonedDateTime;
import java.time.temporal.Temporal;
import java.util.*;
import java.util.function.Function;
import java.util.stream.Collectors;

@@ -151,7 +152,7 @@ public abstract class AbstractJdbcTriggerRepository extends AbstractJdbcReposito
.select(field("value"))
.from(this.jdbcRepository.getTable())
.where(
(field("next_execution_date").lessThan(now.toOffsetDateTime())
(field("next_execution_date").lessThan(toNextExecutionTime(now))
// we check for null for backwards compatibility
.or(field("next_execution_date").isNull()))
.and(field("execution_id").isNull())

@@ -162,14 +163,14 @@ public abstract class AbstractJdbcTriggerRepository extends AbstractJdbcReposito
.fetch()
.map(r -> this.jdbcRepository.deserialize(r.get("value", String.class)));
}

public List<Trigger> findByNextExecutionDateReadyButLockedTriggers(ZonedDateTime now) {
return this.jdbcRepository.getDslContextWrapper()
.transactionResult(configuration -> DSL.using(configuration)
.select(field("value"))
.from(this.jdbcRepository.getTable())
.where(
(field("next_execution_date").lessThan(now.toOffsetDateTime())
(field("next_execution_date").lessThan(toNextExecutionTime(now))
// we check for null for backwards compatibility
.or(field("next_execution_date").isNull()))
.and(field("execution_id").isNotNull())

@@ -178,6 +179,10 @@ public abstract class AbstractJdbcTriggerRepository extends AbstractJdbcReposito
.fetch()
.map(r -> this.jdbcRepository.deserialize(r.get("value", String.class))));
}

protected Temporal toNextExecutionTime(ZonedDateTime now) {
return now.toOffsetDateTime();
}

public Trigger save(Trigger trigger, ScheduleContextInterface scheduleContextInterface) {
JdbcSchedulerContext jdbcSchedulerContext = (JdbcSchedulerContext) scheduleContextInterface;
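The base repository hunk above introduces toNextExecutionTime(now) as a protected hook with an OffsetDateTime default, which the MySQL repository overrides; the jOOQ query stays identical across databases while each dialect picks the temporal type it actually stores. A rough sketch of that template-method shape, using simplified classes and none of the real jOOQ wiring:

    import java.time.ZoneOffset;
    import java.time.ZonedDateTime;
    import java.time.temporal.Temporal;

    // Sketch of the overridable hook (illustrative stand-ins, not the real repositories).
    abstract class TriggerRepositorySketch {
        // Default behaviour: databases that understand offsets get an OffsetDateTime.
        protected Temporal toNextExecutionTime(ZonedDateTime now) {
            return now.toOffsetDateTime();
        }

        Temporal boundUsedInQuery(ZonedDateTime now) {
            // In the real repository this value feeds field("next_execution_date").lessThan(...).
            return toNextExecutionTime(now);
        }
    }

    class MysqlTriggerRepositorySketch extends TriggerRepositorySketch {
        @Override
        protected Temporal toNextExecutionTime(ZonedDateTime now) {
            // The MySQL column holds a UTC wall-clock value, so normalise to UTC and drop the offset.
            return now.withZoneSameInstant(ZoneOffset.UTC).toLocalDateTime();
        }
    }

    class HookDemo {
        public static void main(String[] args) {
            ZonedDateTime now = ZonedDateTime.now();
            System.out.println(new TriggerRepositorySketch() {}.boundUsedInQuery(now)); // OffsetDateTime
            System.out.println(new MysqlTriggerRepositorySketch().boundUsedInQuery(now)); // UTC LocalDateTime
        }
    }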
@@ -22,10 +22,10 @@ public class AbstractJdbcConcurrencyLimitStorage extends AbstractJdbcRepository
}

/**
* Fetch the concurrency limit counter then process the count using the consumer function.
* It locked the raw and is wrapped in a transaction so the consumer should use the provided dslContext for any database access.
* Fetch the concurrency limit counter, then process the count using the consumer function.
* It locked the raw and is wrapped in a transaction, so the consumer should use the provided dslContext for any database access.
* <p>
* Note that to avoid a race when no concurrency limit counter exists, it first always try to insert a 0 counter.
* Note that to avoid a race when no concurrency limit counter exists, it first always tries to insert a 0 counter.
*/
public ExecutionRunning countThenProcess(FlowInterface flow, BiFunction<DSLContext, ConcurrencyLimit, Pair<ExecutionRunning, ConcurrencyLimit>> consumer) {
return this.jdbcRepository

@@ -97,7 +97,7 @@ public class AbstractJdbcConcurrencyLimitStorage extends AbstractJdbcRepository
}

/**
* Returns all concurrency limit from the database
* Returns all concurrency limits from the database
*/
public List<ConcurrencyLimit> find(String tenantId) {
return this.jdbcRepository

@@ -132,8 +132,7 @@ public class AbstractJdbcConcurrencyLimitStorage extends AbstractJdbcRepository
.and(field("namespace").eq(flow.getNamespace()))
.and(field("flow_id").eq(flow.getId()));

return Optional.ofNullable(select.forUpdate().fetchOne())
.map(record -> this.jdbcRepository.map(record));
return this.jdbcRepository.fetchOne(select.forUpdate());
}

private void update(DSLContext dslContext, ConcurrencyLimit concurrencyLimit) {
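The Javadoc in the hunk above describes the locking pattern: always try to insert a zero counter first, so two executors racing on a missing row cannot both create it, then lock the row with SELECT ... FOR UPDATE inside the same transaction before reading and updating the count. A plain-JDBC sketch of that pattern follows; the table and column names and the PostgreSQL-style ON CONFLICT clause are illustrative placeholders, not Kestra's actual schema.

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;
    import java.sql.SQLException;

    // Plain-JDBC sketch of "insert a 0 counter if missing, then lock and read it".
    public class ConcurrencyCounterSketch {
        public int lockAndReadCounter(Connection connection, String flowId) throws SQLException {
            connection.setAutoCommit(false);

            // 1) Make sure a row exists; ignoring the duplicate avoids a race between two executors.
            //    (PostgreSQL syntax shown; MySQL would use INSERT IGNORE.)
            try (PreparedStatement insert = connection.prepareStatement(
                    "INSERT INTO concurrency_limit (flow_id, count) VALUES (?, 0) ON CONFLICT DO NOTHING")) {
                insert.setString(1, flowId);
                insert.executeUpdate();
            }

            // 2) Lock the row for the rest of the transaction, then read the current count.
            try (PreparedStatement select = connection.prepareStatement(
                    "SELECT count FROM concurrency_limit WHERE flow_id = ? FOR UPDATE")) {
                select.setString(1, flowId);
                try (ResultSet resultSet = select.executeQuery()) {
                    resultSet.next(); // the insert above guarantees the row exists
                    return resultSet.getInt("count");
                }
            }
            // The caller updates the counter and commits; the row lock is released on commit/rollback.
        }
    }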
@@ -12,7 +12,6 @@ import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.BiConsumer;
import java.util.function.Consumer;

public abstract class AbstractJdbcExecutionQueuedStorage extends AbstractJdbcRepository {
protected io.kestra.jdbc.AbstractJdbcRepository<ExecutionQueued> jdbcRepository;

@@ -70,18 +69,12 @@ public abstract class AbstractJdbcExecutionQueuedStorage extends AbstractJdbcRep
this.jdbcRepository
.getDslContextWrapper()
.transaction(configuration -> {
var select = DSL
.using(configuration)
.select(AbstractJdbcRepository.field("value"))
.from(this.jdbcRepository.getTable())
.where(buildTenantCondition(execution.getTenantId()))
.and(field("key").eq(IdUtils.fromParts(execution.getTenantId(), execution.getNamespace(), execution.getFlowId(), execution.getId())))
.forUpdate();

Optional<ExecutionQueued> maybeExecution = this.jdbcRepository.fetchOne(select);
if (maybeExecution.isPresent()) {
this.jdbcRepository.delete(maybeExecution.get());
}
DSL
.using(configuration)
.deleteFrom(this.jdbcRepository.getTable())
.where(buildTenantCondition(execution.getTenantId()))
.and(field("key").eq(IdUtils.fromParts(execution.getTenantId(), execution.getNamespace(), execution.getFlowId(), execution.getId())))
.execute();
});
}
}
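The hunk above replaces a SELECT ... FOR UPDATE followed by a separate delete with a single DELETE statement, letting the database resolve and remove the row atomically. A hedged JDBC sketch of that simplified shape (placeholder table and column names, not Kestra's schema):

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.SQLException;

    public class QueuedExecutionCleanupSketch {
        public boolean deleteQueuedExecution(Connection connection, String tenantId, String key) throws SQLException {
            try (PreparedStatement delete = connection.prepareStatement(
                    "DELETE FROM execution_queued WHERE tenant_id = ? AND key = ?")) {
                delete.setString(1, tenantId);
                delete.setString(2, key);
                // executeUpdate returns 0 when another executor already removed the row,
                // which is exactly the "already gone" case the old select-then-delete had to handle.
                return delete.executeUpdate() > 0;
            }
        }
    }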
@@ -424,7 +424,7 @@ public class JdbcExecutor implements ExecutorInterface {

MultipleConditionEvent multipleConditionEvent = either.getLeft();

flowTriggerService.computeExecutionsFromFlowTriggers(multipleConditionEvent.execution(), List.of(multipleConditionEvent.flow()), Optional.of(multipleConditionStorage))
flowTriggerService.computeExecutionsFromFlowTriggerPreconditions(multipleConditionEvent.execution(), multipleConditionEvent.flow(), multipleConditionStorage)
.forEach(exec -> {
try {
executionQueue.emit(exec);

@@ -1230,8 +1230,10 @@ public class JdbcExecutor implements ExecutorInterface {
private void processFlowTriggers(Execution execution) throws QueueException {
// directly process simple conditions
flowTriggerService.withFlowTriggersOnly(allFlows.stream())
.filter(f ->ListUtils.emptyOnNull(f.getTrigger().getConditions()).stream().noneMatch(c -> c instanceof MultipleCondition) && f.getTrigger().getPreconditions() == null)
.flatMap(f -> flowTriggerService.computeExecutionsFromFlowTriggers(execution, List.of(f.getFlow()), Optional.empty()).stream())
.filter(f -> ListUtils.emptyOnNull(f.getTrigger().getConditions()).stream().noneMatch(c -> c instanceof MultipleCondition) && f.getTrigger().getPreconditions() == null)
.map(f -> f.getFlow())
.distinct() // as computeExecutionsFromFlowTriggers is based on flow, we must map FlowWithFlowTrigger to a flow and distinct to avoid multiple execution for the same flow
.flatMap(f -> flowTriggerService.computeExecutionsFromFlowTriggerConditions(execution, f).stream())
.forEach(throwConsumer(exec -> executionQueue.emit(exec)));

// send multiple conditions to the multiple condition queue for later processing
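The .map(f -> f.getFlow()).distinct() step in the hunk above exists because computeExecutionsFromFlowTriggerConditions is now flow-based: several triggers of the same flow can match the incoming execution, but each flow should only be evaluated once. A small stand-alone sketch of that collapse, using record stand-ins rather than the real Flow classes:

    import java.util.List;

    public class DistinctFlowSketch {
        // Stand-ins for Kestra's Flow / FlowWithFlowTrigger (illustrative only).
        record Flow(String namespace, String id) {}
        record FlowWithTrigger(Flow flow, String triggerId) {}

        public static void main(String[] args) {
            List<FlowWithTrigger> matches = List.of(
                new FlowWithTrigger(new Flow("company.team", "downstream"), "on-success"),
                new FlowWithTrigger(new Flow("company.team", "downstream"), "on-failure"),
                new FlowWithTrigger(new Flow("company.team", "other"), "on-success")
            );

            // Collapse to distinct flows so each flow is evaluated (and executed) only once,
            // even when several of its triggers matched the incoming execution.
            List<Flow> flows = matches.stream()
                .map(FlowWithTrigger::flow)
                .distinct() // records get value-based equals/hashCode, so distinct() works out of the box
                .toList();

            flows.forEach(flow -> System.out.println(flow.namespace() + "/" + flow.id()));
            // company.team/downstream
            // company.team/other
        }
    }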
@@ -4,6 +4,7 @@ import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.triggers.Trigger;
import io.kestra.core.repositories.TriggerRepositoryInterface;
import io.kestra.core.runners.ScheduleContextInterface;
import io.kestra.core.runners.Scheduler;
import io.kestra.core.runners.SchedulerTriggerStateInterface;
import io.kestra.core.services.FlowListenersInterface;
import io.kestra.core.services.FlowService;

@@ -56,6 +57,9 @@ public class JdbcScheduler extends AbstractScheduler {
.forEach(abstractTrigger -> triggerRepository.delete(Trigger.of(flow, abstractTrigger)));
}
});

// No-op consumption of the trigger queue, so the events are purged from the queue
this.triggerQueue.receive(Scheduler.class, trigger -> { });
}

@Override
@@ -115,6 +115,10 @@ public abstract class JdbcServiceLivenessCoordinatorTest {
if (either.getLeft().getTaskRun().getState().getCurrent() == Type.RUNNING) {
runningLatch.countDown();
}

if (either.getLeft().getTaskRun().getState().getCurrent() == Type.FAILED) {
fail("Worker task result should not be in FAILED state as it should be resubmitted");
}
});

workerJobQueue.emit(workerTask(Duration.ofSeconds(5)));
@@ -25,7 +25,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
@Slf4j
@Singleton
public class TestRunner implements Runnable, AutoCloseable {
@Setter private int workerThread = Math.max(3, Runtime.getRuntime().availableProcessors());
@Setter private int workerThread = Math.max(3, Runtime.getRuntime().availableProcessors()) * 16;
@Setter private boolean schedulerEnabled = true;
@Setter private boolean workerEnabled = true;
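For context on the TestRunner change above: multiplying the default worker-thread count by 16 turns a handful of threads into enough capacity to run many test flows concurrently. On an assumed 8-core runner, for example:

    public class WorkerThreadSizing {
        public static void main(String[] args) {
            int cores = 8; // example value; Runtime.getRuntime().availableProcessors() on a real runner
            int before = Math.max(3, cores);      // 8
            int after = Math.max(3, cores) * 16;  // 128
            System.out.println(before + " -> " + after);
        }
    }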
@@ -26,6 +26,26 @@
document.getElementsByTagName("html")[0].classList.add(localStorage.getItem("theme"));
}
</script>

<!-- Optional but recommended for faster connection -->
<link rel="preconnect" href="https://fonts.googleapis.com">
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>

<!-- Load Google Fonts non-blocking -->
<link
rel="stylesheet"
href="https://fonts.googleapis.com/css2?family=Public+Sans:wght@300;400;700;800&family=Source+Code+Pro:wght@400;700;800&display=swap"
media="print"
onload="this.media='all'"
>

<!-- Fallback for when JavaScript is disabled -->
<noscript>
<link
rel="stylesheet"
href="https://fonts.googleapis.com/css2?family=Public+Sans:wght@300;400;700;800&family=Source+Code+Pro:wght@400;700;800&display=swap"
>
</noscript>
</head>
<body>
<noscript>
12
ui/package-lock.json
generated
@@ -24,7 +24,6 @@
"cronstrue": "^3.9.0",
"cytoscape": "^3.33.0",
"dagre": "^0.8.5",
"el-table-infinite-scroll": "^3.0.7",
"element-plus": "2.11.5",
"humanize-duration": "^3.33.1",
"js-yaml": "^4.1.0",

@@ -9941,17 +9940,6 @@
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/el-table-infinite-scroll": {
"version": "3.0.7",
"resolved": "https://registry.npmjs.org/el-table-infinite-scroll/-/el-table-infinite-scroll-3.0.7.tgz",
"integrity": "sha512-at7f8GjNzvkf16i5kCBb1MOq6wI65k+TuaSt5wgiOLAKvdTr36+wAvnOnPYVIPhEpGeM8mRgLZQr2b5YV0lQaw==",
"license": "MIT",
"dependencies": {
"core-js": "^3.x",
"element-plus": "^2.x",
"vue": "^3.x"
}
},
"node_modules/electron-to-chromium": {
"version": "1.5.207",
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.207.tgz",

@@ -38,7 +38,6 @@
"cronstrue": "^3.9.0",
"cytoscape": "^3.33.0",
"dagre": "^0.8.5",
"el-table-infinite-scroll": "^3.0.7",
"element-plus": "2.11.5",
"humanize-duration": "^3.33.1",
"js-yaml": "^4.1.0",
@@ -35,16 +35,18 @@
|
||||
<WeatherSunny v-else />
|
||||
</el-button>
|
||||
</div>
|
||||
<div class="panelWrapper" :class="{panelTabResizing: resizing}" :style="{width: activeTab?.length ? `${panelWidth}px` : 0}">
|
||||
<div class="panelWrapper" ref="panelWrapper" :class="{panelTabResizing: resizing}" :style="{width: activeTab?.length ? `${panelWidth}px` : 0}">
|
||||
<div :style="{overflow: 'hidden'}">
|
||||
<button v-if="activeTab.length" class="closeButton" @click="setActiveTab('')">
|
||||
<Close />
|
||||
</button>
|
||||
<ContextDocs v-if="activeTab === 'docs'" />
|
||||
<ContextNews v-else-if="activeTab === 'news'" />
|
||||
<template v-else>
|
||||
{{ activeTab }}
|
||||
</template>
|
||||
<KeepAlive>
|
||||
<ContextDocs v-if="activeTab === 'docs'" />
|
||||
<ContextNews v-else-if="activeTab === 'news'" />
|
||||
<template v-else>
|
||||
{{ activeTab }}
|
||||
</template>
|
||||
</KeepAlive>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
@@ -96,6 +98,7 @@
|
||||
});
|
||||
|
||||
const panelWidth = ref(640)
|
||||
const panelWrapper = ref<HTMLDivElement | null>(null)
|
||||
|
||||
const {startResizing, resizing} = useResizablePanel(activeTab)
|
||||
|
||||
|
||||
@@ -4,14 +4,22 @@
|
||||
<slot name="back-button" />
|
||||
<h2>{{ title }}</h2>
|
||||
</div>
|
||||
<div class="content">
|
||||
<div class="content" ref="contentRef">
|
||||
<slot />
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script setup lang="ts">
|
||||
import {ref} from "vue";
|
||||
|
||||
defineProps<{title:string}>();
|
||||
|
||||
const contentRef = ref<HTMLDivElement | null>(null);
|
||||
|
||||
defineExpose({
|
||||
contentRef
|
||||
});
|
||||
</script>
|
||||
|
||||
<style scoped lang="scss">
|
||||
|
||||
@@ -197,7 +197,6 @@
|
||||
|
||||
import {trackTabOpen, trackTabClose} from "../utils/tabTracking";
|
||||
import {Panel, Tab, TabLive} from "../utils/multiPanelTypes";
|
||||
import {usePanelDefaultSize} from "../composables/usePanelDefaultSize";
|
||||
|
||||
const {t} = useI18n();
|
||||
const {showKeyShortcuts} = useKeyShortcuts();
|
||||
@@ -449,7 +448,7 @@
|
||||
}
|
||||
}
|
||||
|
||||
const defaultSize = usePanelDefaultSize(panels);
|
||||
const defaultSize = computed(() => panels.value.length === 0 ? 1 : (panels.value.reduce((acc, panel) => acc + panel.size, 0) / panels.value.length));
|
||||
|
||||
function newPanelDrop(_e: DragEvent, direction: "left" | "right") {
|
||||
if (!movedTabInfo.value) return;
|
||||
|
||||
@@ -22,6 +22,8 @@
|
||||
columns: optionalColumns,
|
||||
storageKey: storageKey
|
||||
}"
|
||||
:defaultScope="false"
|
||||
:defaultTimeRange="false"
|
||||
/>
|
||||
</template>
|
||||
<template #table>
|
||||
@@ -298,6 +300,7 @@
|
||||
<script setup lang="ts">
|
||||
import _merge from "lodash/merge";
|
||||
import {ref, computed, watch} from "vue";
|
||||
import moment from "moment";
|
||||
import {useI18n} from "vue-i18n";
|
||||
import {useRoute} from "vue-router";
|
||||
import {ElMessage} from "element-plus";
|
||||
@@ -333,11 +336,9 @@
|
||||
import TopNavBar from "../layout/TopNavBar.vue";
|
||||
import BulkSelect from "../layout/BulkSelect.vue";
|
||||
import LogsWrapper from "../logs/LogsWrapper.vue";
|
||||
//@ts-expect-error No declaration file
|
||||
import SelectTable from "../layout/SelectTable.vue";
|
||||
import TriggerAvatar from "../flows/TriggerAvatar.vue";
|
||||
import KSFilter from "../filter/components/KSFilter.vue";
|
||||
import useRestoreUrl from "../../composables/useRestoreUrl";
|
||||
import MarkdownTooltip from "../layout/MarkdownTooltip.vue";
|
||||
import useRouteContext from "../../composables/useRouteContext";
|
||||
|
||||
@@ -436,8 +437,6 @@
|
||||
.filter(Boolean) as ColumnConfig[]
|
||||
);
|
||||
|
||||
const {saveRestoreUrl} = useRestoreUrl();
|
||||
|
||||
const loadData = (callback?: () => void) => {
|
||||
const query = loadQuery({
|
||||
size: parseInt(String(route.query?.size ?? "25")),
|
||||
@@ -463,8 +462,7 @@
|
||||
|
||||
const {ready, onSort, onPageChanged, queryWithFilter, load} = useDataTableActions({
|
||||
dataTableRef: dataTable,
|
||||
loadData,
|
||||
saveRestoreUrl
|
||||
loadData
|
||||
});
|
||||
|
||||
const {
|
||||
@@ -696,7 +694,16 @@
|
||||
};
|
||||
|
||||
const loadQuery = (base: any) => {
|
||||
let queryFilter = queryWithFilter();
|
||||
const queryFilter = queryWithFilter();
|
||||
|
||||
const timeRange = queryFilter["filters[timeRange][EQUALS]"];
|
||||
if (timeRange) {
|
||||
const end = new Date();
|
||||
const start = new Date(end.getTime() - moment.duration(timeRange).asMilliseconds());
|
||||
queryFilter["filters[startDate][GREATER_THAN_OR_EQUAL_TO]"] = start.toISOString();
|
||||
queryFilter["filters[endDate][LESS_THAN_OR_EQUAL_TO]"] = end.toISOString();
|
||||
delete queryFilter["filters[timeRange][EQUALS]"];
|
||||
}
|
||||
|
||||
return _merge(base, queryFilter);
|
||||
};
|
||||
|
||||
@@ -18,7 +18,7 @@
|
||||
</template>
|
||||
|
||||
<script setup lang="ts">
|
||||
import {computed, onBeforeMount, ref, useTemplateRef} from "vue";
|
||||
import {computed, onBeforeMount, ref, useTemplateRef, watch} from "vue";
|
||||
import {stringify, parse} from "@kestra-io/ui-libs/flow-yaml-utils";
|
||||
|
||||
import type {Dashboard, Chart} from "./composables/useDashboards";
|
||||
@@ -89,9 +89,16 @@
|
||||
}
|
||||
|
||||
if (!props.isFlow && !props.isNamespace) {
|
||||
// Preserve timeRange filter when switching dashboards
|
||||
const preservedQuery = Object.fromEntries(
|
||||
Object.entries(route.query).filter(([key]) =>
|
||||
key.includes("timeRange")
|
||||
)
|
||||
);
|
||||
|
||||
router.replace({
|
||||
params: {...route.params, dashboard: id},
|
||||
query: route.params.dashboard !== id ? {} : {...route.query},
|
||||
query: route.params.dashboard !== id ? preservedQuery : {...route.query},
|
||||
});
|
||||
}
|
||||
|
||||
@@ -102,8 +109,22 @@
|
||||
onBeforeMount(() => {
|
||||
const ID = getDashboard(route, "id");
|
||||
|
||||
if (props.isFlow && ID === "default") load("default", processFlowYaml(YAML_FLOW, route.params.namespace as string, route.params.id as string));
|
||||
else if (props.isNamespace && ID === "default") load("default", YAML_NAMESPACE);
|
||||
if (props.isFlow) {
|
||||
load(ID, processFlowYaml(YAML_FLOW, route.params.namespace as string, route.params.id as string));
|
||||
} else if (props.isNamespace) {
|
||||
load(ID, YAML_NAMESPACE);
|
||||
}
|
||||
});
|
||||
|
||||
watch(() => getDashboard(route, "id"), (newId, oldId) => {
|
||||
if (newId !== oldId) {
|
||||
const defaultYAML = props.isFlow
|
||||
? processFlowYaml(YAML_FLOW, route.params.namespace as string, route.params.id as string)
|
||||
: props.isNamespace
|
||||
? YAML_NAMESPACE
|
||||
: YAML_MAIN;
|
||||
load(newId, defaultYAML);
|
||||
}
|
||||
});
|
||||
</script>
|
||||
|
||||
|
||||
@@ -28,33 +28,10 @@
|
||||
}
|
||||
}
|
||||
|
||||
async function loadChart(chart: any) {
|
||||
const yamlChart = YAML_UTILS.stringify(chart);
|
||||
const result: { error: string | null; data: null | {
|
||||
id?: string;
|
||||
name?: string;
|
||||
type?: string;
|
||||
chartOptions?: Record<string, any>;
|
||||
dataFilters?: any[];
|
||||
charts?: any[];
|
||||
}; raw: any } = {
|
||||
error: null,
|
||||
data: null,
|
||||
raw: {}
|
||||
};
|
||||
const errors = await dashboardStore.validateChart(yamlChart);
|
||||
if (errors.constraints) {
|
||||
result.error = errors.constraints;
|
||||
} else {
|
||||
result.data = {...chart, content: yamlChart, raw: chart};
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
async function updateChartPreview(event: any) {
|
||||
const chart = YAML_UTILS.getChartAtPosition(event.model.getValue(), event.position);
|
||||
if (chart) {
|
||||
const result = await loadChart(chart);
|
||||
const result = await dashboardStore.loadChart(chart);
|
||||
dashboardStore.selectedChart = typeof result.data === "object"
|
||||
? {
|
||||
...result.data,
|
||||
|
||||
@@ -1,6 +1,11 @@
|
||||
<template>
|
||||
<div class="button-top">
|
||||
<ValidationError class="mx-3" tooltipPlacement="bottom-start" :errors="errors" />
|
||||
<ValidationError
|
||||
class="mx-3"
|
||||
tooltipPlacement="bottom-start"
|
||||
:errors="dashboardStore.errors"
|
||||
:warnings="dashboardStore.warnings"
|
||||
/>
|
||||
|
||||
<el-button
|
||||
:icon="ContentSave"
|
||||
@@ -17,6 +22,7 @@
|
||||
import {useI18n} from "vue-i18n";
|
||||
import ContentSave from "vue-material-design-icons/ContentSave.vue";
|
||||
import ValidationError from "../../flows/ValidationError.vue";
|
||||
import {useDashboardStore} from "../../../stores/dashboard";
|
||||
|
||||
const {t} = useI18n();
|
||||
|
||||
@@ -24,15 +30,11 @@
|
||||
(e: "save"): void;
|
||||
}>();
|
||||
|
||||
const props = defineProps<{
|
||||
warnings?: string[];
|
||||
errors?: string[];
|
||||
disabled?: boolean;
|
||||
}>();
|
||||
const dashboardStore = useDashboardStore();
|
||||
|
||||
const saveButtonType = computed(() => {
|
||||
if (props.errors) return "danger";
|
||||
return props.warnings ? "warning" : "primary";
|
||||
if (dashboardStore.errors) return "danger";
|
||||
return dashboardStore.warnings ? "warning" : "primary";
|
||||
});
|
||||
</script>
|
||||
<style lang="scss" scoped>
|
||||
|
||||
@@ -37,6 +37,7 @@
|
||||
FIELDNAME_INJECTION_KEY,
|
||||
FULL_SCHEMA_INJECTION_KEY,
|
||||
FULL_SOURCE_INJECTION_KEY,
|
||||
ON_TASK_EDITOR_CLICK_INJECTION_KEY,
|
||||
PARENT_PATH_INJECTION_KEY,
|
||||
POSITION_INJECTION_KEY,
|
||||
REF_PATH_INJECTION_KEY,
|
||||
@@ -111,6 +112,15 @@
|
||||
provide(BLOCK_SCHEMA_PATH_INJECTION_KEY, computed(() => props.blockSchemaPath ?? dashboardStore.schema.$ref ?? ""));
|
||||
provide(FULL_SOURCE_INJECTION_KEY, computed(() => dashboardStore.sourceCode ?? ""));
|
||||
provide(POSITION_INJECTION_KEY, props.position ?? "after");
|
||||
provide(ON_TASK_EDITOR_CLICK_INJECTION_KEY, (elt) => {
|
||||
const type = elt?.type;
|
||||
dashboardStore.loadChart(elt);
|
||||
if(type){
|
||||
pluginsStore.updateDocumentation({type});
|
||||
}else{
|
||||
pluginsStore.updateDocumentation();
|
||||
}
|
||||
})
|
||||
|
||||
const pluginsStore = usePluginsStore();
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
<template>
|
||||
<div class="w-100 p-4">
|
||||
<Sections
|
||||
:key="dashboardStore.sourceCode"
|
||||
:dashboard="{id: 'default', charts: []}"
|
||||
:charts="charts.map(chart => chart.data).filter(chart => chart !== null)"
|
||||
showDefault
|
||||
@@ -9,11 +10,12 @@
|
||||
</template>
|
||||
|
||||
<script lang="ts" setup>
|
||||
import {onMounted, ref} from "vue";
|
||||
import {ref, watch} from "vue";
|
||||
import Sections from "../sections/Sections.vue";
|
||||
import {Chart} from "../composables/useDashboards";
|
||||
import {useDashboardStore} from "../../../stores/dashboard";
|
||||
import * as YAML_UTILS from "@kestra-io/ui-libs/flow-yaml-utils";
|
||||
import throttle from "lodash/throttle";
|
||||
|
||||
interface Result {
|
||||
error: string[] | null;
|
||||
@@ -23,21 +25,27 @@
|
||||
|
||||
const charts = ref<Result[]>([])
|
||||
|
||||
onMounted(async () => {
|
||||
validateAndLoadAllCharts();
|
||||
});
|
||||
|
||||
const dashboardStore = useDashboardStore();
|
||||
|
||||
function validateAndLoadAllCharts() {
|
||||
charts.value = [];
|
||||
const validateAndLoadAllChartsThrottled = throttle(validateAndLoadAllCharts, 500);
|
||||
|
||||
async function validateAndLoadAllCharts() {
|
||||
const allCharts = YAML_UTILS.getAllCharts(dashboardStore.sourceCode) ?? [];
|
||||
allCharts.forEach(async (chart: any) => {
|
||||
const loadedChart = await loadChart(chart);
|
||||
charts.value.push(loadedChart);
|
||||
});
|
||||
charts.value = await Promise.all(allCharts.map(async (chart: any) => {
|
||||
return loadChart(chart);
|
||||
}));
|
||||
}
|
||||
|
||||
watch(
|
||||
() => dashboardStore.sourceCode,
|
||||
() => {
|
||||
validateAndLoadAllChartsThrottled();
|
||||
}
|
||||
, {immediate: true}
|
||||
);
|
||||
|
||||
|
||||
|
||||
async function loadChart(chart: any) {
|
||||
const yamlChart = YAML_UTILS.stringify(chart);
|
||||
const result: Result = {
|
||||
|
||||
@@ -96,14 +96,19 @@
|
||||
return [DEFAULT, ...dashboards.value].filter((d) => !search.value || d.title.toLowerCase().includes(search.value.toLowerCase()));
|
||||
});
|
||||
|
||||
const ID = getDashboard(route, "id") as string;
|
||||
|
||||
const selected = ref(null);
|
||||
const STORAGE_KEY = getDashboard(route, "key");
|
||||
|
||||
const selected = ref<string | null>(null);
|
||||
const select = (dashboard: any) => {
|
||||
selected.value = dashboard?.title;
|
||||
|
||||
if (dashboard?.id) localStorage.setItem(ID, dashboard.id)
|
||||
else localStorage.removeItem(ID);
|
||||
if (STORAGE_KEY) {
|
||||
if (dashboard?.id) {
|
||||
localStorage.setItem(STORAGE_KEY, dashboard.id);
|
||||
} else {
|
||||
localStorage.removeItem(STORAGE_KEY);
|
||||
}
|
||||
}
|
||||
|
||||
emits("dashboard", dashboard.id);
|
||||
};
|
||||
@@ -121,7 +126,7 @@
|
||||
});
|
||||
};
|
||||
|
||||
const fetchLast = () => localStorage.getItem(ID);
|
||||
const getStoredDashboard = () => STORAGE_KEY ? localStorage.getItem(STORAGE_KEY) : null;
|
||||
const fetchDashboards = () => {
|
||||
dashboardStore
|
||||
.list({})
|
||||
@@ -129,13 +134,17 @@
|
||||
dashboards.value = response.results;
|
||||
|
||||
const creation = Boolean(route.query.created);
|
||||
const lastSelected = creation ? (route.params?.dashboard ?? fetchLast()) : (fetchLast() ?? route.params?.dashboard);
|
||||
const lastSelected = creation
|
||||
? (route.params?.dashboard ?? getStoredDashboard())
|
||||
: (getStoredDashboard() ?? route.params?.dashboard);
|
||||
|
||||
if (lastSelected) {
|
||||
const dashboard = dashboards.value.find((d) => d.id === lastSelected);
|
||||
|
||||
if (dashboard) select(dashboard);
|
||||
else {
|
||||
if (dashboard) {
|
||||
selected.value = dashboard.title;
|
||||
emits("dashboard", dashboard.id);
|
||||
} else {
|
||||
selected.value = null;
|
||||
emits("dashboard", "default");
|
||||
}
|
||||
@@ -145,15 +154,19 @@
|
||||
|
||||
onBeforeMount(() => fetchDashboards());
|
||||
|
||||
const tenant = ref(route.params.tenant);
|
||||
watch(route, (r) => {
|
||||
if (tenant.value !== r.params.tenant) {
|
||||
fetchDashboards();
|
||||
tenant.value = r.params.tenant;
|
||||
}
|
||||
},
|
||||
{deep: true},
|
||||
);
|
||||
const tenant = ref();
|
||||
watch(() => route.params.tenant, (t) => {
|
||||
if (tenant.value !== t) {
|
||||
fetchDashboards();
|
||||
tenant.value = t;
|
||||
}
|
||||
}, {immediate: true});
|
||||
|
||||
watch(() => route.params?.dashboard, (val) => {
|
||||
if(route.name === "home" && STORAGE_KEY) {
|
||||
localStorage.setItem(STORAGE_KEY, val as string);
|
||||
}
|
||||
}, {immediate: true});
|
||||
</script>
|
||||
|
||||
<style scoped lang="scss">
|
||||
@@ -161,14 +174,6 @@
|
||||
span{
|
||||
font-size: 14px;
|
||||
}
|
||||
|
||||
:deep(svg){
|
||||
color: var(--ks-content-tertiary);
|
||||
font-size: 1.10rem;
|
||||
position: absolute;
|
||||
bottom: -0.10rem;
|
||||
right: 0.08rem;
|
||||
}
|
||||
}
|
||||
.dropdown {
|
||||
width: 300px;
|
||||
|
||||
@@ -1,12 +1,13 @@
|
||||
<template>
|
||||
<section id="charts" :class="{padding}">
|
||||
<el-row :gutter="16">
|
||||
<el-col
|
||||
<div class="dashboard-sections-container">
|
||||
<section id="charts" :class="{padding}">
|
||||
<div
|
||||
v-for="chart in props.charts"
|
||||
:key="`chart__${chart.id}`"
|
||||
:xs="24"
|
||||
:sm="(chart.chartOptions?.width || 6) * 4"
|
||||
:md="(chart.chartOptions?.width || 6) * 2"
|
||||
class="dashboard-block"
|
||||
:class="{
|
||||
[`dash-width-${chart.chartOptions?.width || 6}`]: true
|
||||
}"
|
||||
>
|
||||
<div class="d-flex flex-column">
|
||||
<div class="d-flex justify-content-between">
|
||||
@@ -64,9 +65,9 @@
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</el-col>
|
||||
</el-row>
|
||||
</section>
|
||||
</div>
|
||||
</section>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script setup lang="ts">
|
||||
@@ -133,14 +134,28 @@
|
||||
<style scoped lang="scss">
|
||||
@import "@kestra-io/ui-libs/src/scss/variables";
|
||||
|
||||
.dashboard-sections-container{
|
||||
container-type: inline-size;
|
||||
}
|
||||
|
||||
$smallMobile: 375px;
|
||||
$tablet: 768px;
|
||||
|
||||
section#charts {
|
||||
display: grid;
|
||||
gap: 1rem;
|
||||
grid-template-columns: repeat(3, 1fr);
|
||||
@container (min-width: #{$smallMobile}) {
|
||||
grid-template-columns: repeat(6, 1fr);
|
||||
}
|
||||
@container (min-width: #{$tablet}) {
|
||||
grid-template-columns: repeat(12, 1fr);
|
||||
}
|
||||
&.padding {
|
||||
padding: 0 2rem 1rem;
|
||||
}
|
||||
|
||||
& .el-row .el-col {
|
||||
margin-bottom: 1rem;
|
||||
|
||||
.dashboard-block {
|
||||
& > div {
|
||||
height: 100%;
|
||||
padding: 1.5rem;
|
||||
@@ -159,5 +174,24 @@ section#charts {
|
||||
opacity: 1;
|
||||
}
|
||||
}
|
||||
|
||||
.dash-width-3, .dash-width-6, .dash-width-9, .dash-width-12 {
|
||||
grid-column: span 3;
|
||||
}
|
||||
|
||||
@container (min-width: #{$smallMobile}) {
|
||||
.dash-width-6, .dash-width-9, .dash-width-12 {
|
||||
grid-column: span 6;
|
||||
}
|
||||
}
|
||||
|
||||
@container (min-width: #{$tablet}) {
|
||||
.dash-width-9 {
|
||||
grid-column: span 9;
|
||||
}
|
||||
.dash-width-12 {
|
||||
grid-column: span 12;
|
||||
}
|
||||
}
|
||||
}
|
||||
</style>
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
<template>
|
||||
<ContextInfoContent :title="routeInfo.title">
|
||||
<ContextInfoContent :title="routeInfo.title" ref="contextInfoRef">
|
||||
<template v-if="isOnline" #back-button>
|
||||
<button
|
||||
class="back-button"
|
||||
@@ -26,7 +26,7 @@
|
||||
<OpenInNew class="blank" />
|
||||
</router-link>
|
||||
</template>
|
||||
<div ref="docWrapper" class="docs-controls">
|
||||
<div class="docs-controls">
|
||||
<template v-if="isOnline">
|
||||
<ContextDocsSearch />
|
||||
<DocsMenu />
|
||||
@@ -42,7 +42,7 @@
|
||||
</template>
|
||||
|
||||
<script setup lang="ts">
|
||||
import {ref, watch, computed, getCurrentInstance, onUnmounted, onMounted, nextTick} from "vue";
|
||||
import {ref, watch, computed, getCurrentInstance, onUnmounted, onMounted} from "vue";
|
||||
import {useDocStore} from "../../stores/doc";
|
||||
import {useI18n} from "vue-i18n";
|
||||
import OpenInNew from "vue-material-design-icons/OpenInNew.vue";
|
||||
@@ -55,7 +55,9 @@
|
||||
import ContextInfoContent from "../ContextInfoContent.vue";
|
||||
import ContextChildTableOfContents from "./ContextChildTableOfContents.vue";
|
||||
|
||||
|
||||
import {useNetwork} from "@vueuse/core"
|
||||
import {useScrollMemory} from "../../composables/useScrollMemory"
|
||||
const {isOnline} = useNetwork()
|
||||
|
||||
import Markdown from "../../components/layout/Markdown.vue";
|
||||
@@ -64,19 +66,18 @@
|
||||
const docStore = useDocStore();
|
||||
const {t} = useI18n({useScope: "global"});
|
||||
|
||||
const docWrapper = ref<HTMLDivElement | null>(null);
|
||||
const contextInfoRef = ref<InstanceType<typeof ContextInfoContent> | null>(null);
|
||||
const docHistory = ref<string[]>([]);
|
||||
const currentHistoryIndex = ref(-1);
|
||||
const ast = ref<any>(undefined);
|
||||
|
||||
const pageMetadata = computed(() => docStore.pageMetadata);
|
||||
const docPath = computed(() => docStore.docPath);
|
||||
|
||||
const routeInfo = computed(() => ({
|
||||
title: pageMetadata.value?.title ?? t("docs"),
|
||||
}));
|
||||
const canGoBack = computed(() => docHistory.value.length > 1 && currentHistoryIndex.value > 0);
|
||||
|
||||
|
||||
const addToHistory = (path: string) => {
|
||||
// Always store the path, even empty ones
|
||||
const pathToAdd = path || "";
|
||||
@@ -179,8 +180,10 @@
|
||||
|
||||
addToHistory(val);
|
||||
refreshPage(val);
|
||||
nextTick(() => docWrapper.value?.scrollTo(0, 0));
|
||||
}, {immediate: true});
|
||||
|
||||
const scrollableElement = computed(() => contextInfoRef.value?.contentRef ?? null)
|
||||
useScrollMemory(ref("context-panel-docs"), scrollableElement as any)
|
||||
</script>
|
||||
|
||||
<style scoped lang="scss">
|
||||
@@ -241,4 +244,4 @@
|
||||
margin-bottom: 1rem;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
</style>
|
||||
|
||||
@@ -23,9 +23,15 @@
|
||||
</template>
|
||||
|
||||
<script setup lang="ts">
|
||||
import {ref} from "vue"
|
||||
import {ref, computed} from "vue"
|
||||
import {useRoute} from "vue-router";
|
||||
import {useScrollMemory} from "../../composables/useScrollMemory";
|
||||
|
||||
const collapsed = ref(false);
|
||||
const route = useRoute();
|
||||
const scrollKey = computed(() => `docs:${route.fullPath}`);
|
||||
|
||||
useScrollMemory(scrollKey, undefined, true);
|
||||
|
||||
</script>
|
||||
|
||||
@@ -224,4 +230,4 @@
|
||||
padding-bottom: 1px !important;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
</style>
|
||||
|
||||
@@ -84,7 +84,7 @@
|
||||
import {useExecutionsStore} from "../../stores/executions";
|
||||
import {useAuthStore} from "override/stores/auth";
|
||||
|
||||
const props = defineProps<{
|
||||
const props = withDefaults(defineProps<{
|
||||
component: string;
|
||||
execution: {
|
||||
id: string;
|
||||
@@ -95,7 +95,10 @@
|
||||
};
|
||||
};
|
||||
tooltipPosition: string;
|
||||
}>();
|
||||
}>(), {
|
||||
component: "el-button",
|
||||
tooltipPosition: "bottom"
|
||||
});
|
||||
|
||||
const emit = defineEmits<{
|
||||
follow: [];
|
||||
|
||||
@@ -51,6 +51,7 @@
|
||||
refresh: {shown: true, callback: refresh}
|
||||
}"
|
||||
@update-properties="updateDisplayColumns"
|
||||
:defaultScope="defaultScopeFilter"
|
||||
/>
|
||||
</template>
|
||||
|
||||
@@ -70,7 +71,7 @@
|
||||
@selection-change="handleSelectionChange"
|
||||
:selectable="!hidden?.includes('selection') && canCheck"
|
||||
:no-data-text="$t('no_results.executions')"
|
||||
:rowKey="(row: any) => `${row.namespace}-${row.id}`"
|
||||
:rowKey="(row: any) => row.id"
|
||||
>
|
||||
<template #select-actions>
|
||||
<BulkSelect
|
||||
@@ -144,10 +145,7 @@
|
||||
|
||||
<el-form>
|
||||
<ElFormItem :label="$t('execution labels')">
|
||||
<LabelInput
|
||||
:key="executionLabels.map((l) => l.key).join('-')"
|
||||
v-model:labels="executionLabels"
|
||||
/>
|
||||
<LabelInput v-model:labels="executionLabels" />
|
||||
</ElFormItem>
|
||||
</el-form>
|
||||
</el-dialog>
|
||||
@@ -384,7 +382,7 @@
|
||||
import _merge from "lodash/merge";
|
||||
import {useI18n} from "vue-i18n";
|
||||
import {useRoute, useRouter} from "vue-router";
|
||||
import {ref, computed, onMounted, watch, h, useTemplateRef} from "vue";
|
||||
import {ref, computed, watch, h, useTemplateRef} from "vue";
|
||||
import * as YAML_UTILS from "@kestra-io/ui-libs/flow-yaml-utils";
|
||||
import {ElMessageBox, ElSwitch, ElFormItem, ElAlert, ElCheckbox} from "element-plus";
|
||||
|
||||
@@ -410,8 +408,7 @@
|
||||
import Labels from "../layout/Labels.vue";
|
||||
import DateAgo from "../layout/DateAgo.vue";
|
||||
import DataTable from "../layout/DataTable.vue";
|
||||
import BulkSelect from "../layout/BulkSelect.vue";
|
||||
//@ts-expect-error no declaration file
|
||||
import BulkSelect from "../layout/BulkSelect.vue";
|
||||
import SelectTable from "../layout/SelectTable.vue";
|
||||
import KSFilter from "../filter/components/KSFilter.vue";
|
||||
import Sections from "../dashboard/sections/Sections.vue";
|
||||
@@ -424,14 +421,12 @@
|
||||
import {filterValidLabels} from "./utils";
|
||||
import {useToast} from "../../utils/toast";
|
||||
import {storageKeys} from "../../utils/constants";
|
||||
import {defaultNamespace} from "../../composables/useNamespaces";
|
||||
import {humanizeDuration, invisibleSpace} from "../../utils/filters";
|
||||
import Utils from "../../utils/utils";
|
||||
|
||||
import action from "../../models/action";
|
||||
import permission from "../../models/permission";
|
||||
|
||||
import useRestoreUrl from "../../composables/useRestoreUrl";
|
||||
import useRouteContext from "../../composables/useRouteContext";
|
||||
import {useTableColumns} from "../../composables/useTableColumns";
|
||||
import {useDataTableActions} from "../../composables/useDataTableActions";
|
||||
@@ -463,6 +458,7 @@
|
||||
hidden?: string[] | null;
|
||||
flowId?: string | undefined;
|
||||
namespace?: string | undefined;
|
||||
defaultScopeFilter?: boolean;
|
||||
}>(), {
|
||||
embed: false,
|
||||
filter: true,
|
||||
@@ -475,6 +471,7 @@
|
||||
hidden: null,
|
||||
flowId: undefined,
|
||||
namespace: undefined,
|
||||
defaultScopeFilter: undefined
|
||||
});
|
||||
|
||||
const emit = defineEmits<{
|
||||
@@ -496,7 +493,6 @@
|
||||
const selectedStatus = ref(undefined);
|
||||
const lastRefreshDate = ref(new Date());
|
||||
const unqueueDialogVisible = ref(false);
|
||||
const isDefaultNamespaceAllow = ref(true);
|
||||
const changeStatusDialogVisible = ref(false);
|
||||
const actionOptions = ref<Record<string, any>>({});
|
||||
const dblClickRouteName = ref("executions/update");
|
||||
@@ -614,11 +610,6 @@
|
||||
const routeInfo = computed(() => ({title: t("executions")}));
|
||||
useRouteContext(routeInfo, props.embed);
|
||||
|
||||
const {saveRestoreUrl} = useRestoreUrl({
|
||||
restoreUrl: true,
|
||||
isDefaultNamespaceAllow: isDefaultNamespaceAllow.value
|
||||
});
|
||||
|
||||
const dataTableRef = ref(null);
|
||||
const selectTableRef = useTemplateRef<typeof SelectTable>("selectTable");
|
||||
|
||||
@@ -634,8 +625,7 @@
|
||||
dblClickRouteName: dblClickRouteName.value,
|
||||
embed: props.embed,
|
||||
dataTableRef,
|
||||
loadData: loadData,
|
||||
saveRestoreUrl
|
||||
loadData: loadData
|
||||
});
|
||||
|
||||
const {
|
||||
@@ -1043,31 +1033,6 @@
|
||||
emit("state-count", {runningCount, totalCount});
|
||||
};
|
||||
|
||||
onMounted(() => {
|
||||
const query = {...route.query};
|
||||
let queryHasChanged = false;
|
||||
|
||||
const queryKeys = Object.keys(query);
|
||||
if (props.namespace === undefined && defaultNamespace() && !queryKeys.some(key => key.startsWith("filters[namespace]"))) {
|
||||
query["filters[namespace][PREFIX]"] = defaultNamespace();
|
||||
queryHasChanged = true;
|
||||
}
|
||||
|
||||
if (!queryKeys.some(key => key.startsWith("filters[scope]"))) {
|
||||
query["filters[scope][EQUALS]"] = "USER";
|
||||
queryHasChanged = true;
|
||||
}
|
||||
|
||||
if (queryHasChanged) {
|
||||
router.replace({query});
|
||||
}
|
||||
|
||||
if (route.name === "flows/update") {
|
||||
optionalColumns.value = optionalColumns.value.
|
||||
filter(col => col.prop !== "namespace" && col.prop !== "flowId");
|
||||
}
|
||||
});
|
||||
|
||||
watch(isOpenLabelsModal, (opening) => {
|
||||
if (opening) {
|
||||
executionLabels.value = [];
|
||||
|
||||
@@ -3,8 +3,8 @@
|
||||
v-if="!isExecutionStarted"
|
||||
:execution="execution"
|
||||
/>
|
||||
<el-card id="gantt" shadow="never" v-else-if="execution && executionsStore.flow">
|
||||
<template #header>
|
||||
<el-card id="gantt" shadow="never" :class="{'no-border': !hasValidDate}" v-else-if="execution && executionsStore.flow">
|
||||
<template #header v-if="hasValidDate">
|
||||
<div class="d-flex">
|
||||
<Duration class="th text-end" :histories="execution.state.histories" />
|
||||
<span class="text-end" v-for="(date, i) in dates" :key="i">
|
||||
@@ -234,6 +234,9 @@
|
||||
isExecutionStarted() {
|
||||
return this.execution?.state?.current && !["CREATED", "QUEUED"].includes(this.execution.state.current);
|
||||
},
|
||||
hasValidDate() {
|
||||
return isFinite(this.delta());
|
||||
},
|
||||
},
|
||||
methods: {
|
||||
forwardEvent(type, event) {
|
||||
@@ -443,6 +446,9 @@
|
||||
}
|
||||
}
|
||||
|
||||
.no-border {
|
||||
border: none !important;
|
||||
}
|
||||
|
||||
// To Separate through Line
|
||||
:deep(.vue-recycle-scroller__item-view) {
|
||||
|
||||
@@ -150,7 +150,7 @@ export function useExecutionRoot() {
|
||||
follow();
|
||||
window.addEventListener("popstate", follow);
|
||||
|
||||
dependenciesCount.value = (await flowStore.loadDependencies({namespace: route.params.namespace as string, id: route.params.flowId as string, subtype: "FLOW"})).count;
|
||||
dependenciesCount.value = (await flowStore.loadDependencies({namespace: route.params.namespace as string, id: route.params.flowId as string, subtype: "FLOW"}, true)).count;
|
||||
previousExecutionId.value = route.params.id as string;
|
||||
});
|
||||
|
||||
|
||||
@@ -15,6 +15,7 @@
|
||||
<script lang="ts" setup>
|
||||
import {ref, computed, watch, PropType} from "vue";
|
||||
import DateSelect from "./DateSelect.vue";
|
||||
import {useI18n} from "vue-i18n";
|
||||
|
||||
interface TimePreset {
|
||||
value?: string;
|
||||
@@ -64,9 +65,11 @@
|
||||
timeFilterPresets.value.map(preset => preset.value)
|
||||
);
|
||||
|
||||
const {t} = useI18n();
|
||||
|
||||
const customAwarePlaceholder = computed<string | undefined>(() => {
|
||||
if (props.placeholder) return props.placeholder;
|
||||
return props.allowCustom ? "datepicker.custom" : undefined;
|
||||
return props.allowCustom ? t("datepicker.custom") : undefined;
|
||||
});
|
||||
|
||||
const onTimeRangeSelect = (range: string | undefined) => {
|
||||
@@ -92,4 +95,4 @@
|
||||
},
|
||||
{immediate: true}
|
||||
);
|
||||
</script>
|
||||
</script>
|
||||
|
||||
@@ -531,8 +531,9 @@
|
||||
}
|
||||
.content-container {
|
||||
height: calc(100vh - 0px);
|
||||
overflow-y: auto !important;
|
||||
overflow-y: scroll;
|
||||
overflow-x: hidden;
|
||||
scrollbar-gutter: stable;
|
||||
word-wrap: break-word;
|
||||
word-break: break-word;
|
||||
position: relative;
|
||||
@@ -541,19 +542,16 @@
|
||||
|
||||
:deep(.el-collapse) {
|
||||
.el-collapse-item__wrap {
|
||||
overflow-y: auto !important;
|
||||
max-height: none !important;
|
||||
}
|
||||
|
||||
.el-collapse-item__content {
|
||||
overflow-y: auto !important;
|
||||
word-wrap: break-word;
|
||||
word-break: break-word;
|
||||
}
|
||||
}
|
||||
|
||||
:deep(.var-value) {
|
||||
overflow-y: auto !important;
|
||||
word-wrap: break-word;
|
||||
word-break: break-word;
|
||||
}
|
||||
|
||||
@@ -45,6 +45,8 @@
|
||||
searchInputFullWidth?: boolean;
|
||||
legacyQuery?: boolean;
|
||||
readOnly?: boolean;
|
||||
defaultScope?: boolean;
|
||||
defaultTimeRange?: boolean;
|
||||
}>(), {
|
||||
buttons: () => ({}),
|
||||
tableOptions: () => ({}),
|
||||
@@ -53,7 +55,9 @@
|
||||
showSearchInput: true,
|
||||
searchInputFullWidth: false,
|
||||
legacyQuery: false,
|
||||
readOnly: false
|
||||
readOnly: false,
|
||||
defaultScope: undefined,
|
||||
defaultTimeRange: undefined,
|
||||
});
|
||||
|
||||
const emits = defineEmits<{
|
||||
@@ -75,7 +79,9 @@
|
||||
} = useFilters(
|
||||
props.configuration,
|
||||
props.showSearchInput,
|
||||
props.legacyQuery
|
||||
props.legacyQuery,
|
||||
props.defaultScope,
|
||||
props.defaultTimeRange,
|
||||
);
|
||||
|
||||
const {savedFilters, saveFilter, updateSavedFilter, deleteSavedFilter} = useSavedFilters(
|
||||
@@ -166,6 +172,7 @@
|
||||
watch(appliedFilters, (newFilters) => {
|
||||
emits("filter", newFilters);
|
||||
}, {deep: true});
|
||||
|
||||
</script>
|
||||
|
||||
<style lang="scss" scoped>
|
||||
|
||||
84
ui/src/components/filter/composables/useDefaultFilter.ts
Normal file
@@ -0,0 +1,84 @@
|
||||
import {nextTick, onMounted} from "vue";
|
||||
import {LocationQuery, useRoute, useRouter} from "vue-router";
|
||||
import {useMiscStore} from "override/stores/misc";
|
||||
import {defaultNamespace} from "../../../composables/useNamespaces";
|
||||
|
||||
interface DefaultFilterOptions {
|
||||
namespace?: string;
|
||||
includeTimeRange?: boolean;
|
||||
includeScope?: boolean;
|
||||
legacyQuery?: boolean;
|
||||
}
|
||||
|
||||
const NAMESPACE_FILTER_PREFIX = "filters[namespace]";
|
||||
const SCOPE_FILTER_PREFIX = "filters[scope]";
|
||||
const TIME_RANGE_FILTER_PREFIX = "filters[timeRange]";
|
||||
|
||||
const hasFilterKey = (query: LocationQuery, prefix: string): boolean =>
|
||||
Object.keys(query).some(key => key.startsWith(prefix));
|
||||
|
||||
export function applyDefaultFilters(
|
||||
currentQuery: LocationQuery,
|
||||
{
|
||||
namespace,
|
||||
includeTimeRange,
|
||||
includeScope,
|
||||
legacyQuery,
|
||||
}: DefaultFilterOptions = {}): { query: LocationQuery, change: boolean } {
|
||||
|
||||
if(currentQuery && Object.keys(currentQuery).length > 0) {
|
||||
return {
|
||||
query: currentQuery,
|
||||
change: false,
|
||||
}
|
||||
}
|
||||
|
||||
const query = {...currentQuery};
|
||||
|
||||
if (namespace === undefined && defaultNamespace() && !hasFilterKey(query, NAMESPACE_FILTER_PREFIX)) {
|
||||
query[legacyQuery ? "namespace" : `${NAMESPACE_FILTER_PREFIX}[PREFIX]`] = defaultNamespace();
|
||||
}
|
||||
|
||||
if (includeScope && !hasFilterKey(query, SCOPE_FILTER_PREFIX)) {
|
||||
query[legacyQuery ? "scope" : `${SCOPE_FILTER_PREFIX}[EQUALS]`] = "USER";
|
||||
}
|
||||
|
||||
const TIME_FILTER_KEYS = /startDate|endDate|timeRange/;
|
||||
|
||||
if (includeTimeRange && !Object.keys(query).some(key => TIME_FILTER_KEYS.test(key))) {
|
||||
const defaultDuration = useMiscStore().configs?.chartDefaultDuration ?? "P30D";
|
||||
query[legacyQuery ? "timeRange" : `${TIME_RANGE_FILTER_PREFIX}[EQUALS]`] = defaultDuration;
|
||||
}
|
||||
|
||||
return {query, change: true};
|
||||
}
|
||||
|
||||
export function useDefaultFilter(
|
||||
defaultOptions?: DefaultFilterOptions,
|
||||
) {
|
||||
const route = useRoute();
|
||||
const router = useRouter();
|
||||
|
||||
onMounted(async () => {
|
||||
// wait for router to be ready
|
||||
await nextTick()
|
||||
// wait for the useRestoreUrl to apply its changes
|
||||
await nextTick()
|
||||
// finally add default filter if necessary
|
||||
const {query, change} = applyDefaultFilters(route.query, defaultOptions)
|
||||
if(change) {
|
||||
router.replace({...route, query})
|
||||
}
|
||||
});
|
||||
|
||||
function resetDefaultFilter(){
|
||||
router.replace({
|
||||
...route,
|
||||
query: applyDefaultFilters({}, defaultOptions).query
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
resetDefaultFilter
|
||||
}
|
||||
}
|
||||
@@ -17,8 +17,16 @@ import {
|
||||
KV_COMPARATORS
|
||||
} from "../utils/filterTypes";
|
||||
import {usePreAppliedFilters} from "./usePreAppliedFilters";
|
||||
import {useDefaultFilter} from "./useDefaultFilter";
|
||||
|
||||
export function useFilters(configuration: FilterConfiguration, showSearchInput = true, legacyQuery = false) {
|
||||
|
||||
export function useFilters(
|
||||
configuration: FilterConfiguration,
|
||||
showSearchInput = true,
|
||||
legacyQuery = false,
|
||||
defaultScope?: boolean,
|
||||
defaultTimeRange?: boolean
|
||||
) {
|
||||
const router = useRouter();
|
||||
const route = useRoute();
|
||||
|
||||
@@ -28,8 +36,7 @@ export function useFilters(configuration: FilterConfiguration, showSearchInput =
|
||||
const {
|
||||
markAsPreApplied,
|
||||
hasPreApplied,
|
||||
getPreApplied,
|
||||
getAllPreApplied
|
||||
getPreApplied
|
||||
} = usePreAppliedFilters();
|
||||
|
||||
const appendQueryParam = (query: Record<string, any>, key: string, value: string) => {
|
||||
@@ -367,24 +374,24 @@ export function useFilters(configuration: FilterConfiguration, showSearchInput =
|
||||
updateRoute();
|
||||
};
|
||||
|
||||
/**
|
||||
* Resets all filters to their pre-applied state and clears the search query
|
||||
*/
|
||||
const {resetDefaultFilter} = useDefaultFilter({
|
||||
legacyQuery,
|
||||
includeScope: defaultScope ?? configuration.keys?.some((k) => k.key === "scope"),
|
||||
includeTimeRange: defaultTimeRange ?? configuration.keys?.some((k) => k.key === "timeRange"),
|
||||
});
|
||||
|
||||
const resetToPreApplied = () => {
|
||||
appliedFilters.value = getAllPreApplied();
|
||||
searchQuery.value = "";
|
||||
updateRoute();
|
||||
resetDefaultFilter();
|
||||
};
|
||||
|
||||
watch(searchQuery, () => {
|
||||
updateRoute();
|
||||
});
|
||||
|
||||
return {
|
||||
appliedFilters: computed(() => appliedFilters.value),
|
||||
searchQuery: computed({
|
||||
get: () => searchQuery.value,
|
||||
set: value => {
|
||||
searchQuery.value = value;
|
||||
updateRoute();
|
||||
}
|
||||
}),
|
||||
searchQuery,
|
||||
addFilter,
|
||||
removeFilter,
|
||||
updateFilter,
|
||||
|
||||
@@ -43,7 +43,7 @@ export function useValues(label: string | undefined, t?: ReturnType<typeof useI1
|
||||
{label: t("datepicker.last24hours"), value: "PT24H"},
|
||||
{label: t("datepicker.last48hours"), value: "PT48H"},
|
||||
{label: t("datepicker.last7days"), value: "PT168H"},
|
||||
{label: t("datepicker.last30days"), value: "PT720H"},
|
||||
{label: t("datepicker.last30days"), value: "P30D"},
|
||||
{label: t("datepicker.last365days"), value: "PT8760H"},
|
||||
];
|
||||
|
||||
|
||||
@@ -2,13 +2,15 @@ import {computed, ComputedRef} from "vue";
|
||||
import {FilterConfiguration} from "../utils/filterTypes";
|
||||
import {useI18n} from "vue-i18n";
|
||||
|
||||
export const useBlueprintFilter = (): ComputedRef<FilterConfiguration> => computed(() => {
|
||||
export const useBlueprintFilter = (): ComputedRef<FilterConfiguration> => {
|
||||
const {t} = useI18n();
|
||||
|
||||
return {
|
||||
title: t("filter.titles.blueprint_filters"),
|
||||
searchPlaceholder: t("filter.search_placeholders.search_blueprints"),
|
||||
keys: [
|
||||
]
|
||||
};
|
||||
});
|
||||
return computed(() => {
|
||||
return {
|
||||
title: t("filter.titles.blueprint_filters"),
|
||||
searchPlaceholder: t("filter.search_placeholders.search_blueprints"),
|
||||
keys: [
|
||||
]
|
||||
};
|
||||
});
|
||||
};
|
||||
@@ -7,152 +7,160 @@ import {useAuthStore} from "override/stores/auth";
|
||||
import {useValues} from "../composables/useValues";
|
||||
import {useI18n} from "vue-i18n";
|
||||
|
||||
export const useDashboardFilter = (): ComputedRef<FilterConfiguration> => computed(() => {
|
||||
export const useDashboardFilter = (): ComputedRef<FilterConfiguration> => {
|
||||
const {t} = useI18n();
|
||||
|
||||
return {
|
||||
title: t("filter.titles.dashboard_filters"),
|
||||
searchPlaceholder: t("filter.search_placeholders.search_dashboards"),
|
||||
keys: [
|
||||
{
|
||||
key: "namespace",
|
||||
label: t("filter.namespace.label"),
|
||||
description: t("filter.namespace.description"),
|
||||
comparators: [
|
||||
Comparators.IN,
|
||||
Comparators.NOT_IN,
|
||||
Comparators.CONTAINS,
|
||||
Comparators.PREFIX,
|
||||
],
|
||||
valueType: "multi-select",
|
||||
valueProvider: async () => {
|
||||
const user = useAuthStore().user;
|
||||
if (user && user.hasAnyActionOnAnyNamespace(permission.NAMESPACE, action.READ)) {
|
||||
const namespacesStore = useNamespacesStore();
|
||||
const namespaces = (await namespacesStore.loadAutocomplete()) as string[];
|
||||
return [...new Set(namespaces
|
||||
.flatMap(namespace => {
|
||||
return namespace.split(".").reduce((current: string[], part: string) => {
|
||||
const previousCombination = current?.[current.length - 1];
|
||||
return [...current, `${(previousCombination ? previousCombination + "." : "")}${part}`];
|
||||
}, []);
|
||||
}))].map(namespace => ({
|
||||
label: namespace,
|
||||
value: namespace
|
||||
}));
|
||||
return computed(() => {
|
||||
return {
|
||||
title: t("filter.titles.dashboard_filters"),
|
||||
searchPlaceholder: t("filter.search_placeholders.search_dashboards"),
|
||||
keys: [
|
||||
{
|
||||
key: "namespace",
|
||||
label: t("filter.namespace.label"),
|
||||
description: t("filter.namespace.description"),
|
||||
comparators: [
|
||||
Comparators.IN,
|
||||
Comparators.NOT_IN,
|
||||
Comparators.CONTAINS,
|
||||
Comparators.PREFIX,
|
||||
],
|
||||
valueType: "multi-select",
|
||||
valueProvider: async () => {
|
||||
const user = useAuthStore().user;
|
||||
if (user && user.hasAnyActionOnAnyNamespace(permission.NAMESPACE, action.READ)) {
|
||||
const namespacesStore = useNamespacesStore();
|
||||
const namespaces = (await namespacesStore.loadAutocomplete()) as string[];
|
||||
return [...new Set(namespaces
|
||||
.flatMap(namespace => {
|
||||
return namespace.split(".").reduce((current: string[], part: string) => {
|
||||
const previousCombination = current?.[current.length - 1];
|
||||
return [...current, `${(previousCombination ? previousCombination + "." : "")}${part}`];
|
||||
}, []);
|
||||
}))].map(namespace => ({
|
||||
label: namespace,
|
||||
value: namespace
|
||||
}));
|
||||
}
|
||||
return [];
|
||||
},
|
||||
searchable: true
|
||||
},
|
||||
{
|
||||
key: "timeRange",
|
||||
label: t("filter.timeRange_dashboard.label"),
|
||||
description: t("filter.timeRange_dashboard.description"),
|
||||
comparators: [Comparators.EQUALS],
|
||||
valueType: "select",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("dashboard");
|
||||
return VALUES.RELATIVE_DATE;
|
||||
}
|
||||
return [];
|
||||
},
|
||||
searchable: true
|
||||
},
|
||||
{
|
||||
key: "timeRange",
|
||||
label: t("filter.timeRange_dashboard.label"),
|
||||
description: t("filter.timeRange_dashboard.description"),
|
||||
comparators: [Comparators.EQUALS],
|
||||
valueType: "select",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("dashboard");
|
||||
return VALUES.RELATIVE_DATE;
|
||||
{
|
||||
key: "state",
|
||||
label: t("filter.state.label"),
|
||||
description: t("filter.state.description"),
|
||||
comparators: [Comparators.IN, Comparators.NOT_IN],
|
||||
valueType: "multi-select",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("executions");
|
||||
return VALUES.EXECUTION_STATES;
|
||||
},
|
||||
showComparatorSelection: true
|
||||
},
|
||||
{
|
||||
key: "labels",
|
||||
label: t("filter.labels.label"),
|
||||
description: t("filter.labels.description"),
|
||||
comparators: [Comparators.EQUALS, Comparators.NOT_EQUALS],
|
||||
valueType: "text",
|
||||
}
|
||||
},
|
||||
{
|
||||
key: "state",
|
||||
label: t("filter.state.label"),
|
||||
description: t("filter.state.description"),
|
||||
comparators: [Comparators.IN, Comparators.NOT_IN],
|
||||
valueType: "multi-select",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("executions");
|
||||
return VALUES.EXECUTION_STATES;
|
||||
},
|
||||
showComparatorSelection: true
|
||||
},
|
||||
{
|
||||
key: "labels",
|
||||
label: t("filter.labels.label"),
|
||||
description: t("filter.labels.description"),
|
||||
comparators: [Comparators.EQUALS, Comparators.NOT_EQUALS],
|
||||
valueType: "text",
|
||||
}
|
||||
]
|
||||
};
|
||||
});
|
||||
]
|
||||
};
|
||||
});
|
||||
};
|
||||
|
||||
export const useNamespaceDashboardFilter = (): ComputedRef<FilterConfiguration> => computed(() => {
|
||||
export const useNamespaceDashboardFilter = (): ComputedRef<FilterConfiguration> => {
|
||||
const {t} = useI18n();
|
||||
|
||||
return {
|
||||
title: t("filter.titles.namespace_dashboard_filters"),
|
||||
searchPlaceholder: t("filter.search_placeholders.search_dashboards"),
|
||||
keys: [
|
||||
{
|
||||
key: "flowId",
|
||||
label: t("filter.flowId.label"),
|
||||
description: t("filter.flowId.description"),
|
||||
comparators: [
|
||||
Comparators.EQUALS,
|
||||
Comparators.NOT_EQUALS,
|
||||
Comparators.CONTAINS,
|
||||
Comparators.STARTS_WITH,
|
||||
Comparators.ENDS_WITH,
|
||||
],
|
||||
valueType: "text",
|
||||
// valueProvider: async () => {
|
||||
// const flowStore = useFlowStore();
|
||||
return computed(() => {
|
||||
|
||||
// const flowIds = await flowStore.loadDistinctFlowIds();
|
||||
// return flowIds.map((flowId: string) => ({label: flowId, value: flowId}));
|
||||
// },
|
||||
searchable: true
|
||||
},
|
||||
{
|
||||
key: "timeRange",
|
||||
label: t("filter.timeRange_dashboard.label"),
|
||||
description: t("filter.timeRange_dashboard.description"),
|
||||
comparators: [Comparators.EQUALS],
|
||||
valueType: "select",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("dashboard");
|
||||
return VALUES.RELATIVE_DATE;
|
||||
return {
|
||||
title: t("filter.titles.namespace_dashboard_filters"),
|
||||
searchPlaceholder: t("filter.search_placeholders.search_dashboards"),
|
||||
keys: [
|
||||
{
|
||||
key: "flowId",
|
||||
label: t("filter.flowId.label"),
|
||||
description: t("filter.flowId.description"),
|
||||
comparators: [
|
||||
Comparators.EQUALS,
|
||||
Comparators.NOT_EQUALS,
|
||||
Comparators.CONTAINS,
|
||||
Comparators.STARTS_WITH,
|
||||
Comparators.ENDS_WITH,
|
||||
],
|
||||
valueType: "text",
|
||||
// valueProvider: async () => {
|
||||
// const flowStore = useFlowStore();
|
||||
|
||||
// const flowIds = await flowStore.loadDistinctFlowIds();
|
||||
// return flowIds.map((flowId: string) => ({label: flowId, value: flowId}));
|
||||
// },
|
||||
searchable: true
|
||||
},
|
||||
{
|
||||
key: "timeRange",
|
||||
label: t("filter.timeRange_dashboard.label"),
|
||||
description: t("filter.timeRange_dashboard.description"),
|
||||
comparators: [Comparators.EQUALS],
|
||||
valueType: "select",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("dashboard");
|
||||
return VALUES.RELATIVE_DATE;
|
||||
}
|
||||
},
|
||||
{
|
||||
key: "labels",
|
||||
label: t("filter.labels.label"),
|
||||
description: "Filter by labels",
|
||||
comparators: [Comparators.EQUALS, Comparators.NOT_EQUALS],
|
||||
valueType: "text",
|
||||
}
|
||||
},
|
||||
{
|
||||
key: "labels",
|
||||
label: t("filter.labels.label"),
|
||||
description: "Filter by labels",
|
||||
comparators: [Comparators.EQUALS, Comparators.NOT_EQUALS],
|
||||
valueType: "text",
|
||||
}
|
||||
]
|
||||
};
|
||||
});
|
||||
]
|
||||
};
|
||||
});
|
||||
};
|
||||
|
||||
export const useFlowDashboardFilter = (): ComputedRef<FilterConfiguration> => computed(() => {
|
||||
export const useFlowDashboardFilter = (): ComputedRef<FilterConfiguration> => {
|
||||
const {t} = useI18n();
|
||||
|
||||
return {
|
||||
title: t("filter.titles.flow_dashboard_filters"),
|
||||
searchPlaceholder: t("filter.search_placeholders.search_dashboards"),
|
||||
keys: [
|
||||
{
|
||||
key: "timeRange",
|
||||
label: t("filter.timeRange_dashboard.label"),
|
||||
description: t("filter.timeRange_dashboard.description"),
|
||||
comparators: [Comparators.EQUALS],
|
||||
valueType: "select",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("dashboard");
|
||||
return VALUES.RELATIVE_DATE;
|
||||
return computed(() => {
|
||||
|
||||
return {
|
||||
title: t("filter.titles.flow_dashboard_filters"),
|
||||
searchPlaceholder: t("filter.search_placeholders.search_dashboards"),
|
||||
keys: [
|
||||
{
|
||||
key: "timeRange",
|
||||
label: t("filter.timeRange_dashboard.label"),
|
||||
description: t("filter.timeRange_dashboard.description"),
|
||||
comparators: [Comparators.EQUALS],
|
||||
valueType: "select",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("dashboard");
|
||||
return VALUES.RELATIVE_DATE;
|
||||
}
|
||||
},
|
||||
{
|
||||
key: "labels",
|
||||
label: t("filter.labels.label"),
|
||||
description: t("filter.labels.description"),
|
||||
comparators: [Comparators.EQUALS, Comparators.NOT_EQUALS],
|
||||
valueType: "text",
|
||||
}
|
||||
},
|
||||
{
|
||||
key: "labels",
|
||||
label: t("filter.labels.label"),
|
||||
description: t("filter.labels.description"),
|
||||
comparators: [Comparators.EQUALS, Comparators.NOT_EQUALS],
|
||||
valueType: "text",
|
||||
}
|
||||
]
|
||||
};
|
||||
});
|
||||
]
|
||||
};
|
||||
});
|
||||
};
|
||||
@@ -6,137 +6,143 @@ import {useNamespacesStore} from "override/stores/namespaces";
|
||||
import {useAuthStore} from "override/stores/auth";
|
||||
import {useValues} from "../composables/useValues";
|
||||
import {useI18n} from "vue-i18n";
|
||||
import {useRoute} from "vue-router";
|
||||
|
||||
export const useExecutionFilter = (): ComputedRef<FilterConfiguration> => computed(() => {
|
||||
export const useExecutionFilter = (): ComputedRef<FilterConfiguration> => {
|
||||
const {t} = useI18n();
|
||||
const route = useRoute();
|
||||
|
||||
return {
|
||||
title: t("filter.titles.execution_filters"),
|
||||
searchPlaceholder: t("filter.search_placeholders.search_executions"),
|
||||
keys: [
|
||||
{
|
||||
key: "namespace",
|
||||
label: t("filter.namespace.label"),
|
||||
description: t("filter.namespace.description"),
|
||||
comparators: [
|
||||
Comparators.IN,
|
||||
Comparators.NOT_IN,
|
||||
Comparators.CONTAINS,
|
||||
Comparators.PREFIX,
|
||||
],
|
||||
valueType: "multi-select",
|
||||
valueProvider: async () => {
|
||||
const user = useAuthStore().user;
|
||||
if (user && user.hasAnyActionOnAnyNamespace(permission.NAMESPACE, action.READ)) {
|
||||
const namespacesStore = useNamespacesStore();
|
||||
const namespaces = (await namespacesStore.loadAutocomplete()) as string[];
|
||||
return [...new Set(namespaces
|
||||
.flatMap(namespace => {
|
||||
return namespace.split(".").reduce((current: string[], part: string) => {
|
||||
const previousCombination = current?.[current.length - 1];
|
||||
return [...current, `${(previousCombination ? previousCombination + "." : "")}${part}`];
|
||||
}, []);
|
||||
}))].map(namespace => ({
|
||||
label: namespace,
|
||||
value: namespace
|
||||
}));
|
||||
return computed(() => {
|
||||
return {
|
||||
title: t("filter.titles.execution_filters"),
|
||||
searchPlaceholder: t("filter.search_placeholders.search_executions"),
|
||||
keys: [
|
||||
...(route.name !== "namespaces/update" ? [
|
||||
{
|
||||
key: "namespace",
|
||||
label: t("filter.namespace.label"),
|
||||
description: t("filter.namespace.description"),
|
||||
comparators: [
|
||||
Comparators.IN,
|
||||
Comparators.NOT_IN,
|
||||
Comparators.CONTAINS,
|
||||
Comparators.PREFIX,
|
||||
],
|
||||
valueType: "multi-select" as const,
|
||||
valueProvider: async () => {
|
||||
const user = useAuthStore().user;
|
||||
if (user && user.hasAnyActionOnAnyNamespace(permission.NAMESPACE, action.READ)) {
|
||||
const namespacesStore = useNamespacesStore();
|
||||
const namespaces = (await namespacesStore.loadAutocomplete()) as string[];
|
||||
return [...new Set(namespaces
|
||||
.flatMap(namespace => {
|
||||
return namespace.split(".").reduce((current: string[], part: string) => {
|
||||
const previousCombination = current?.[current.length - 1];
|
||||
return [...current, `${(previousCombination ? previousCombination + "." : "")}${part}`];
|
||||
}, []);
|
||||
}))].map(namespace => ({
|
||||
label: namespace,
|
||||
value: namespace
|
||||
}));
|
||||
}
|
||||
return [];
|
||||
},
|
||||
searchable: true
|
||||
},
|
||||
] : []) as any,
|
||||
...(route.name !== "flows/update" ? [{
|
||||
key: "flowId",
|
||||
label: t("filter.flowId.label"),
|
||||
description: t("filter.flowId.description"),
|
||||
comparators: [
|
||||
Comparators.EQUALS,
|
||||
Comparators.NOT_EQUALS,
|
||||
Comparators.CONTAINS,
|
||||
Comparators.STARTS_WITH,
|
||||
Comparators.ENDS_WITH,
|
||||
],
|
||||
valueType: "text",
|
||||
}] : []) as any,
|
||||
{
|
||||
key: "kind",
|
||||
label: t("filter.kind.label"),
|
||||
description: t("filter.kind.description"),
|
||||
comparators: [Comparators.EQUALS],
|
||||
valueType: "radio",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("executions");
|
||||
return VALUES.KINDS;
|
||||
}
|
||||
return [];
|
||||
},
|
||||
searchable: true
|
||||
},
|
||||
{
|
||||
key: "flowId",
|
||||
label: t("filter.flowId.label"),
|
||||
description: t("filter.flowId.description"),
|
||||
comparators: [
|
||||
Comparators.EQUALS,
|
||||
Comparators.NOT_EQUALS,
|
||||
Comparators.CONTAINS,
|
||||
Comparators.STARTS_WITH,
|
||||
Comparators.ENDS_WITH,
|
||||
],
|
||||
valueType: "text",
|
||||
},
|
||||
{
|
||||
key: "kind",
|
||||
label: t("filter.kind.label"),
|
||||
description: t("filter.kind.description"),
|
||||
comparators: [Comparators.EQUALS],
|
||||
valueType: "radio",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("executions");
|
||||
return VALUES.KINDS;
|
||||
}
|
||||
},
|
||||
{
|
||||
key: "state",
|
||||
label: t("filter.state.label"),
|
||||
description: t("filter.state.description"),
|
||||
comparators: [Comparators.IN, Comparators.NOT_IN],
|
||||
valueType: "multi-select",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("executions");
|
||||
return VALUES.EXECUTION_STATES;
|
||||
{
|
||||
key: "state",
|
||||
label: t("filter.state.label"),
|
||||
description: t("filter.state.description"),
|
||||
comparators: [Comparators.IN, Comparators.NOT_IN],
|
||||
valueType: "multi-select",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("executions");
|
||||
return VALUES.EXECUTION_STATES;
|
||||
},
|
||||
showComparatorSelection: true,
|
||||
searchable: true
|
||||
},
|
||||
showComparatorSelection: true,
|
||||
searchable: true
|
||||
},
|
||||
{
|
||||
key: "scope",
|
||||
label: t("filter.scope.label"),
|
||||
description: t("filter.scope.description"),
|
||||
comparators: [Comparators.EQUALS, Comparators.NOT_EQUALS],
|
||||
valueType: "radio",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("executions");
|
||||
return VALUES.SCOPES;
|
||||
{
|
||||
key: "scope",
|
||||
label: t("filter.scope.label"),
|
||||
description: t("filter.scope.description"),
|
||||
comparators: [Comparators.EQUALS, Comparators.NOT_EQUALS],
|
||||
valueType: "radio",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("executions");
|
||||
return VALUES.SCOPES;
|
||||
},
|
||||
showComparatorSelection: false
|
||||
},
|
||||
showComparatorSelection: false
|
||||
},
|
||||
{
|
||||
key: "childFilter",
|
||||
label: t("filter.childFilter.label"),
|
||||
description: t("filter.childFilter.description"),
|
||||
comparators: [Comparators.EQUALS],
|
||||
valueType: "radio",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("executions");
|
||||
return VALUES.CHILDS;
|
||||
{
|
||||
key: "childFilter",
|
||||
label: t("filter.childFilter.label"),
|
||||
description: t("filter.childFilter.description"),
|
||||
comparators: [Comparators.EQUALS],
|
||||
valueType: "radio",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("executions");
|
||||
return VALUES.CHILDS;
|
||||
}
|
||||
},
|
||||
{
|
||||
key: "timeRange",
|
||||
label: t("filter.timeRange.label"),
|
||||
description: t("filter.timeRange.description"),
|
||||
comparators: [Comparators.EQUALS],
|
||||
valueType: "select",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("executions");
|
||||
return VALUES.RELATIVE_DATE;
|
||||
}
|
||||
},
|
||||
{
|
||||
key: "labels",
|
||||
label: t("filter.labels_execution.label"),
|
||||
description: t("filter.labels_execution.description"),
|
||||
comparators: [Comparators.EQUALS, Comparators.NOT_EQUALS],
|
||||
valueType: "text",
|
||||
},
|
||||
{
|
||||
key: "triggerExecutionId",
|
||||
label: t("filter.triggerExecutionId.label"),
|
||||
description: t("filter.triggerExecutionId.description"),
|
||||
comparators: [
|
||||
Comparators.EQUALS,
|
||||
Comparators.NOT_EQUALS,
|
||||
Comparators.CONTAINS,
|
||||
Comparators.STARTS_WITH,
|
||||
Comparators.ENDS_WITH
|
||||
],
|
||||
valueType: "text",
|
||||
searchable: true
|
||||
}
|
||||
},
|
||||
{
|
||||
key: "timeRange",
|
||||
label: t("filter.timeRange.label"),
|
||||
description: t("filter.timeRange.description"),
|
||||
comparators: [Comparators.EQUALS],
|
||||
valueType: "select",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("executions");
|
||||
return VALUES.RELATIVE_DATE;
|
||||
}
|
||||
},
|
||||
{
|
||||
key: "labels",
|
||||
label: t("filter.labels_execution.label"),
|
||||
description: t("filter.labels_execution.description"),
|
||||
comparators: [Comparators.EQUALS, Comparators.NOT_EQUALS],
|
||||
valueType: "text",
|
||||
},
|
||||
{
|
||||
key: "triggerExecutionId",
|
||||
label: t("filter.triggerExecutionId.label"),
|
||||
description: t("filter.triggerExecutionId.description"),
|
||||
comparators: [
|
||||
Comparators.EQUALS,
|
||||
Comparators.NOT_EQUALS,
|
||||
Comparators.CONTAINS,
|
||||
Comparators.STARTS_WITH,
|
||||
Comparators.ENDS_WITH
|
||||
],
|
||||
valueType: "text",
|
||||
searchable: true
|
||||
}
|
||||
]
|
||||
};
|
||||
});
|
||||
]
|
||||
};
|
||||
});
|
||||
};
|
||||
@@ -3,90 +3,92 @@ import {FilterConfiguration, Comparators} from "../utils/filterTypes";
|
||||
import {useValues} from "../composables/useValues";
|
||||
import {useI18n} from "vue-i18n";
|
||||
|
||||
export const useFlowExecutionFilter = (): ComputedRef<FilterConfiguration> => computed(() => {
|
||||
export const useFlowExecutionFilter = (): ComputedRef<FilterConfiguration> => {
|
||||
const {t} = useI18n();
|
||||
|
||||
return {
|
||||
title: t("filter.titles.flow_execution_filters"),
|
||||
searchPlaceholder: t("filter.search_placeholders.search_executions"),
|
||||
keys: [
|
||||
{
|
||||
key: "state",
|
||||
label: t("filter.state.label"),
|
||||
description: t("filter.state.description"),
|
||||
comparators: [Comparators.IN, Comparators.NOT_IN],
|
||||
valueType: "multi-select",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("executions");
|
||||
return VALUES.EXECUTION_STATES;
|
||||
}
|
||||
},
|
||||
{
|
||||
key: "scope",
|
||||
label: t("filter.scope.label"),
|
||||
description: t("filter.scope.description"),
|
||||
comparators: [Comparators.EQUALS, Comparators.NOT_EQUALS],
|
||||
valueType: "radio",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("executions");
|
||||
return VALUES.SCOPES;
|
||||
return computed(() => {
|
||||
return {
|
||||
title: t("filter.titles.flow_execution_filters"),
|
||||
searchPlaceholder: t("filter.search_placeholders.search_executions"),
|
||||
keys: [
|
||||
{
|
||||
key: "state",
|
||||
label: t("filter.state.label"),
|
||||
description: t("filter.state.description"),
|
||||
comparators: [Comparators.IN, Comparators.NOT_IN],
|
||||
valueType: "multi-select",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("executions");
|
||||
return VALUES.EXECUTION_STATES;
|
||||
}
|
||||
},
|
||||
showComparatorSelection: false
|
||||
},
|
||||
{
|
||||
key: "childFilter",
|
||||
label: t("filter.childFilter.label"),
|
||||
description: t("filter.childFilter.description"),
|
||||
comparators: [Comparators.EQUALS],
|
||||
valueType: "radio",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("executions");
|
||||
return VALUES.CHILDS;
|
||||
{
|
||||
key: "scope",
|
||||
label: t("filter.scope.label"),
|
||||
description: t("filter.scope.description"),
|
||||
comparators: [Comparators.EQUALS, Comparators.NOT_EQUALS],
|
||||
valueType: "radio",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("executions");
|
||||
return VALUES.SCOPES;
|
||||
},
|
||||
showComparatorSelection: false
|
||||
},
|
||||
{
|
||||
key: "childFilter",
|
||||
label: t("filter.childFilter.label"),
|
||||
description: t("filter.childFilter.description"),
|
||||
comparators: [Comparators.EQUALS],
|
||||
valueType: "radio",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("executions");
|
||||
return VALUES.CHILDS;
|
||||
}
|
||||
},
|
||||
{
|
||||
key: "kind",
|
||||
label: t("filter.kind.label"),
|
||||
description: t("filter.kind.description"),
|
||||
comparators: [Comparators.EQUALS],
|
||||
valueType: "radio",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("executions");
|
||||
return VALUES.KINDS;
|
||||
}
|
||||
},
|
||||
{
|
||||
key: "timeRange",
|
||||
label: t("filter.timeRange.label"),
|
||||
description: t("filter.timeRange.description"),
|
||||
comparators: [Comparators.EQUALS],
|
||||
valueType: "select",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("executions");
|
||||
return VALUES.RELATIVE_DATE;
|
||||
}
|
||||
},
|
||||
{
|
||||
key: "labels",
|
||||
label: t("filter.labels_execution.label"),
|
||||
description: t("filter.labels_execution.description"),
|
||||
comparators: [Comparators.EQUALS, Comparators.NOT_EQUALS],
|
||||
valueType: "text",
|
||||
},
|
||||
{
|
||||
key: "triggerExecutionId",
|
||||
label: t("filter.triggerExecutionId.label"),
|
||||
description: t("filter.triggerExecutionId.description"),
|
||||
comparators: [
|
||||
Comparators.EQUALS,
|
||||
Comparators.NOT_EQUALS,
|
||||
Comparators.CONTAINS,
|
||||
Comparators.STARTS_WITH,
|
||||
Comparators.ENDS_WITH
|
||||
],
|
||||
valueType: "text",
|
||||
searchable: true
|
||||
}
|
||||
},
|
||||
{
|
||||
key: "kind",
|
||||
label: t("filter.kind.label"),
|
||||
description: t("filter.kind.description"),
|
||||
comparators: [Comparators.EQUALS],
|
||||
valueType: "radio",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("executions");
|
||||
return VALUES.KINDS;
|
||||
}
|
||||
},
|
||||
{
|
||||
key: "timeRange",
|
||||
label: t("filter.timeRange.label"),
|
||||
description: t("filter.timeRange.description"),
|
||||
comparators: [Comparators.EQUALS],
|
||||
valueType: "select",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("executions");
|
||||
return VALUES.RELATIVE_DATE;
|
||||
}
|
||||
},
|
||||
{
|
||||
key: "labels",
|
||||
label: t("filter.labels_execution.label"),
|
||||
description: t("filter.labels_execution.description"),
|
||||
comparators: [Comparators.EQUALS, Comparators.NOT_EQUALS],
|
||||
valueType: "text",
|
||||
},
|
||||
{
|
||||
key: "triggerExecutionId",
|
||||
label: t("filter.triggerExecutionId.label"),
|
||||
description: t("filter.triggerExecutionId.description"),
|
||||
comparators: [
|
||||
Comparators.EQUALS,
|
||||
Comparators.NOT_EQUALS,
|
||||
Comparators.CONTAINS,
|
||||
Comparators.STARTS_WITH,
|
||||
Comparators.ENDS_WITH
|
||||
],
|
||||
valueType: "text",
|
||||
searchable: true
|
||||
}
|
||||
]
|
||||
};
|
||||
});
|
||||
]
|
||||
};
|
||||
});
|
||||
};
|
||||
@@ -6,45 +6,49 @@ import {useNamespacesStore} from "override/stores/namespaces";
|
||||
import {useAuthStore} from "override/stores/auth";
|
||||
import {useValues} from "../composables/useValues";
|
||||
import {useI18n} from "vue-i18n";
|
||||
import {useRoute} from "vue-router";
|
||||
|
||||
export const useFlowFilter = (): ComputedRef<FilterConfiguration> => computed(() => {
|
||||
export const useFlowFilter = (): ComputedRef<FilterConfiguration> => {
|
||||
const {t} = useI18n();
|
||||
const route = useRoute();
|
||||
|
||||
return {
|
||||
return computed(() => ({
|
||||
title: t("filter.titles.flow_filters"),
|
||||
searchPlaceholder: t("filter.search_placeholders.search_flows"),
|
||||
keys: [
|
||||
{
|
||||
key: "namespace",
|
||||
label: t("filter.namespace.label"),
|
||||
description: t("filter.namespace.description"),
|
||||
comparators: [
|
||||
Comparators.IN,
|
||||
Comparators.NOT_IN,
|
||||
Comparators.CONTAINS,
|
||||
Comparators.PREFIX,
|
||||
],
|
||||
valueType: "multi-select",
|
||||
valueProvider: async () => {
|
||||
const user = useAuthStore().user;
|
||||
if (user && user.hasAnyActionOnAnyNamespace(permission.NAMESPACE, action.READ)) {
|
||||
const namespacesStore = useNamespacesStore();
|
||||
const namespaces = (await namespacesStore.loadAutocomplete()) as string[];
|
||||
return [...new Set(namespaces
|
||||
.flatMap(namespace => {
|
||||
return namespace.split(".").reduce((current: string[], part: string) => {
|
||||
const previousCombination = current?.[current.length - 1];
|
||||
return [...current, `${(previousCombination ? previousCombination + "." : "")}${part}`];
|
||||
}, []);
|
||||
}))].map(namespace => ({
|
||||
label: namespace,
|
||||
value: namespace
|
||||
}));
|
||||
}
|
||||
return [];
|
||||
...(route.name !== "namespaces/update" ? [
|
||||
{
|
||||
key: "namespace",
|
||||
label: t("filter.namespace.label"),
|
||||
description: t("filter.namespace.description"),
|
||||
comparators: [
|
||||
Comparators.IN,
|
||||
Comparators.NOT_IN,
|
||||
Comparators.CONTAINS,
|
||||
Comparators.PREFIX,
|
||||
],
|
||||
valueType: "multi-select" as const,
|
||||
valueProvider: async () => {
|
||||
const user = useAuthStore().user;
|
||||
if (user && user.hasAnyActionOnAnyNamespace(permission.NAMESPACE, action.READ)) {
|
||||
const namespacesStore = useNamespacesStore();
|
||||
const namespaces = (await namespacesStore.loadAutocomplete()) as string[];
|
||||
return [...new Set(namespaces
|
||||
.flatMap(namespace => {
|
||||
return namespace.split(".").reduce((current: string[], part: string) => {
|
||||
const previousCombination = current?.[current.length - 1];
|
||||
return [...current, `${(previousCombination ? previousCombination + "." : "")}${part}`];
|
||||
}, []);
|
||||
}))].map(namespace => ({
|
||||
label: namespace,
|
||||
value: namespace
|
||||
}));
|
||||
}
|
||||
return [];
|
||||
},
|
||||
searchable: true
|
||||
},
|
||||
searchable: true
|
||||
},
|
||||
] : []) as any,
|
||||
{
|
||||
key: "scope",
|
||||
label: t("filter.scope_flow.label"),
|
||||
@@ -65,5 +69,5 @@ export const useFlowFilter = (): ComputedRef<FilterConfiguration> => computed(()
|
||||
valueType: "text",
|
||||
},
|
||||
]
|
||||
};
|
||||
});
|
||||
}));
|
||||
};
|
||||
@@ -3,47 +3,53 @@ import {Comparators, FilterConfiguration} from "../utils/filterTypes";
|
||||
import {useI18n} from "vue-i18n";
|
||||
import {useNamespacesStore} from "override/stores/namespaces";
|
||||
import {useAuthStore} from "override/stores/auth";
|
||||
import {useRoute} from "vue-router";
|
||||
import permission from "../../../models/permission";
|
||||
import action from "../../../models/action";
|
||||
|
||||
export const useKvFilter = (): ComputedRef<FilterConfiguration> => computed(() => {
|
||||
export const useKvFilter = (): ComputedRef<FilterConfiguration> => {
|
||||
const {t} = useI18n();
|
||||
const route = useRoute();
|
||||
|
||||
return {
|
||||
title: t("filter.titles.kv_filters"),
|
||||
searchPlaceholder: t("filter.search_placeholders.search_kv"),
|
||||
keys: [
|
||||
{
|
||||
key: "namespace",
|
||||
label: t("filter.namespace.label"),
|
||||
description: t("filter.namespace.description"),
|
||||
comparators: [
|
||||
Comparators.IN,
|
||||
Comparators.NOT_IN,
|
||||
Comparators.CONTAINS,
|
||||
Comparators.PREFIX,
|
||||
],
|
||||
valueType: "multi-select",
|
||||
valueProvider: async () => {
|
||||
const user = useAuthStore().user;
|
||||
if (user && user.hasAnyActionOnAnyNamespace(permission.NAMESPACE, action.READ)) {
|
||||
const namespacesStore = useNamespacesStore();
|
||||
const namespaces = (await namespacesStore.loadAutocomplete()) as string[];
|
||||
return [...new Set(namespaces
|
||||
.flatMap(namespace => {
|
||||
return namespace.split(".").reduce((current: string[], part: string) => {
|
||||
const previousCombination = current?.[current.length - 1];
|
||||
return [...current, `${(previousCombination ? previousCombination + "." : "")}${part}`];
|
||||
}, []);
|
||||
}))].map(namespace => ({
|
||||
label: namespace,
|
||||
value: namespace
|
||||
}));
|
||||
return computed(() => {
|
||||
return {
|
||||
title: t("filter.titles.kv_filters"),
|
||||
searchPlaceholder: t("filter.search_placeholders.search_kv"),
|
||||
keys: [
|
||||
...(route.name !== "namespaces/update" ? [
|
||||
{
|
||||
key: "namespace",
|
||||
label: t("filter.namespace.label"),
|
||||
description: t("filter.namespace.description"),
|
||||
comparators: [
|
||||
Comparators.IN,
|
||||
Comparators.NOT_IN,
|
||||
Comparators.CONTAINS,
|
||||
Comparators.PREFIX,
|
||||
],
|
||||
valueType: "multi-select" as const,
|
||||
valueProvider: async () => {
|
||||
const user = useAuthStore().user;
|
||||
if (user && user.hasAnyActionOnAnyNamespace(permission.NAMESPACE, action.READ)) {
|
||||
const namespacesStore = useNamespacesStore();
|
||||
const namespaces = (await namespacesStore.loadAutocomplete()) as string[];
|
||||
return [...new Set(namespaces
|
||||
.flatMap(namespace => {
|
||||
return namespace.split(".").reduce((current: string[], part: string) => {
|
||||
const previousCombination = current?.[current.length - 1];
|
||||
return [...current, `${(previousCombination ? previousCombination + "." : "")}${part}`];
|
||||
}, []);
|
||||
}))].map(namespace => ({
|
||||
label: namespace,
|
||||
value: namespace
|
||||
}));
|
||||
}
|
||||
return [];
|
||||
},
|
||||
searchable: true
|
||||
}
|
||||
return [];
|
||||
},
|
||||
searchable: true
|
||||
}
|
||||
],
|
||||
};
|
||||
});
|
||||
] : []) as any,
|
||||
],
|
||||
};
|
||||
});
|
||||
};
|
||||
|
||||
@@ -3,24 +3,26 @@ import {FilterConfiguration, Comparators} from "../utils/filterTypes";
|
||||
import {useValues} from "../composables/useValues";
|
||||
import {useI18n} from "vue-i18n";
|
||||
|
||||
export const useLogExecutionsFilter = (): ComputedRef<FilterConfiguration> => computed(() => {
|
||||
export const useLogExecutionsFilter = (): ComputedRef<FilterConfiguration> => {
|
||||
const {t} = useI18n();
|
||||
|
||||
return {
|
||||
title: t("filter.titles.log_filters"),
|
||||
searchPlaceholder: t("filter.search_placeholders.search_logs"),
|
||||
keys: [
|
||||
{
|
||||
key: "level",
|
||||
label: t("filter.level.label"),
|
||||
description: t("filter.level.description"),
|
||||
comparators: [Comparators.EQUALS, Comparators.NOT_EQUALS],
|
||||
valueType: "select",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("logs");
|
||||
return VALUES.LEVELS;
|
||||
},
|
||||
}
|
||||
]
|
||||
};
|
||||
});
|
||||
return computed(() => {
|
||||
return {
|
||||
title: t("filter.titles.log_filters"),
|
||||
searchPlaceholder: t("filter.search_placeholders.search_logs"),
|
||||
keys: [
|
||||
{
|
||||
key: "level",
|
||||
label: t("filter.level.label"),
|
||||
description: t("filter.level.description"),
|
||||
comparators: [Comparators.EQUALS, Comparators.NOT_EQUALS],
|
||||
valueType: "select",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("logs");
|
||||
return VALUES.LEVELS;
|
||||
},
|
||||
}
|
||||
]
|
||||
};
|
||||
});
|
||||
};
|
||||
@@ -6,108 +6,114 @@ import {useNamespacesStore} from "override/stores/namespaces";
|
||||
import {useAuthStore} from "override/stores/auth";
|
||||
import {useValues} from "../composables/useValues";
|
||||
import {useI18n} from "vue-i18n";
|
||||
import {useRoute} from "vue-router";
|
||||
|
||||
export const useLogFilter = (): ComputedRef<FilterConfiguration> => computed(() => {
|
||||
export const useLogFilter = (): ComputedRef<FilterConfiguration> => {
|
||||
const {t} = useI18n();
|
||||
|
||||
return {
|
||||
title: t("filter.titles.log_filters"),
|
||||
searchPlaceholder: t("filter.search_placeholders.search_logs"),
|
||||
keys: [
|
||||
{
|
||||
key: "namespace",
|
||||
label: t("filter.namespace.label"),
|
||||
description: t("filter.namespace.description"),
|
||||
comparators: [
|
||||
Comparators.IN,
|
||||
Comparators.NOT_IN,
|
||||
Comparators.CONTAINS,
|
||||
Comparators.PREFIX,
|
||||
],
|
||||
valueType: "multi-select",
|
||||
valueProvider: async () => {
|
||||
const user = useAuthStore().user;
|
||||
if (user && user.hasAnyActionOnAnyNamespace(permission.NAMESPACE, action.READ)) {
|
||||
const namespacesStore = useNamespacesStore();
|
||||
const namespaces = (await namespacesStore.loadAutocomplete()) as string[];
|
||||
return [...new Set(namespaces
|
||||
.flatMap(namespace => {
|
||||
return namespace.split(".").reduce((current: string[], part: string) => {
|
||||
const previousCombination = current?.[current.length - 1];
|
||||
return [...current, `${(previousCombination ? previousCombination + "." : "")}${part}`];
|
||||
}, []);
|
||||
}))].map(namespace => ({
|
||||
label: namespace,
|
||||
value: namespace
|
||||
}));
|
||||
}
|
||||
return [];
|
||||
},
|
||||
searchable: true
|
||||
},
|
||||
{
|
||||
key: "level",
|
||||
label: t("filter.level.label"),
|
||||
description: t("filter.level.description"),
|
||||
comparators: [Comparators.EQUALS, Comparators.NOT_EQUALS],
|
||||
valueType: "select",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("logs");
|
||||
return VALUES.LEVELS;
|
||||
},
|
||||
showComparatorSelection: true
|
||||
},
|
||||
{
|
||||
key: "timeRange",
|
||||
label: t("filter.timeRange_log.label"),
|
||||
description: t("filter.timeRange_log.description"),
|
||||
comparators: [Comparators.EQUALS],
|
||||
valueType: "select",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("logs");
|
||||
return VALUES.RELATIVE_DATE;
|
||||
}
|
||||
},
|
||||
{
|
||||
key: "scope",
|
||||
label: t("filter.scope_log.label"),
|
||||
description: t("filter.scope_log.description"),
|
||||
comparators: [Comparators.EQUALS, Comparators.NOT_EQUALS],
|
||||
valueType: "radio",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("logs");
|
||||
return VALUES.SCOPES;
|
||||
},
|
||||
showComparatorSelection: false
|
||||
},
|
||||
{
|
||||
key: "triggerId",
|
||||
label: t("filter.triggerId.label"),
|
||||
description: t("filter.triggerId.description"),
|
||||
comparators: [
|
||||
// Comparators.IN,
|
||||
// Comparators.NOT_IN,
|
||||
Comparators.EQUALS,
|
||||
Comparators.NOT_EQUALS,
|
||||
Comparators.CONTAINS,
|
||||
Comparators.STARTS_WITH,
|
||||
Comparators.ENDS_WITH
|
||||
],
|
||||
valueType: "text",
|
||||
},
|
||||
{
|
||||
key: "flowId",
|
||||
label: t("filter.flowId.label"),
|
||||
description: t("filter.flowId.description"),
|
||||
comparators: [
|
||||
Comparators.EQUALS,
|
||||
Comparators.NOT_EQUALS,
|
||||
Comparators.CONTAINS,
|
||||
Comparators.STARTS_WITH,
|
||||
Comparators.ENDS_WITH,
|
||||
],
|
||||
valueType: "text",
|
||||
},
|
||||
]
|
||||
};
|
||||
});
|
||||
const route = useRoute();
|
||||
|
||||
return computed(() => {
|
||||
return {
|
||||
title: t("filter.titles.log_filters"),
|
||||
searchPlaceholder: t("filter.search_placeholders.search_logs"),
|
||||
keys: [
|
||||
...(route.name !== "namespaces/update" && route.name !== "flows/update" ? [
|
||||
{
|
||||
key: "namespace",
|
||||
label: t("filter.namespace.label"),
|
||||
description: t("filter.namespace.description"),
|
||||
comparators: [
|
||||
Comparators.IN,
|
||||
Comparators.NOT_IN,
|
||||
Comparators.CONTAINS,
|
||||
Comparators.PREFIX,
|
||||
],
|
||||
valueType: "multi-select" as const,
|
||||
valueProvider: async () => {
|
||||
const user = useAuthStore().user;
|
||||
if (user && user.hasAnyActionOnAnyNamespace(permission.NAMESPACE, action.READ)) {
|
||||
const namespacesStore = useNamespacesStore();
|
||||
const namespaces = (await namespacesStore.loadAutocomplete()) as string[];
|
||||
return [...new Set(namespaces
|
||||
.flatMap(namespace => {
|
||||
return namespace.split(".").reduce((current: string[], part: string) => {
|
||||
const previousCombination = current?.[current.length - 1];
|
||||
return [...current, `${(previousCombination ? previousCombination + "." : "")}${part}`];
|
||||
}, []);
|
||||
}))].map(namespace => ({
|
||||
label: namespace,
|
||||
value: namespace
|
||||
}));
|
||||
}
|
||||
return [];
|
||||
},
|
||||
searchable: true
|
||||
},
|
||||
] : []) as any,
|
||||
{
|
||||
key: "level",
|
||||
label: t("filter.level.label"),
|
||||
description: t("filter.level.description"),
|
||||
comparators: [Comparators.EQUALS, Comparators.NOT_EQUALS],
|
||||
valueType: "select",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("logs");
|
||||
return VALUES.LEVELS;
|
||||
},
|
||||
showComparatorSelection: true
|
||||
},
|
||||
{
|
||||
key: "timeRange",
|
||||
label: t("filter.timeRange_log.label"),
|
||||
description: t("filter.timeRange_log.description"),
|
||||
comparators: [Comparators.EQUALS],
|
||||
valueType: "select",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("logs");
|
||||
return VALUES.RELATIVE_DATE;
|
||||
}
|
||||
},
|
||||
{
|
||||
key: "scope",
|
||||
label: t("filter.scope_log.label"),
|
||||
description: t("filter.scope_log.description"),
|
||||
comparators: [Comparators.EQUALS, Comparators.NOT_EQUALS],
|
||||
valueType: "radio",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("logs");
|
||||
return VALUES.SCOPES;
|
||||
},
|
||||
showComparatorSelection: false
|
||||
},
|
||||
{
|
||||
key: "triggerId",
|
||||
label: t("filter.triggerId.label"),
|
||||
description: t("filter.triggerId.description"),
|
||||
comparators: [
|
||||
// Comparators.IN,
|
||||
// Comparators.NOT_IN,
|
||||
Comparators.EQUALS,
|
||||
Comparators.NOT_EQUALS,
|
||||
Comparators.CONTAINS,
|
||||
Comparators.STARTS_WITH,
|
||||
Comparators.ENDS_WITH
|
||||
],
|
||||
valueType: "text",
|
||||
},
|
||||
...(route.name !== "flows/update" ? [{
|
||||
key: "flowId",
|
||||
label: t("filter.flowId.label"),
|
||||
description: t("filter.flowId.description"),
|
||||
comparators: [
|
||||
Comparators.EQUALS,
|
||||
Comparators.NOT_EQUALS,
|
||||
Comparators.CONTAINS,
|
||||
Comparators.STARTS_WITH,
|
||||
Comparators.ENDS_WITH,
|
||||
],
|
||||
valueType: "text",
|
||||
}] : []) as any,
|
||||
]
|
||||
};
|
||||
});
|
||||
};
|
||||
@@ -5,94 +5,98 @@ import {useFlowStore} from "../../../stores/flow";
|
||||
import {useI18n} from "vue-i18n";
|
||||
import {useExecutionsStore} from "../../../stores/executions";
|
||||
|
||||
export const useMetricFilter = (): ComputedRef<FilterConfiguration> => computed(() => {
|
||||
export const useMetricFilter = (): ComputedRef<FilterConfiguration> => {
|
||||
const {t} = useI18n();
|
||||
|
||||
return {
|
||||
title: t("filter.titles.metric_filters"),
|
||||
searchPlaceholder: t("filter.search_placeholders.search_metrics"),
|
||||
keys: [
|
||||
{
|
||||
key: "metric",
|
||||
label: t("filter.metric.label"),
|
||||
description: t("filter.metric.description"),
|
||||
comparators: [Comparators.EQUALS],
|
||||
valueType: "select",
|
||||
valueProvider: async () => {
|
||||
const executionsStore = useExecutionsStore();
|
||||
const taskRuns = executionsStore.execution?.taskRunList ?? [];
|
||||
return taskRuns.map(taskRun => ({
|
||||
label: taskRun.taskId + (taskRun.value ? ` - ${taskRun.value}` : ""),
|
||||
value: taskRun.id
|
||||
}));
|
||||
},
|
||||
searchable: true
|
||||
}
|
||||
]
|
||||
};
|
||||
});
|
||||
return computed(() => {
|
||||
return {
|
||||
title: t("filter.titles.metric_filters"),
|
||||
searchPlaceholder: t("filter.search_placeholders.search_metrics"),
|
||||
keys: [
|
||||
{
|
||||
key: "metric",
|
||||
label: t("filter.metric.label"),
|
||||
description: t("filter.metric.description"),
|
||||
comparators: [Comparators.EQUALS],
|
||||
valueType: "select",
|
||||
valueProvider: async () => {
|
||||
const executionsStore = useExecutionsStore();
|
||||
const taskRuns = executionsStore.execution?.taskRunList ?? [];
|
||||
return taskRuns.map(taskRun => ({
|
||||
label: taskRun.taskId + (taskRun.value ? ` - ${taskRun.value}` : ""),
|
||||
value: taskRun.id
|
||||
}));
|
||||
},
|
||||
searchable: true
|
||||
}
|
||||
]
|
||||
};
|
||||
});
|
||||
};
|
||||
|
||||
export const useFlowMetricFilter = (): ComputedRef<FilterConfiguration> => computed(() => {
|
||||
export const useFlowMetricFilter = (): ComputedRef<FilterConfiguration> => {
|
||||
const {t} = useI18n();
|
||||
|
||||
return {
|
||||
title: t("filter.titles.flow_metric_filters"),
|
||||
searchPlaceholder: t("filter.search_placeholders.search_metrics"),
|
||||
keys: [
|
||||
{
|
||||
key: "task",
|
||||
label: t("filter.task.label"),
|
||||
description: t("filter.task.description"),
|
||||
comparators: [
|
||||
Comparators.EQUALS,
|
||||
],
|
||||
valueType: "select",
|
||||
valueProvider: async () => {
|
||||
return (useFlowStore().tasksWithMetrics as string[]).map((value) => ({
|
||||
label: value,
|
||||
value
|
||||
}));
|
||||
return computed(() => {
|
||||
return {
|
||||
title: t("filter.titles.flow_metric_filters"),
|
||||
searchPlaceholder: t("filter.search_placeholders.search_metrics"),
|
||||
keys: [
|
||||
{
|
||||
key: "task",
|
||||
label: t("filter.task.label"),
|
||||
description: t("filter.task.description"),
|
||||
comparators: [
|
||||
Comparators.EQUALS,
|
||||
],
|
||||
valueType: "select",
|
||||
valueProvider: async () => {
|
||||
return (useFlowStore().tasksWithMetrics as string[]).map((value) => ({
|
||||
label: value,
|
||||
value
|
||||
}));
|
||||
},
|
||||
searchable: true
|
||||
},
|
||||
searchable: true
|
||||
},
|
||||
{
|
||||
key: "metric",
|
||||
label: t("filter.metric.label"),
|
||||
description: t("filter.metric.description"),
|
||||
comparators: [
|
||||
Comparators.EQUALS
|
||||
],
|
||||
valueType: "select",
|
||||
valueProvider: async () => {
|
||||
return (useFlowStore().metrics as string[]).map((value) => ({
|
||||
label: value,
|
||||
value
|
||||
}));
|
||||
{
|
||||
key: "metric",
|
||||
label: t("filter.metric.label"),
|
||||
description: t("filter.metric.description"),
|
||||
comparators: [
|
||||
Comparators.EQUALS
|
||||
],
|
||||
valueType: "select",
|
||||
valueProvider: async () => {
|
||||
return (useFlowStore().metrics as string[]).map((value) => ({
|
||||
label: value,
|
||||
value
|
||||
}));
|
||||
},
|
||||
searchable: true
|
||||
},
|
||||
searchable: true
|
||||
},
|
||||
{
|
||||
key: "aggregation",
|
||||
label: t("filter.aggregation.label"),
|
||||
description: t("filter.aggregation.description"),
|
||||
comparators: [Comparators.EQUALS],
|
||||
valueType: "select",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("metrics");
|
||||
return [...VALUES.AGGREGATIONS, {label: "Count", value: "COUNT"}];
|
||||
{
|
||||
key: "aggregation",
|
||||
label: t("filter.aggregation.label"),
|
||||
description: t("filter.aggregation.description"),
|
||||
comparators: [Comparators.EQUALS],
|
||||
valueType: "select",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("metrics");
|
||||
return [...VALUES.AGGREGATIONS, {label: "Count", value: "COUNT"}];
|
||||
}
|
||||
},
|
||||
{
|
||||
key: "timeRange",
|
||||
label: t("filter.timeRange_metric.label"),
|
||||
description: t("filter.timeRange_metric.description"),
|
||||
comparators: [Comparators.EQUALS],
|
||||
valueType: "select",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("metrics");
|
||||
return VALUES.RELATIVE_DATE;
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
key: "timeRange",
|
||||
label: t("filter.timeRange_metric.label"),
|
||||
description: t("filter.timeRange_metric.description"),
|
||||
comparators: [Comparators.EQUALS],
|
||||
valueType: "select",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("metrics");
|
||||
return VALUES.RELATIVE_DATE;
|
||||
}
|
||||
}
|
||||
]
|
||||
};
|
||||
});
|
||||
]
|
||||
};
|
||||
});
|
||||
};
|
||||
@@ -2,12 +2,14 @@ import {computed, ComputedRef} from "vue";
|
||||
import {FilterConfiguration} from "../../../components/filter/utils/filterTypes";
|
||||
import {useI18n} from "vue-i18n";
|
||||
|
||||
export const useNamespacesFilter = (): ComputedRef<FilterConfiguration> => computed(() => {
|
||||
export const useNamespacesFilter = (): ComputedRef<FilterConfiguration> => {
|
||||
const {t} = useI18n();
|
||||
|
||||
return {
|
||||
title: t("filter.titles.namespaces_filters"),
|
||||
searchPlaceholder: t("filter.search_placeholders.search_namespaces"),
|
||||
keys: [],
|
||||
};
|
||||
});
|
||||
return computed(() => {
|
||||
return {
|
||||
title: t("filter.titles.namespace_filters"),
|
||||
searchPlaceholder: t("filter.search_placeholders.search_namespaces"),
|
||||
keys: [],
|
||||
};
|
||||
});
|
||||
};
|
||||
|
||||
@@ -2,12 +2,14 @@ import {computed, ComputedRef} from "vue";
|
||||
import {FilterConfiguration} from "../../../components/filter/utils/filterTypes";
|
||||
import {useI18n} from "vue-i18n";
|
||||
|
||||
export const usePluginFilter = (): ComputedRef<FilterConfiguration> => computed(() => {
|
||||
export const usePluginFilter = (): ComputedRef<FilterConfiguration> => {
|
||||
const {t} = useI18n();
|
||||
|
||||
return {
|
||||
title: t("filter.titles.plugin_filters"),
|
||||
searchPlaceholder: t("filter.search_placeholders.search_plugins", {count: 900}),
|
||||
keys: [],
|
||||
};
|
||||
});
|
||||
return computed(() => {
|
||||
return {
|
||||
title: t("filter.titles.plugin_filters"),
|
||||
searchPlaceholder: t("filter.search_placeholders.search_plugins", {count: 900}),
|
||||
keys: [],
|
||||
};
|
||||
});
|
||||
};
|
||||
@@ -5,45 +5,51 @@ import action from "../../../models/action";
|
||||
import {useNamespacesStore} from "override/stores/namespaces";
|
||||
import {useAuthStore} from "override/stores/auth";
|
||||
import {useI18n} from "vue-i18n";
|
||||
import {useRoute} from "vue-router";
|
||||
|
||||
export const useSecretsFilter = (): ComputedRef<FilterConfiguration> => computed(() => {
|
||||
export const useSecretsFilter = (): ComputedRef<FilterConfiguration> => {
|
||||
const {t} = useI18n();
|
||||
const route = useRoute();
|
||||
|
||||
return {
|
||||
title: t("filter.titles.secret_filters"),
|
||||
searchPlaceholder: t("filter.search_placeholders.search_secrets"),
|
||||
keys: [
|
||||
{
|
||||
key: "namespace",
|
||||
label: t("filter.namespace.label"),
|
||||
description: t("filter.namespace.description"),
|
||||
comparators: [
|
||||
Comparators.IN,
|
||||
Comparators.NOT_IN,
|
||||
Comparators.CONTAINS,
|
||||
Comparators.PREFIX,
|
||||
],
|
||||
valueType: "multi-select",
|
||||
valueProvider: async () => {
|
||||
const user = useAuthStore().user;
|
||||
if (user && user.hasAnyActionOnAnyNamespace(permission.NAMESPACE, action.READ)) {
|
||||
const namespacesStore = useNamespacesStore();
|
||||
const namespaces = (await namespacesStore.loadAutocomplete()) as string[];
|
||||
return [...new Set(namespaces
|
||||
.flatMap(namespace => {
|
||||
return namespace.split(".").reduce((current: string[], part: string) => {
|
||||
const previousCombination = current?.[current.length - 1];
|
||||
return [...current, `${(previousCombination ? previousCombination + "." : "")}${part}`];
|
||||
}, []);
|
||||
}))].map(namespace => ({
|
||||
label: namespace,
|
||||
value: namespace
|
||||
}));
|
||||
}
|
||||
return [];
|
||||
},
|
||||
searchable: true
|
||||
},
|
||||
],
|
||||
};
|
||||
});
|
||||
return computed(() => {
|
||||
return {
|
||||
title: t("filter.titles.secret_filters"),
|
||||
searchPlaceholder: t("filter.search_placeholders.search_secrets"),
|
||||
keys: [
|
||||
...(route.name !== "namespaces/update" ? [
|
||||
{
|
||||
key: "namespace",
|
||||
label: t("filter.namespace.label"),
|
||||
description: t("filter.namespace.description"),
|
||||
comparators: [
|
||||
Comparators.IN,
|
||||
Comparators.NOT_IN,
|
||||
Comparators.CONTAINS,
|
||||
Comparators.PREFIX,
|
||||
],
|
||||
valueType: "multi-select",
|
||||
valueProvider: async () => {
|
||||
const user = useAuthStore().user;
|
||||
if (user && user.hasAnyActionOnAnyNamespace(permission.NAMESPACE, action.READ)) {
|
||||
const namespacesStore = useNamespacesStore();
|
||||
const namespaces = (await namespacesStore.loadAutocomplete()) as string[];
|
||||
return [...new Set(namespaces
|
||||
.flatMap(namespace => {
|
||||
return namespace.split(".").reduce((current: string[], part: string) => {
|
||||
const previousCombination = current?.[current.length - 1];
|
||||
return [...current, `${(previousCombination ? previousCombination + "." : "")}${part}`];
|
||||
}, []);
|
||||
}))].map(namespace => ({
|
||||
label: namespace,
|
||||
value: namespace
|
||||
}));
|
||||
}
|
||||
return [];
|
||||
},
|
||||
searchable: true
|
||||
},
|
||||
] : []) as any,
|
||||
],
|
||||
};
|
||||
});
|
||||
};
|
||||
@@ -6,113 +6,118 @@ import {useNamespacesStore} from "override/stores/namespaces";
|
||||
import {useAuthStore} from "override/stores/auth";
|
||||
import {useValues} from "../composables/useValues";
|
||||
import {useI18n} from "vue-i18n";
|
||||
import {useRoute} from "vue-router";
|
||||
|
||||
export const useTriggerFilter = (): ComputedRef<FilterConfiguration> => computed(() => {
|
||||
export const useTriggerFilter = (): ComputedRef<FilterConfiguration> => {
|
||||
const {t} = useI18n();
|
||||
const route = useRoute();
|
||||
|
||||
return {
|
||||
title: t("filter.titles.trigger_filters"),
|
||||
searchPlaceholder: t("filter.search_placeholders.search_triggers"),
|
||||
keys: [
|
||||
{
|
||||
key: "namespace",
|
||||
label: t("filter.namespace.label"),
|
||||
description: t("filter.namespace.description"),
|
||||
comparators: [
|
||||
Comparators.IN,
|
||||
Comparators.NOT_IN,
|
||||
Comparators.CONTAINS,
|
||||
Comparators.PREFIX,
|
||||
],
|
||||
valueType: "multi-select",
|
||||
valueProvider: async () => {
|
||||
const user = useAuthStore().user;
|
||||
if (user && user.hasAnyActionOnAnyNamespace(permission.NAMESPACE, action.READ)) {
|
||||
const namespacesStore = useNamespacesStore();
|
||||
const namespaces = (await namespacesStore.loadAutocomplete()) as string[];
|
||||
return [...new Set(namespaces
|
||||
.flatMap(namespace => {
|
||||
return namespace.split(".").reduce((current: string[], part: string) => {
|
||||
const previousCombination = current?.[current.length - 1];
|
||||
return [...current, `${(previousCombination ? previousCombination + "." : "")}${part}`];
|
||||
}, []);
|
||||
}))].map(namespace => ({
|
||||
label: namespace,
|
||||
value: namespace
|
||||
}));
|
||||
return computed(() => {
|
||||
return {
|
||||
title: t("filter.titles.trigger_filters"),
|
||||
searchPlaceholder: t("filter.search_placeholders.search_triggers"),
|
||||
keys: [
|
||||
...(route.name !== "namespaces/update" ? [
|
||||
{
|
||||
key: "namespace",
|
||||
label: t("filter.namespace.label"),
|
||||
description: t("filter.namespace.description"),
|
||||
comparators: [
|
||||
Comparators.IN,
|
||||
Comparators.NOT_IN,
|
||||
Comparators.CONTAINS,
|
||||
Comparators.PREFIX,
|
||||
],
|
||||
valueType: "multi-select" as const,
|
||||
valueProvider: async () => {
|
||||
const user = useAuthStore().user;
|
||||
if (user && user.hasAnyActionOnAnyNamespace(permission.NAMESPACE, action.READ)) {
|
||||
const namespacesStore = useNamespacesStore();
|
||||
const namespaces = (await namespacesStore.loadAutocomplete()) as string[];
|
||||
return [...new Set(namespaces
|
||||
.flatMap(namespace => {
|
||||
return namespace.split(".").reduce((current: string[], part: string) => {
|
||||
const previousCombination = current?.[current.length - 1];
|
||||
return [...current, `${(previousCombination ? previousCombination + "." : "")}${part}`];
|
||||
}, []);
|
||||
}))].map(namespace => ({
|
||||
label: namespace,
|
||||
value: namespace
|
||||
}));
|
||||
}
|
||||
return [];
|
||||
},
|
||||
searchable: true
|
||||
},
|
||||
] : []) as any,
|
||||
...(route.name !== "flows/update" ? [{
|
||||
key: "flowId",
|
||||
label: t("filter.flowId.label"),
|
||||
description: t("filter.flowId.description"),
|
||||
comparators: [
|
||||
Comparators.EQUALS,
|
||||
Comparators.NOT_EQUALS,
|
||||
Comparators.CONTAINS,
|
||||
Comparators.STARTS_WITH,
|
||||
Comparators.ENDS_WITH,
|
||||
],
|
||||
valueType: "text",
|
||||
}] : []) as any,
|
||||
{
|
||||
key: "timeRange",
|
||||
label: t("filter.timeRange_trigger.label"),
|
||||
description: t("filter.timeRange_trigger.description"),
|
||||
comparators: [Comparators.EQUALS],
|
||||
valueType: "select",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("triggers");
|
||||
return VALUES.RELATIVE_DATE;
|
||||
}
|
||||
return [];
|
||||
},
|
||||
searchable: true
|
||||
},
|
||||
{
|
||||
key: "flowId",
|
||||
label: t("filter.flowId.label"),
|
||||
description: t("filter.flowId.description"),
|
||||
comparators: [
|
||||
Comparators.EQUALS,
|
||||
Comparators.NOT_EQUALS,
|
||||
Comparators.CONTAINS,
|
||||
Comparators.STARTS_WITH,
|
||||
Comparators.ENDS_WITH,
|
||||
],
|
||||
valueType: "text",
|
||||
},
|
||||
{
|
||||
key: "timeRange",
|
||||
label: t("filter.timeRange_trigger.label"),
|
||||
description: t("filter.timeRange_trigger.description"),
|
||||
comparators: [Comparators.EQUALS],
|
||||
valueType: "select",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("triggers");
|
||||
return VALUES.RELATIVE_DATE;
|
||||
{
|
||||
key: "scope",
|
||||
label: t("filter.scope_trigger.label"),
|
||||
description: t("filter.scope_trigger.description"),
|
||||
comparators: [Comparators.EQUALS, Comparators.NOT_EQUALS],
|
||||
valueType: "radio",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("triggers");
|
||||
return VALUES.SCOPES;
|
||||
},
|
||||
showComparatorSelection: false
|
||||
},
|
||||
{
|
||||
key: "triggerId",
|
||||
label: t("filter.triggerId_trigger.label"),
|
||||
description: t("filter.triggerId_trigger.description"),
|
||||
comparators: [
|
||||
Comparators.IN,
|
||||
Comparators.NOT_IN,
|
||||
Comparators.EQUALS,
|
||||
Comparators.NOT_EQUALS,
|
||||
Comparators.CONTAINS,
|
||||
Comparators.STARTS_WITH,
|
||||
Comparators.ENDS_WITH
|
||||
],
|
||||
valueType: "text",
|
||||
},
|
||||
{
|
||||
key: "workerId",
|
||||
label: t("filter.workerId.label"),
|
||||
description: t("filter.workerId.description"),
|
||||
comparators: [
|
||||
Comparators.IN,
|
||||
Comparators.NOT_IN,
|
||||
Comparators.EQUALS,
|
||||
Comparators.NOT_EQUALS,
|
||||
Comparators.CONTAINS,
|
||||
Comparators.STARTS_WITH,
|
||||
Comparators.ENDS_WITH
|
||||
],
|
||||
valueType: "text",
|
||||
searchable: true,
|
||||
}
|
||||
},
|
||||
{
|
||||
key: "scope",
|
||||
label: t("filter.scope_trigger.label"),
|
||||
description: t("filter.scope_trigger.description"),
|
||||
comparators: [Comparators.EQUALS, Comparators.NOT_EQUALS],
|
||||
valueType: "radio",
|
||||
valueProvider: async () => {
|
||||
const {VALUES} = useValues("triggers");
|
||||
return VALUES.SCOPES;
|
||||
},
|
||||
showComparatorSelection: false
|
||||
},
|
||||
{
|
||||
key: "triggerId",
|
||||
label: t("filter.triggerId_trigger.label"),
|
||||
description: t("filter.triggerId_trigger.description"),
|
||||
comparators: [
|
||||
Comparators.IN,
|
||||
Comparators.NOT_IN,
|
||||
Comparators.EQUALS,
|
||||
Comparators.NOT_EQUALS,
|
||||
Comparators.CONTAINS,
|
||||
Comparators.STARTS_WITH,
|
||||
Comparators.ENDS_WITH
|
||||
],
|
||||
valueType: "text",
|
||||
},
|
||||
{
|
||||
key: "workerId",
|
||||
label: t("filter.workerId.label"),
|
||||
description: t("filter.workerId.description"),
|
||||
comparators: [
|
||||
Comparators.IN,
|
||||
Comparators.NOT_IN,
|
||||
Comparators.EQUALS,
|
||||
Comparators.NOT_EQUALS,
|
||||
Comparators.CONTAINS,
|
||||
Comparators.STARTS_WITH,
|
||||
Comparators.ENDS_WITH
|
||||
],
|
||||
valueType: "text",
|
||||
// valueProvider: async () => {},
|
||||
searchable: true,
|
||||
}
|
||||
]
|
||||
};
|
||||
});
|
||||
]
|
||||
};
|
||||
});
|
||||
};
|
||||
@@ -39,7 +39,7 @@ export const decodeSearchParams = (query: LocationQuery) =>
|
||||
operation
|
||||
};
|
||||
})
|
||||
.filter(Boolean);
|
||||
.filter(v => v !== null);
|
||||
|
||||
type Filter = Pick<AppliedFilter, "key" | "comparator" | "value">;
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@
        :namespace="flowStore.flow?.namespace"
        :flowId="flowStore.flow?.id"
        :topbar="false"
        :restoreUrl="false"
        :defaultScopeFilter="false"
        filter
    />
</template>

@@ -8,6 +8,8 @@
            refresh: {shown: true, callback: load}
        }"
        legacyQuery
        :defaultScope="false"
        :defaultTimeRange="false"
    />

    <div v-bind="$attrs" v-loading="isLoading">

@@ -33,7 +33,7 @@
import FlowRootTopBar from "./FlowRootTopBar.vue";
import FlowConcurrency from "./FlowConcurrency.vue";
import DemoAuditLogs from "../demo/AuditLogs.vue";
import {useAuthStore} from "override/stores/auth"
import {useAuthStore} from "override/stores/auth";
import {useMiscStore} from "override/stores/misc";

export default {

@@ -59,13 +59,12 @@
        "$route.params.tab": {
            immediate: true,
            handler: function (newTab) {
                if (newTab === "overview") {
                if (newTab === "overview" || newTab === "executions") {
                    const dateTimeKeys = ["startDate", "endDate", "timeRange"];

                    if (!Object.keys(this.$route.query).some((key) => dateTimeKeys.some((dateTimeKey) => key.includes(dateTimeKey)))) {
                        const miscStore = useMiscStore();
                        const defaultDuration = miscStore.configs?.chartDefaultDuration || "P30D";
                        const newQuery = {...this.$route.query, "filters[timeRange][EQUALS]": defaultDuration};
                        const DEFAULT_DURATION = this.miscStore.configs?.chartDefaultDuration ?? "P30D";
                        const newQuery = {...this.$route.query, "filters[timeRange][EQUALS]": DEFAULT_DURATION};
                        this.$router.replace({name: this.$route.name, params: this.$route.params, query: newQuery});
                    }
                }

@@ -314,7 +313,7 @@
        }
    },
    computed: {
        ...mapStores(useCoreStore, useFlowStore, useAuthStore),
        ...mapStores(useCoreStore, useFlowStore, useAuthStore, useMiscStore),
        routeInfo() {
            return {
                title: this.$route.params.id,

@@ -16,6 +16,8 @@
        @update-properties="updateDisplayColumns"
        legacyQuery
        readOnly
        :defaultScope="false"
        :defaultTimeRange="false"
    />

    <el-table

@@ -472,7 +474,7 @@
        backfill: cleanBackfill.value
    })
        .then((newTrigger: any) => {
            (window as any).$toast().saved(newTrigger.id);
            toast.saved(newTrigger.triggerId);
            triggers.value = triggers.value.map((t: any) => {
                if (t.id === newTrigger.id) {
                    return newTrigger

@@ -493,7 +495,7 @@
const pauseBackfill = (trigger: any) => {
    triggerStore.pauseBackfill(trigger)
        .then((newTrigger: any) => {
            toast.saved(newTrigger.id);
            toast.saved(newTrigger.triggerId);
            triggers.value = triggers.value.map((t: any) => {
                if (t.id === newTrigger.id) {
                    return newTrigger

@@ -506,7 +508,7 @@
const unpauseBackfill = (trigger: any) => {
    triggerStore.unpauseBackfill(trigger)
        .then((newTrigger: any) => {
            toast.saved(newTrigger.id);
            toast.saved(newTrigger.triggerId);
            triggers.value = triggers.value.map((t: any) => {
                if (t.id === newTrigger.id) {
                    return newTrigger

@@ -519,7 +521,7 @@
const deleteBackfill = (trigger: any) => {
    triggerStore.deleteBackfill(trigger)
        .then((newTrigger: any) => {
            toast.saved(newTrigger.id);
            toast.saved(newTrigger.triggerId);
            triggers.value = triggers.value.map((t: any) => {
                if (t.id === newTrigger.id) {
                    return newTrigger

@@ -532,7 +534,7 @@
const setDisabled = (trigger: any, value: boolean) => {
    triggerStore.update({...trigger, disabled: !value})
        .then((newTrigger: any) => {
            toast.saved(newTrigger.id);
            toast.saved(newTrigger.triggerId);
            triggers.value = triggers.value.map((t: any) => {
                if (t.id === newTrigger.id) {
                    return newTrigger

@@ -548,7 +550,7 @@
        flowId: trigger.flowId,
        triggerId: trigger.triggerId
    }).then((newTrigger: any) => {
        toast.saved(newTrigger.id);
        toast.saved(newTrigger.triggerId);
        triggers.value = triggers.value.map((t: any) => {
            if (t.id === newTrigger.id) {
                return newTrigger

@@ -564,7 +566,7 @@
        flowId: trigger.flowId,
        triggerId: trigger.triggerId
    }).then((newTrigger: any) => {
        toast.saved(newTrigger.id);
        toast.saved(newTrigger.triggerId);
        triggers.value = triggers.value.map((t: any) => {
            if (t.id === newTrigger.id) {
                return newTrigger

@@ -53,6 +53,7 @@
            refresh: {shown: true, callback: refresh}
        }"
        @update-properties="updateDisplayColumns"
        :defaultScope="!route.name?.toString().startsWith('namespaces/')"
    />
</template>

@@ -204,6 +205,7 @@
    <template #default="scope">
        <TimeSeries
            :chart="mappedChart(scope.row.id, scope.row.namespace)"
            :filters="chartFilters()"
            showDefault
            short
        />

@@ -248,8 +250,8 @@


<script setup lang="ts">
import {ref, computed, onMounted, useTemplateRef} from "vue";
import {useRoute, useRouter} from "vue-router";
import {ref, computed, useTemplateRef} from "vue";
import {useRoute} from "vue-router";
import {useI18n} from "vue-i18n";
import _merge from "lodash/merge";
import * as FILTERS from "../../utils/filters";

@@ -272,7 +274,6 @@
import TriggerAvatar from "./TriggerAvatar.vue";
import DataTable from "../layout/DataTable.vue";
import BulkSelect from "../layout/BulkSelect.vue";
//@ts-expect-error no declaration file
import SelectTable from "../layout/SelectTable.vue";
import KSFilter from "../filter/components/KSFilter.vue";
import MarkdownTooltip from "../layout/MarkdownTooltip.vue";

@@ -283,17 +284,16 @@
import permission from "../../models/permission";

import {useToast} from "../../utils/toast";
import {defaultNamespace} from "../../composables/useNamespaces";

import {useFlowStore} from "../../stores/flow";
import {useAuthStore} from "override/stores/auth";
import {useMiscStore} from "override/stores/misc";
import {useExecutionsStore} from "../../stores/executions";

import {useTableColumns} from "../../composables/useTableColumns";
import {DataTableRef, useDataTableActions} from "../../composables/useDataTableActions";
import {useSelectTableActions} from "../../composables/useSelectTableActions";


const props = withDefaults(defineProps<{
    topbar?: boolean;
    namespace?: string;

@@ -307,9 +307,9 @@
const flowStore = useFlowStore();
const authStore = useAuthStore();
const executionsStore = useExecutionsStore();
const miscStore = useMiscStore();

const route = useRoute();
const router = useRouter();

const {t} = useI18n();
const toast = useToast()

@@ -622,24 +622,14 @@
    return MAPPED_CHARTS;
}

onMounted(() => {
    const query = {...route.query};
    const queryKeys = Object.keys(query);
    let queryHasChanged = false;

    if (props.namespace === undefined && defaultNamespace() && !queryKeys.some(key => key.startsWith("filters[namespace]"))) {
        query["filters[namespace][PREFIX]"] = defaultNamespace();
        queryHasChanged = true;
    }

    if (!queryKeys.some(key => key.startsWith("filters[scope]"))) {
        query["filters[scope][EQUALS]"] = "USER";
        queryHasChanged = true;
    }

    if (queryHasChanged) router.replace({query});
});

function chartFilters() {
    const DEFAULT_DURATION = miscStore.configs?.chartDefaultDuration ?? "P30D";
    return [{
        field: "timeRange",
        value: DEFAULT_DURATION,
        operation: "EQUALS"
    }];
}
</script>

<style scoped lang="scss">

@@ -56,7 +56,6 @@
import DataTable from "../layout/DataTable.vue";
import SearchField from "../layout/SearchField.vue";
import NamespaceSelect from "../namespaces/components/NamespaceSelect.vue";
import useRestoreUrl from "../../composables/useRestoreUrl";
import useRouteContext from "../../composables/useRouteContext";
import {useDataTableActions} from "../../composables/useDataTableActions";

@@ -77,11 +76,9 @@
}));

useRouteContext(routeInfo);
const {saveRestoreUrl} = useRestoreUrl({restoreUrl: true, isDefaultNamespaceAllow: true});

const {onPageChanged, onDataTableValue, queryWithFilter, ready} = useDataTableActions({
    loadData,
    saveRestoreUrl
    loadData
});

const namespace = computed({

@@ -83,6 +83,7 @@
/* eslint-disable vue/enforce-style-attribute */
import {computed, onMounted, ref, shallowRef, watch} from "vue";
import {useI18n} from "vue-i18n";
import {useThrottleFn} from "@vueuse/core";
import UnfoldLessHorizontal from "vue-material-design-icons/UnfoldLessHorizontal.vue";
import UnfoldMoreHorizontal from "vue-material-design-icons/UnfoldMoreHorizontal.vue";
import Help from "vue-material-design-icons/Help.vue";

@@ -94,6 +95,7 @@
import {TabFocus} from "monaco-editor/esm/vs/editor/browser/config/tabFocus";
import MonacoEditor from "./MonacoEditor.vue";
import type * as monaco from "monaco-editor/esm/vs/editor/editor.api";
import {useScrollMemory} from "../../composables/useScrollMemory";

const {t} = useI18n()

@@ -123,6 +125,7 @@
    shouldFocus: {type: Boolean, default: true},
    showScroll: {type: Boolean, default: false},
    diffOverviewBar: {type: Boolean, default: true},
    scrollKey: {type: String, default: undefined},
})

defineOptions({

@@ -312,6 +315,29 @@
        return
    }

    const codeEditor = editor as monaco.editor.IStandaloneCodeEditor;
    const scrollMemory = props.scrollKey ? useScrollMemory(ref(props.scrollKey)) : null;

    if (props.scrollKey && scrollMemory) {
        const savedState = scrollMemory.loadData<monaco.editor.ICodeEditorViewState>("viewState");
        if (savedState) {
            codeEditor.restoreViewState(savedState);
            codeEditor.revealLineInCenterIfOutsideViewport?.(codeEditor.getPosition()?.lineNumber ?? 1);
        }

        const top = scrollMemory.loadData<number>("scrollTop", 0);
        if (typeof top === "number") {
            codeEditor.setScrollTop(top);
        }

        const throttledSave = useThrottleFn(() => {
            scrollMemory.saveData(codeEditor.saveViewState(), "viewState");
            scrollMemory.saveData(codeEditor.getScrollTop(), "scrollTop");
        }, 100);

        codeEditor.onDidScrollChange?.(throttledSave);
    }

    if (!isDiff.value) {
        editor.onDidBlurEditorWidget?.(() => {
            emit("focusout", isCodeEditor(editor)

@@ -468,6 +494,10 @@
            position: position,
            model: model,
        });
        // Save view state when cursor changes
        if (scrollMemory) {
            scrollMemory.saveData(codeEditor.saveViewState(), "viewState");
        }
    }, 100) as unknown as number;
    highlightPebble();
});

@@ -59,12 +59,12 @@
const {t} = useI18n();

const exportYaml = () => {
    const src = flowStore.flowYaml
    if(!src) {
        return;
    }
    const blob = new Blob([src], {type: "text/yaml"});
    localUtils.downloadUrl(window.URL.createObjectURL(blob), "flow.yaml");
    if(!flowStore.flow || !flowStore.flowYaml) return;

    const {id, namespace} = flowStore.flow;
    const blob = new Blob([flowStore.flowYaml], {type: "text/yaml"});

    localUtils.downloadUrl(window.URL.createObjectURL(blob), `${namespace}.${id}.yaml`);
};

const flowStore = useFlowStore();

@@ -109,24 +109,31 @@
const onSaveAll = inject(FILES_SAVE_ALL_INJECTION_KEY);

async function save(){
    // Save the isCreating before saving.
    // saveAll can change its value.
    const isCreating = flowStore.isCreating
    await flowStore.saveAll()
    try {
        // Save the isCreating before saving.
        // saveAll can change its value.
        const isCreating = flowStore.isCreating
        await flowStore.saveAll()

    if(isCreating){
        await router.push({
            name: "flows/update",
            params: {
                id: flowStore.flow?.id,
                namespace: flowStore.flow?.namespace,
                tab: "edit",
                tenant: routeParams.value.tenant,
            },
        });
        if(isCreating){
            await router.push({
                name: "flows/update",
                params: {
                    id: flowStore.flow?.id,
                    namespace: flowStore.flow?.namespace,
                    tab: "edit",
                    tenant: routeParams.value.tenant,
                },
            });
        }

        onSaveAll?.();
    } catch (error: any) {
        if (error?.status === 401) {
            toast.error("401 Unauthorized", undefined, {duration: 2000});
            return;
        }
    }

    onSaveAll?.();
}

const deleteFlow = () => {

@@ -19,6 +19,7 @@
    :creating="isCreating"
    :path="path"
    :diffOverviewBar="false"
    :scrollKey="editorScrollKey"
    @update:model-value="editorUpdate"
    @cursor="updatePluginDocumentation"
    @save="flow ? saveFlowYaml(): saveFileContent()"

@@ -224,6 +225,19 @@
const namespacesStore = useNamespacesStore();
const miscStore = useMiscStore();

const editorScrollKey = computed(() => {
    if (props.flow) {
        const ns = flowStore.flow?.namespace ?? "";
        const id = flowStore.flow?.id ?? "";
        return `flow:${ns}/${id}:code`;
    }
    const ns = namespace.value;
    if (ns && props.path) {
        return `file:${ns}:${props.path}`;
    }
    return undefined;
});

function loadPluginsHash() {
    miscStore.loadConfigs().then(config => {
        hash.value = config.pluginsHash;

@@ -463,7 +463,7 @@
    for (const item of itemsArr) {
        const fullPath = `${parentPath}${item.fileName}`;
        result.push({path: fullPath, fileName: item.fileName, id: item.id});
        if (isDirectory(item) && item.children.length > 0) {
        if (isDirectory(item) && item.children?.length > 0) {
            result.push(...flattenTree(item.children, `${fullPath}/`));
        }
    }

@@ -688,21 +688,22 @@

async function removeItems() {
    if(confirmation.value.nodes === undefined) return;
    for (const node of confirmation.value.nodes) {
    await Promise.all(confirmation.value.nodes.map(async (node, i) => {
        const path = filesStore.getPath(node.id) ?? "";
        try {
            await namespacesStore.deleteFileDirectory({
                namespace: props.currentNS ?? route.params.namespace as string,
                path: filesStore.getPath(node) ?? "",
                path,
            });
            tree.value.remove(node.id);
            closeTab?.({
                path: filesStore.getPath(node) ?? "",
                path,
            });
        } catch (error) {
            console.error(`Failed to delete file: ${node.fileName}`, error);
            toast.error(`Failed to delete file: ${node.fileName}`);
        }
    }
    }));
    confirmation.value = {visible: false, nodes: []};
    toast.success("Selected files deleted successfully.");
}

@@ -304,6 +304,7 @@
const suggestWidgetResizeObserver = ref<MutationObserver>()
const suggestWidgetObserver = ref<MutationObserver>()
const suggestWidget = ref<HTMLElement>()
const resizeObserver = ref<ResizeObserver>()

defineExpose({
    focus,

@@ -871,6 +872,20 @@
    setTimeout(() => monaco.editor.remeasureFonts(), 1)
    emit("editorDidMount", editorResolved.value);

    /* Handle resizing. */
    resizeObserver.value = new ResizeObserver(() => {
        if (localEditor.value) {
            localEditor.value.layout();
        }
        if (localDiffEditor.value) {
            localDiffEditor.value.getModifiedEditor().layout();
            localDiffEditor.value.getOriginalEditor().layout();
        }
    });
    if (editorRef.value) {
        resizeObserver.value.observe(editorRef.value);
    }

    highlightLine();
}

@@ -928,6 +943,8 @@
function destroy() {
    disposeObservers();
    disposeCompletions.value?.();
    resizeObserver.value?.disconnect();
    resizeObserver.value = undefined;
    if (localDiffEditor.value !== undefined) {
        localDiffEditor.value?.dispose();
        localDiffEditor.value?.getModel()?.modified?.dispose();

@@ -235,7 +235,7 @@
import {useI18n} from "vue-i18n";
import {useRoute} from "vue-router";
import _groupBy from "lodash/groupBy";
import {computed, ref, useTemplateRef, watch} from "vue";
import {computed, nextTick, ref, useTemplateRef, watch} from "vue";

import Check from "vue-material-design-icons/Check.vue";
import Delete from "vue-material-design-icons/Delete.vue";

@@ -272,7 +272,6 @@
import DataTable from "../layout/DataTable.vue";
import _merge from "lodash/merge";
import {type DataTableRef, useDataTableActions} from "../../composables/useDataTableActions.ts";

const dataTable = useTemplateRef<DataTableRef>("dataTable");

const loadData = async (callback?: () => void) => {

@@ -491,6 +490,8 @@
    kv.value.key = entry.key;
    const {type, value} = await namespacesStore.kv({namespace: entry.namespace, key: entry.key});
    kv.value.type = type;
    // Force the type reset before setting the value
    await nextTick();
    if (type === "JSON") {
        kv.value.value = JSON.stringify(value);
    } else if (type === "BOOLEAN") {

@@ -504,7 +505,7 @@
}

function removeKv(namespace: string, key: string) {
    toast.confirm("delete confirm", async () => {
    toast.confirm(t("delete confirm"), async () => {
        return namespacesStore
            .deleteKv({namespace, key: key})
            .then(() => {

@@ -543,14 +544,16 @@
    const type = kv.value.type;
    let value: any = kv.value.value;

    if (type === "STRING" || type === "DURATION") {
    if (type === "STRING") {
        value = JSON.stringify(value);
    } else if (["DURATION", "JSON"].includes(type)) {
        value = value || "";
    } else if (type === "DATETIME") {
        value = new Date(value!).toISOString();
    } else if (type === "DATE") {
        value = new Date(value!).toISOString().split("T")[0];
    } else if (["NUMBER", "BOOLEAN", "JSON"].includes(type)) {
        value = JSON.stringify(value);
    } else {
        value = String(value);
    }

    const contentType = "text/plain";

@@ -605,10 +608,9 @@

const formRef = ref();

watch(() => kv.value.type, () => {
    if (formRef.value) {
        (formRef.value as any).clearValidate("value");
    }
watch(() => kv.value.type, (newType) => {
    formRef.value?.clearValidate("value");
    if (newType === "BOOLEAN") kv.value.value = false;
});

defineExpose({

@@ -72,7 +72,7 @@
    if (props.labels.length === 0) {
        addItem();
    } else {
        locals.value = [...props.labels];
        locals.value = props.labels;
        if (locals.value.length === 0) {
            addItem();
        }

@@ -1,5 +1,5 @@
<template>
    <ContextInfoContent :title="t('feeds.title')">
    <ContextInfoContent ref="contextInfoRef" :title="t('feeds.title')">
        <div
            class="post"
            :class="{

@@ -46,9 +46,10 @@
</template>

<script setup lang="ts">
import {computed, onMounted, reactive} from "vue";
import {computed, onMounted, reactive, ref} from "vue";
import {useI18n} from "vue-i18n";
import {useStorage} from "@vueuse/core"
import {useScrollMemory} from "../../composables/useScrollMemory"

import OpenInNew from "vue-material-design-icons/OpenInNew.vue";
import MenuDown from "vue-material-design-icons/MenuDown.vue";

@@ -62,6 +63,7 @@
const apiStore = useApiStore();
const {t} = useI18n({useScope: "global"});

const contextInfoRef = ref<InstanceType<typeof ContextInfoContent> | null>(null);
const feeds = computed(() => apiStore.feeds);

const expanded = reactive<Record<string, boolean>>({});

@@ -70,6 +72,9 @@
onMounted(() => {
    lastNewsReadDate.value = feeds.value[0].publicationDate;
});

const scrollableElement = computed(() => contextInfoRef.value?.contentRef || null)
useScrollMemory(ref("context-panel-news"), scrollableElement as any)
</script>

<style scoped lang="scss">

@@ -53,10 +53,11 @@
    if (isChecked(label)) {
        const replacementQuery = {...route.query};
        delete replacementQuery[getKey(label.key)];
        replacementQuery.page = "1";
        router.replace({query: replacementQuery});
    } else {
        router.replace({
            query: {...route.query, [getKey(label.key)]: label.value},
            query: {...route.query, [getKey(label.key)]: label.value, page: "1"},
        });
    }
};

@@ -1,5 +1,5 @@
<template>
    <div class="position-relative">
    <div ref="container" class="position-relative">
        <div v-if="hasSelection && data.length" class="bulk-select-header">
            <slot name="select-actions" />
        </div>

@@ -9,12 +9,8 @@
            v-bind="$attrs"
            :data
            :rowKey
            :emptyText="data.length === 0 && infiniteScrollLoad === undefined ? noDataText : ''"
            :emptyText="data.length === 0 ? noDataText : ''"
            @selection-change="selectionChanged"
            v-el-table-infinite-scroll="infiniteScrollLoadWithDisableHandling"
            :infiniteScrollDisabled="infiniteScrollLoad === undefined ? true : infiniteScrollDisabled"
            :infiniteScrollDelay="0"
            :height="data.length === 0 && infiniteScrollLoad === undefined ? '100px' : tableHeight"
        >
            <el-table-column type="selection" v-if="selectable && showSelection" reserveSelection />
            <slot name="default" />

@@ -22,155 +18,107 @@
    </div>
</template>

<script>
import elTableInfiniteScroll from "el-table-infinite-scroll";
<script setup lang="ts">
import {ref, onMounted, onUnmounted, onUpdated, watch} from "vue";

export default {
    data() {
        return {
            hasSelection: false,
            infiniteScrollDisabled: false,
            tableHeight: this.infiniteScrollLoad === undefined ? "auto" : "100%"
        }
    },
    expose: ["resetInfiniteScroll", "setSelection", "waitTableRender", "toggleRowExpansion"],
    computed: {
        scrollWrapper() {
            if (this.data) {
                return this.$refs.table?.$el?.querySelector(".el-scrollbar__wrap");
            }
const props = withDefaults(defineProps<{
    showSelection?: boolean;
    selectable?: boolean;
    expandable?: boolean;
    data?: any[];
    noDataText?: string;
    rowKey?: string | ((row: any) => string | number);
}>(), {
    showSelection: true,
    selectable: true,
    expandable: false,
    data: () => [],
    noDataText: undefined,
    rowKey: "id"
});

            return undefined;
        },
        tableView() {
            if (this.data) {
                return this.scrollWrapper?.querySelector(".el-scrollbar__view");
            }
const emit = defineEmits<{
    "selection-change": [selection: any[]];
}>();

            return undefined;
        },
        stillHaveDataToFetch() {
            return this.infiniteScrollDisabled === false;
        },
    },
    directives: {
        elTableInfiniteScroll
    },
    methods: {
        async resetInfiniteScroll() {
            this.infiniteScrollDisabled = false;
            this.tableHeight = await this.computeTableHeight();
        },
        async toggleRowExpansion(row, expand){
            this.$refs.table.toggleRowExpansion(row, expand)
            // this.$refs.table.clearSelection()
        },
        async waitTableRender() {
            if (this.tableView === undefined) {
                return Promise.resolve();
            }
const table = ref<any>(null);
const hasSelection = ref(false);
const container = ref<HTMLElement>();

            if (this.tableView.querySelectorAll(".el-table__body > tbody > *")?.length === this.data?.length) {
                return Promise.resolve();
            }
const toggleRowExpansion = (row: any, expand?: boolean) => {
    table.value?.toggleRowExpansion(row, expand);
};

            return new Promise(resolve => {
                const observer = new MutationObserver(([{target}]) => {
                    if (target.childElementCount === this.data?.length) {
                        observer.disconnect();
                        resolve();
                    }
                });
const selectionChanged = (selection: any[]) => {
    hasSelection.value = selection.length > 0;
    emit("selection-change", selection);
};

                observer.observe(this.tableView.querySelector(".el-table__body > tbody"), {childList: true});
            });
        },
        selectionChanged(selection) {
            this.hasSelection = selection.length > 0;
            this.$emit("selection-change", selection);
        },
        setSelection(selection) {
            this.$refs.table.clearSelection();
            if (Array.isArray(selection)) {
                const isFunction = typeof this.rowKey === "function";
                selection.forEach(sel => {
                    const row = this.data.find(r => isFunction
                        ? this.rowKey(r) === this.rowKey(sel)
                        : r[this.rowKey] === sel[this.rowKey]);
                    if (row) this.$refs.table.toggleRowSelection(row, true);
                });
            }
            this.selectionChanged(selection);
        },
        computeHeaderSize() {
            const tableElement = this.$refs.table?.$el;
const clearSelection = () => {
    table.value?.clearSelection();
    hasSelection.value = false;
};

            if(!tableElement) return;
const setSelection = (selection: any[]) => {
    table.value?.clearSelection();
    if (Array.isArray(selection)) {
        const isFunction = typeof props.rowKey === "function";
        selection.forEach(sel => {
            const row = props.data.find(r => isFunction
                ? props.rowKey(r) === props.rowKey(sel)
                : r[props.rowKey] === sel[props.rowKey]);
            if (row) table.value?.toggleRowSelection(row, true);
        });
    }
    selectionChanged(selection);
};

            this.$el.style.setProperty("--table-header-width", `${tableElement.clientWidth}px`);
            this.$el.style.setProperty("--table-header-height", `${tableElement.querySelector("thead").clientHeight}px`);
        },
        async computeTableHeight() {
            await this.waitTableRender();
const computeHeaderSize = () => {
    const tableElement = table.value?.$el;
    if (!tableElement || !container.value) return;
    container.value.style.setProperty("--table-header-width", `${tableElement.clientWidth}px`);
    container.value.style.setProperty("--table-header-height", `${tableElement.querySelector("thead").clientHeight}px`);
};

            if (this.infiniteScrollLoad === undefined || this.scrollWrapper === undefined) {
                return "auto";
            }
onMounted(() => {
    window.addEventListener("resize", computeHeaderSize);
});

            if (!this.stillHaveDataToFetch && this.data.length === 0) {
                return "calc(var(--table-header-height) + 60px)";
            }
onUnmounted(() => {
    window.removeEventListener("resize", computeHeaderSize);
});

            return this.stillHaveDataToFetch || this.tableView === undefined ? "100%" : `min(${this.tableView.scrollHeight}px, 100%)`;
        },
        async infiniteScrollLoadWithDisableHandling() {
            let load = await this.infiniteScrollLoad?.();
            while (load !== undefined && load.length === 0) {
                load = await this.infiniteScrollLoad?.();
            }
onUpdated(() => {
    computeHeaderSize();
});

            this.infiniteScrollDisabled = load === undefined;

            return load;
        }
    },
    props: {
        showSelection: {type: Boolean, default: true},
        selectable: {type: Boolean, default: true},
        expandable: {type: Boolean, default: false},
        data: {type: Array, default: () => []},
        noDataText: {type: String, default: undefined},
        infiniteScrollLoad: {type: Function, default: undefined},
        rowKey: {type: [String, Function], default: "id"}
    },
    emits: [
        "selection-change"
    ],
    async mounted() {
        window.addEventListener("resize", this.computeHeaderSize);
    },
    unmounted() {
        window.removeEventListener("resize", this.computeHeaderSize);
    },
    updated() {
        this.computeHeaderSize();
    },
    watch: {
        data: {
            async handler() {
                this.tableHeight = await this.computeTableHeight();
            },
            immediate: true
        },
        async stillHaveDataToFetch(newVal, oldVal) {
            if (oldVal !== newVal) {
                this.tableHeight = await this.computeTableHeight();
            }
watch(() => props.data, () => {
    if (props.data.length === 0) {
        hasSelection.value = false;
        table.value?.clearSelection();
    } else {
        const currentSelection = table.value?.getSelectionRows() ?? [];
        const validSelection = currentSelection.filter((sel: any) => {
            const isFunction = typeof props.rowKey === "function";
            return props.data.some(r => isFunction
                ? props.rowKey(r) === props.rowKey(sel)
                : r[props.rowKey] === sel[props.rowKey]);
        });
        if (validSelection.length !== currentSelection.length) {
            table.value?.clearSelection();
            hasSelection.value = false;
        } else if (table.value) {
            selectionChanged(currentSelection);
        }
    }
        }
    }
}
</script>
}, {immediate: true});

defineExpose({
    setSelection,
    clearSelection,
    toggleRowExpansion
});
</script>
<style scoped lang="scss">
.bulk-select-header {
    z-index: 1;

@@ -186,4 +134,12 @@
    z-index: 0;
}
}

@media (max-width: 500px) {
    :deep(.el-table__empty-text) {
        overflow: hidden;
        text-overflow: ellipsis;
        white-space: nowrap;
    }
}
</style>

@@ -2,7 +2,7 @@
    <TopNavBar v-if="!embed" :title="routeInfo.title" />
    <section v-bind="$attrs" :class="{'container': !embed}" class="log-panel">
        <div class="log-content">
            <DataTable @page-changed="onPageChanged" ref="dataTable" :total="logsStore.total" :size="pageSize" :page="pageNumber" :embed="embed">
            <DataTable @page-changed="onPageChanged" ref="dataTable" :total="logsStore.total" :size="internalPageSize" :page="internalPageNumber" :embed="embed">
                <template #navbar v-if="!embed || showFilters">
                    <KSFilter
                        :configuration="logFilter"

@@ -11,8 +11,9 @@
                            refresh: {shown: true, callback: refresh},
                            columns: {shown: false}
                        }"
                        :defaultScope="false"
                    />
                </template>

                <template v-if="showStatChart()" #top>
                    <Sections ref="dashboard" :charts :dashboard="{id: 'default', charts: []}" showDefault />

@@ -20,7 +21,7 @@

                <template #table>
                    <div v-loading="isLoading">
                        <div v-if="logsStore.logs !== undefined && logsStore.logs.length > 0" class="logs-wrapper">
                        <div v-if="logsStore.logs !== undefined && logsStore.logs?.length > 0" class="logs-wrapper">
                            <LogLine
                                v-for="(log, i) in logsStore.logs"
                                :key="`${log.taskRunId}-${i}`"

@@ -42,6 +43,11 @@
</template>

<script setup lang="ts">
import {ref, computed, onMounted, watch, useTemplateRef} from "vue";
import {useRoute} from "vue-router";
import {useI18n} from "vue-i18n";
import _merge from "lodash/merge";
import moment from "moment";
import {useLogFilter} from "../filter/configurations";
import KSFilter from "../filter/components/KSFilter.vue";
import Sections from "../dashboard/sections/Sections.vue";

@@ -49,193 +55,151 @@
import TopNavBar from "../../components/layout/TopNavBar.vue";
import LogLine from "../logs/LogLine.vue";
import NoData from "../layout/NoData.vue";

const logFilter = useLogFilter();
</script>

<script lang="ts">
import {mapStores} from "pinia";
import RouteContext from "../../mixins/routeContext";
import RestoreUrl from "../../mixins/restoreUrl";
import DataTableActions from "../../mixins/dataTableActions";
import _merge from "lodash/merge";
import {storageKeys} from "../../utils/constants";
import {decodeSearchParams} from "../filter/utils/helpers";
import * as YAML_UTILS from "@kestra-io/ui-libs/flow-yaml-utils";
import YAML_CHART from "../dashboard/assets/logs_timeseries_chart.yaml?raw";
import {useLogsStore} from "../../stores/logs";
import {defaultNamespace} from "../../composables/useNamespaces";
import {defineComponent} from "vue";
import {useDataTableActions} from "../../composables/useDataTableActions";
import useRouteContext from "../../composables/useRouteContext";

export default defineComponent({
    mixins: [RouteContext, RestoreUrl, DataTableActions],
    props: {
        logLevel: {
            type: String,
            default: undefined
        },
        embed: {
            type: Boolean,
            default: false
        },
        showFilters: {
            type: Boolean,
            default: false
        },
        filters: {
            type: Object,
            default: null
        },
        reloadLogs: {
            type: Number,
            default: undefined
        }
    },
    data() {
        return {
            isDefaultNamespaceAllow: true,
            task: undefined,
            isLoading: false,
            lastRefreshDate: new Date(),
            canAutoRefresh: false,
            showChart: localStorage.getItem(storageKeys.SHOW_LOGS_CHART) !== "false",
        };
    },
    computed: {
        storageKeys() {
            return storageKeys
        },
        ...mapStores(useLogsStore),
        routeInfo() {
            return {
                title: this.$t("logs"),
            };
        },
        isFlowEdit() {
            return this.$route.name === "flows/update"
        },
        isNamespaceEdit() {
            return this.$route.name === "namespaces/update"
        },
        selectedLogLevel() {
            const decodedParams = decodeSearchParams(this.$route.query);
            const levelFilters = decodedParams.filter(item => item?.field === "level");
            const decoded = levelFilters.length > 0 ? levelFilters[0]?.value : "INFO";
            return this.logLevel || decoded || localStorage.getItem("defaultLogLevel") || "INFO";
        },
        endDate() {
            if (this.$route.query.endDate) {
                return this.$route.query.endDate;
            }
            return undefined;
        },
        startDate() {
            // we mention the last refresh date here to trick
            // VueJs fine grained reactivity system and invalidate
            // computed property startDate
            if (this.$route.query.startDate && this.lastRefreshDate) {
                return this.$route.query.startDate;
            }
            if (this.$route.query.timeRange) {
                return this.$moment().subtract(this.$moment.duration(this.$route.query.timeRange).as("milliseconds")).toISOString(true);
            }
const props = withDefaults(defineProps<{
    logLevel?: string;
    embed?: boolean;
    showFilters?: boolean;
    filters?: Record<string, any>;
    reloadLogs?: number;
}>(), {
    embed: false,
    showFilters: false,
    filters: undefined,
    logLevel: undefined,
    reloadLogs: undefined
});

            // the default is PT30D
            return this.$moment().subtract(7, "days").toISOString(true);
        },
        namespace() {
            return this.$route.params.namespace ?? this.$route.params.id;
        },
        flowId() {
            return this.$route.params.id;
        },
        charts() {
            return [
                {...YAML_UTILS.parse(YAML_CHART), content: YAML_CHART}
            ];
        }
    },
    beforeRouteEnter(to: any, _: any, next: (route?: any) => void) {
        const query = {...to.query};
        let queryHasChanged = false;
const route = useRoute();
const {t} = useI18n();
const logsStore = useLogsStore();
const logFilter = useLogFilter();

        const queryKeys = Object.keys(query);
        if (defaultNamespace() && !queryKeys.some(key => key.startsWith("filters[namespace]"))) {
            query["filters[namespace][PREFIX]"] = defaultNamespace();
            queryHasChanged = true;
        }
const routeInfo = computed(() => ({
    title: t("logs"),
}));
useRouteContext(routeInfo, props.embed);

        if (queryHasChanged) {
            next({
                ...to,
                query,
                replace: true
            });
        } else {
            next();
        }
    },
    methods: {
        showStatChart() {
            return this.showChart;
        },
        onShowChartChange(value: boolean) {
            this.showChart = value;
            localStorage.setItem(storageKeys.SHOW_LOGS_CHART, value.toString());
            if (this.showStatChart()) {
                this.load();
            }
        },
        refresh() {
            this.lastRefreshDate = new Date();
            if (this.$refs.dashboard) {
                this.$refs.dashboard.refreshCharts();
            }
            this.load();
        },
        loadQuery(base: any) {
            let queryFilter = this.filters ?? this.queryWithFilter();
const isLoading = ref(false);
const lastRefreshDate = ref(new Date());
const showChart = ref(localStorage.getItem(storageKeys.SHOW_LOGS_CHART) !== "false");
const dashboardRef = useTemplateRef("dashboard");

            if (this.isFlowEdit) {
                queryFilter["filters[namespace][EQUALS]"] = this.namespace;
                queryFilter["filters[flowId][EQUALS]"] = this.flowId;
            } else if (this.isNamespaceEdit) {
                queryFilter["filters[namespace][EQUALS]"] = this.namespace;
            }
const isFlowEdit = computed(() => route.name === "flows/update");
const isNamespaceEdit = computed(() => route.name === "namespaces/update");
const selectedLogLevel = computed(() => {
    const decodedParams = decodeSearchParams(route.query);
    const levelFilters = decodedParams.filter(item => item?.field === "level");
    const decoded = levelFilters.length > 0 ? levelFilters[0]?.value : "INFO";
    return props.logLevel || decoded || localStorage.getItem("defaultLogLevel") || "INFO";
});
const endDate = computed(() => {
    if (route.query.endDate) {
        return route.query.endDate;
    }
    return undefined;
});
const startDate = computed(() => {
    // we mention the last refresh date here to trick
    // VueJs fine grained reactivity system and invalidate
    // computed property startDate
    if (route.query.startDate && lastRefreshDate.value) {
        return route.query.startDate;
    }
    if (route.query.timeRange) {
        return moment().subtract(moment.duration(route.query.timeRange as string).as("milliseconds")).toISOString(true);
    }

            if (!queryFilter["startDate"] || !queryFilter["endDate"]) {
                queryFilter["startDate"] = this.startDate;
                queryFilter["endDate"] = this.endDate;
            }
    // the default is PT30D
    return moment().subtract(7, "days").toISOString(true);
});
const flowId = computed(() => route.params.id);
const namespace = computed(() => route.params.namespace ?? route.params.id);
const charts = computed(() => [
    {...YAML_UTILS.parse(YAML_CHART), content: YAML_CHART}
]);

            delete queryFilter["level"];
const loadQuery = (base: any) => {
    let queryFilter = props.filters ?? queryWithFilter();

            return _merge(base, queryFilter)
        },
        load() {
            this.isLoading = true
    if (isFlowEdit.value) {
        queryFilter["filters[namespace][EQUALS]"] = namespace.value;
        queryFilter["filters[flowId][EQUALS]"] = flowId.value;
    } else if (isNamespaceEdit.value) {
        queryFilter["filters[namespace][EQUALS]"] = namespace.value;
    }

            const data = {
                page: this.filters ? this.internalPageNumber : this.$route.query.page || this.internalPageNumber,
                size: this.filters ? this.internalPageSize : this.$route.query.size || this.internalPageSize,
                ...this.filters
            };
            this.logsStore.findLogs(this.loadQuery({
                ...data,
                minLevel: this.filters ? null : this.selectedLogLevel,
                sort: "timestamp:desc"
            }))
                .finally(() => {
                    this.isLoading = false
                    this.saveRestoreUrl();
                });
    if (!queryFilter["startDate"] || !queryFilter["endDate"]) {
        queryFilter["startDate"] = startDate.value;
        queryFilter["endDate"] = endDate.value;
    }

        },
    },
    watch: {
        reloadLogs(newValue) {
            if(newValue) this.refresh();
        },
    delete queryFilter["level"];

    return _merge(base, queryFilter);
};

const loadData = (callback?: () => void) => {
    isLoading.value = true;

    const data = {
        page: props.filters ? internalPageNumber.value : route.query.page || internalPageNumber.value,
        size: props.filters ? internalPageSize.value : route.query.size || internalPageSize.value,
        ...props.filters
    };

    logsStore.findLogs(loadQuery({
        ...data,
        minLevel: props.filters ? null : selectedLogLevel.value,
        sort: "timestamp:desc"
    }))
        .finally(() => {
            isLoading.value = false;
            if (callback) callback();
        });
};

const {onPageChanged, queryWithFilter, internalPageNumber, internalPageSize} = useDataTableActions({
    loadData
});

const showStatChart = () => showChart.value;

const onShowChartChange = (value: boolean) => {
    showChart.value = value;
    localStorage.setItem(storageKeys.SHOW_LOGS_CHART, value.toString());
    if (showStatChart()) {
        loadData();
    }
};

const refresh = () => {
    lastRefreshDate.value = new Date();
    if (dashboardRef.value) {
        dashboardRef.value.refreshCharts();
    }
    loadData();
};

watch(() => route.query, () => {
    loadData();
}, {deep: true});

watch(() => props.reloadLogs, (newValue) => {
    if (newValue) refresh();
});

onMounted(() => {
    // Load data on mount if not embedded
    if (!props.embed) {
        loadData();
    }
});
</script>

@@ -31,6 +31,7 @@

const namespace = computed(() => route.params?.id) as Ref<string>;

const miscStore = useMiscStore();
const namespacesStore = useNamespacesStore();

watch(namespace, (newID) => {

@@ -40,13 +41,12 @@
});

watch(() => route.params.tab, (newTab) => {
    if (newTab === "overview") {
    if (newTab === "overview" || newTab === "executions") {
        const dateTimeKeys = ["startDate", "endDate", "timeRange"];

        if (!Object.keys(route.query).some((key) => dateTimeKeys.some((dateTimeKey) => key.includes(dateTimeKey)))) {
            const miscStore = useMiscStore();
            const defaultDuration = miscStore.configs?.chartDefaultDuration || "P30D";
            const newQuery = {...route.query, "filters[timeRange][EQUALS]": defaultDuration};
            const DEFAULT_DURATION = miscStore.configs?.chartDefaultDuration ?? "P30D";
            const newQuery = {...route.query, "filters[timeRange][EQUALS]": DEFAULT_DURATION};
            router.replace({name: route.name, params: route.params, query: newQuery});
        }
    }

Some files were not shown because too many files have changed in this diff.