mirror of
https://github.com/kestra-io/kestra.git
synced 2025-12-25 11:12:12 -05:00
Compare commits
192 Commits
run-develo
...
kestra_wip
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
31e66c070b | ||
|
|
a7a905a803 | ||
|
|
03a3f2c865 | ||
|
|
ea36532aa4 | ||
|
|
d5222ee1ee | ||
|
|
6599ce59df | ||
|
|
b4a87c3ef3 | ||
|
|
3be091ca5f | ||
|
|
b8f6102b27 | ||
|
|
4f13119e94 | ||
|
|
cbae83aa2b | ||
|
|
c208fe1cb9 | ||
|
|
c46e58048b | ||
|
|
b5cda54342 | ||
|
|
6f9ae15661 | ||
|
|
e23f9df7e5 | ||
|
|
3ddfbfdf13 | ||
|
|
ebbe6e8839 | ||
|
|
5f8095d6c8 | ||
|
|
d23c77a974 | ||
|
|
a2fa79086c | ||
|
|
18b0584150 | ||
|
|
6bf234b16f | ||
|
|
938e17d59c | ||
|
|
11016316b5 | ||
|
|
c5188074a9 | ||
|
|
638d9979fd | ||
|
|
3252b695bc | ||
|
|
9158052cff | ||
|
|
b209b6358e | ||
|
|
594429aebb | ||
|
|
2397286fa2 | ||
|
|
8c52f8694c | ||
|
|
98923e33c9 | ||
|
|
7440855f47 | ||
|
|
bd8a22026f | ||
|
|
89f2632135 | ||
|
|
3be2306f98 | ||
|
|
b0b58372a0 | ||
|
|
8f29b09959 | ||
|
|
c400f71b54 | ||
|
|
cdd841af0f | ||
|
|
c14d12b724 | ||
|
|
e76d151a32 | ||
|
|
f997c22068 | ||
|
|
9912a2df63 | ||
|
|
6761dd90ce | ||
|
|
3af0b49c89 | ||
|
|
58bcb1d16c | ||
|
|
20dd44a5d7 | ||
|
|
3f848454d4 | ||
|
|
9d54f4c407 | ||
|
|
490b0d9e3f | ||
|
|
b222570f39 | ||
|
|
a5246091d7 | ||
|
|
adcdab7e7e | ||
|
|
4e50f4c363 | ||
|
|
878a29989c | ||
|
|
c532fc3cc8 | ||
|
|
59ffa3d713 | ||
|
|
def7ad7a4b | ||
|
|
ed8e810791 | ||
|
|
4e3a786c3b | ||
|
|
80ad684275 | ||
|
|
dbc8f33d26 | ||
|
|
f3abfdfd61 | ||
|
|
d5ba7e7304 | ||
|
|
0df41b439d | ||
|
|
2ee8ea4dc6 | ||
|
|
9ca4f9d975 | ||
|
|
52caaee6fa | ||
|
|
2838dfae73 | ||
|
|
1ec5c3a512 | ||
|
|
2e2b05c227 | ||
|
|
a0205cc710 | ||
|
|
e4cb4c1f64 | ||
|
|
824179ea1e | ||
|
|
40d33f91d1 | ||
|
|
4ff24c6665 | ||
|
|
73582ee3b8 | ||
|
|
4e1f68ac35 | ||
|
|
aef6649530 | ||
|
|
8635ea505b | ||
|
|
e302b4be4a | ||
|
|
8e7ad9ae25 | ||
|
|
41a11abf16 | ||
|
|
1be16d5e9d | ||
|
|
e263224d7b | ||
|
|
12b89588a6 | ||
|
|
eae5eb80cb | ||
|
|
c0f6298484 | ||
|
|
ba1d6b2232 | ||
|
|
048dcb80cc | ||
|
|
a81de811d7 | ||
|
|
a960a9f982 | ||
|
|
c4d4fd935f | ||
|
|
f063a5a2d9 | ||
|
|
ac91d5605f | ||
|
|
e3d3c3651b | ||
|
|
5b6836237e | ||
|
|
2f8284b133 | ||
|
|
42992fd7c3 | ||
|
|
3a481f93d3 | ||
|
|
7e964ae563 | ||
|
|
25e54edbc9 | ||
|
|
e88dc7af76 | ||
|
|
b7a027f0dc | ||
|
|
98141d6010 | ||
|
|
bf119ab6df | ||
|
|
9bd6353b77 | ||
|
|
c0ab581cf1 | ||
|
|
0f38e19663 | ||
|
|
0c14ea621c | ||
|
|
fb14e57a7c | ||
|
|
09c707d865 | ||
|
|
86e08d71dd | ||
|
|
94c00cedeb | ||
|
|
eb12832b1e | ||
|
|
687cefdfb9 | ||
|
|
8eae8aba72 | ||
|
|
abdbb8d364 | ||
|
|
8a55ab3af6 | ||
|
|
b7cb933e1e | ||
|
|
3af003e5e4 | ||
|
|
c3861a5532 | ||
|
|
ae1f10f45a | ||
|
|
612dccfb8c | ||
|
|
2ae8df2f5f | ||
|
|
1abfa74a16 | ||
|
|
69a793b227 | ||
|
|
35ccb3e39b | ||
|
|
3a7fcb2aa1 | ||
|
|
103c5b92e9 | ||
|
|
5253eeef95 | ||
|
|
848f835191 | ||
|
|
3e55e67534 | ||
|
|
7bca8b4924 | ||
|
|
56febfb415 | ||
|
|
925b8c6954 | ||
|
|
708816fe67 | ||
|
|
5502473fa4 | ||
|
|
c6cf0147a4 | ||
|
|
2951f4b4bc | ||
|
|
4ea13e258b | ||
|
|
3f8dcb47fd | ||
|
|
42dc3b930c | ||
|
|
97a78abd28 | ||
|
|
b3b2ef1b5a | ||
|
|
596a26a137 | ||
|
|
8a9a1df436 | ||
|
|
55d0880ed3 | ||
|
|
a74ebd5cd6 | ||
|
|
f3aed38964 | ||
|
|
2595e56199 | ||
|
|
e821bd7f65 | ||
|
|
09762d2a8d | ||
|
|
018c22918f | ||
|
|
3e9c8cf7da | ||
|
|
008404e442 | ||
|
|
2b224bcde8 | ||
|
|
1977b61693 | ||
|
|
8e2267f86c | ||
|
|
24355c2a88 | ||
|
|
51adcfa908 | ||
|
|
a55baa1f96 | ||
|
|
32793fde18 | ||
|
|
4381d585ec | ||
|
|
e595e26c45 | ||
|
|
b833cf28b5 | ||
|
|
ac11e9545c | ||
|
|
a07df5f6cd | ||
|
|
f626c85346 | ||
|
|
e15b53ebb5 | ||
|
|
7edb6bc379 | ||
|
|
78c81f932b | ||
|
|
56bb3ca29c | ||
|
|
14029e8c14 | ||
|
|
bea3d63d89 | ||
|
|
24a3bbd303 | ||
|
|
f9932af2e8 | ||
|
|
e0410c8f24 | ||
|
|
424a6cb41a | ||
|
|
afde71e913 | ||
|
|
086c32e711 | ||
|
|
710abcfaac | ||
|
|
be951d015c | ||
|
|
a07260bef4 | ||
|
|
dd19f8391d | ||
|
|
354873e220 | ||
|
|
386d4a15f0 | ||
|
|
1b75f15680 | ||
|
|
957bf74d97 |
6
.github/dependabot.yml
vendored
6
.github/dependabot.yml
vendored
@@ -51,7 +51,7 @@ updates:
|
||||
|
||||
storybook:
|
||||
applies-to: version-updates
|
||||
patterns: ["storybook*", "@storybook/*"]
|
||||
patterns: ["storybook*", "@storybook/*", "eslint-plugin-storybook"]
|
||||
|
||||
vitest:
|
||||
applies-to: version-updates
|
||||
@@ -67,10 +67,10 @@ updates:
|
||||
"@types/*",
|
||||
"storybook*",
|
||||
"@storybook/*",
|
||||
"eslint-plugin-storybook",
|
||||
"vitest",
|
||||
"@vitest/*",
|
||||
# Temporary exclusion of these packages from major updates
|
||||
"eslint-plugin-storybook",
|
||||
"eslint-plugin-vue",
|
||||
]
|
||||
|
||||
@@ -84,6 +84,7 @@ updates:
|
||||
"@types/*",
|
||||
"storybook*",
|
||||
"@storybook/*",
|
||||
"eslint-plugin-storybook",
|
||||
"vitest",
|
||||
"@vitest/*",
|
||||
# Temporary exclusion of these packages from minor updates
|
||||
@@ -102,6 +103,7 @@ updates:
|
||||
"@types/*",
|
||||
"storybook*",
|
||||
"@storybook/*",
|
||||
"eslint-plugin-storybook",
|
||||
"vitest",
|
||||
"@vitest/*",
|
||||
]
|
||||
|
||||
1
.github/workflows/main-build.yml
vendored
1
.github/workflows/main-build.yml
vendored
@@ -64,6 +64,7 @@ jobs:
|
||||
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
|
||||
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
|
||||
GH_PERSONAL_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
|
||||
|
||||
|
||||
publish-develop-maven:
|
||||
|
||||
1
.github/workflows/release-docker.yml
vendored
1
.github/workflows/release-docker.yml
vendored
@@ -32,3 +32,4 @@ jobs:
|
||||
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}
|
||||
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
|
||||
GH_PERSONAL_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
|
||||
@@ -29,8 +29,8 @@ start_time2=$(date +%s)
|
||||
|
||||
echo "cd ./ui"
|
||||
cd ./ui
|
||||
echo "npm i"
|
||||
npm i
|
||||
echo "npm ci"
|
||||
npm ci
|
||||
|
||||
echo 'sh ./run-e2e-tests.sh --kestra-docker-image-to-test "kestra/kestra:$LOCAL_IMAGE_VERSION"'
|
||||
./run-e2e-tests.sh --kestra-docker-image-to-test "kestra/kestra:$LOCAL_IMAGE_VERSION"
|
||||
|
||||
@@ -21,7 +21,7 @@ plugins {
|
||||
|
||||
// test
|
||||
id "com.adarshr.test-logger" version "4.0.0"
|
||||
id "org.sonarqube" version "7.1.0.6387"
|
||||
id "org.sonarqube" version "7.2.0.6526"
|
||||
id 'jacoco-report-aggregation'
|
||||
|
||||
// helper
|
||||
@@ -204,6 +204,9 @@ subprojects {subProj ->
|
||||
|
||||
//assertj
|
||||
testImplementation 'org.assertj:assertj-core'
|
||||
|
||||
// awaitility
|
||||
testImplementation 'org.awaitility:awaitility'
|
||||
}
|
||||
|
||||
def commonTestConfig = { Test t ->
|
||||
|
||||
@@ -31,6 +31,8 @@ dependencies {
|
||||
implementation project(":jdbc-mysql")
|
||||
implementation project(":jdbc-postgres")
|
||||
|
||||
implementation project(":queue")
|
||||
|
||||
implementation project(":storage-local")
|
||||
|
||||
// Kestra server components
|
||||
@@ -38,6 +40,7 @@ dependencies {
|
||||
implementation project(":scheduler")
|
||||
implementation project(":webserver")
|
||||
implementation project(":worker")
|
||||
implementation project(":indexer")
|
||||
|
||||
//test
|
||||
testImplementation project(':tests')
|
||||
|
||||
@@ -7,7 +7,6 @@ import io.kestra.cli.commands.namespaces.NamespaceCommand;
|
||||
import io.kestra.cli.commands.plugins.PluginCommand;
|
||||
import io.kestra.cli.commands.servers.ServerCommand;
|
||||
import io.kestra.cli.commands.sys.SysCommand;
|
||||
import io.kestra.cli.commands.templates.TemplateCommand;
|
||||
import io.kestra.cli.services.EnvironmentProvider;
|
||||
import io.micronaut.configuration.picocli.MicronautFactory;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
@@ -36,7 +35,6 @@ import java.util.stream.Stream;
|
||||
PluginCommand.class,
|
||||
ServerCommand.class,
|
||||
FlowCommand.class,
|
||||
TemplateCommand.class,
|
||||
SysCommand.class,
|
||||
ConfigCommand.class,
|
||||
NamespaceCommand.class,
|
||||
|
||||
@@ -4,6 +4,7 @@ import io.kestra.core.runners.*;
|
||||
import io.kestra.core.server.Service;
|
||||
import io.kestra.core.utils.Await;
|
||||
import io.kestra.core.utils.ExecutorsUtils;
|
||||
import io.kestra.executor.DefaultExecutor;
|
||||
import io.kestra.worker.DefaultWorker;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import io.micronaut.context.annotation.Value;
|
||||
@@ -49,7 +50,7 @@ public class StandAloneRunner implements Runnable, AutoCloseable {
|
||||
running.set(true);
|
||||
|
||||
poolExecutor = executorsUtils.cachedThreadPool("standalone-runner");
|
||||
poolExecutor.execute(applicationContext.getBean(ExecutorInterface.class));
|
||||
poolExecutor.execute(applicationContext.getBean(DefaultExecutor.class));
|
||||
|
||||
if (workerEnabled) {
|
||||
// FIXME: For backward-compatibility with Kestra 0.15.x and earliest we still used UUID for Worker ID instead of IdUtils
|
||||
|
||||
@@ -1,36 +0,0 @@
|
||||
package io.kestra.cli.commands.flows;
|
||||
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.core.models.flows.Flow;
|
||||
import io.kestra.core.models.validations.ModelValidator;
|
||||
import io.kestra.core.serializers.YamlParser;
|
||||
import jakarta.inject.Inject;
|
||||
import picocli.CommandLine;
|
||||
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "expand",
|
||||
description = "Deprecated - expand a flow"
|
||||
)
|
||||
@Deprecated
|
||||
public class FlowExpandCommand extends AbstractCommand {
|
||||
|
||||
@CommandLine.Parameters(index = "0", description = "The flow file to expand")
|
||||
private Path file;
|
||||
|
||||
@Inject
|
||||
private ModelValidator modelValidator;
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
stdErr("Warning, this functionality is deprecated and will be removed at some point.");
|
||||
String content = IncludeHelperExpander.expand(Files.readString(file), file.getParent());
|
||||
Flow flow = YamlParser.parse(content, Flow.class);
|
||||
modelValidator.validate(flow);
|
||||
stdOut(content);
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
@@ -21,6 +21,8 @@ import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.List;
|
||||
|
||||
import static io.kestra.core.utils.Rethrow.throwFunction;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "updates",
|
||||
description = "Create or update flows from a folder, and optionally delete the ones not present",
|
||||
@@ -41,7 +43,6 @@ public class FlowUpdatesCommand extends AbstractApiCommand {
|
||||
@Inject
|
||||
private TenantIdSelectorService tenantIdSelectorService;
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
@@ -50,13 +51,7 @@ public class FlowUpdatesCommand extends AbstractApiCommand {
|
||||
List<String> flows = files
|
||||
.filter(Files::isRegularFile)
|
||||
.filter(YamlParser::isValidExtension)
|
||||
.map(path -> {
|
||||
try {
|
||||
return IncludeHelperExpander.expand(Files.readString(path, Charset.defaultCharset()), path.getParent());
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
})
|
||||
.map(throwFunction(path -> Files.readString(path, Charset.defaultCharset())))
|
||||
.toList();
|
||||
|
||||
String body = "";
|
||||
|
||||
@@ -4,7 +4,7 @@ import io.kestra.cli.AbstractValidateCommand;
|
||||
import io.kestra.cli.services.TenantIdSelectorService;
|
||||
import io.kestra.core.models.flows.FlowWithSource;
|
||||
import io.kestra.core.models.validations.ModelValidator;
|
||||
import io.kestra.core.services.FlowService;
|
||||
import io.kestra.core.services.FlowValidationService;
|
||||
import jakarta.inject.Inject;
|
||||
import picocli.CommandLine;
|
||||
|
||||
@@ -21,7 +21,7 @@ public class FlowValidateCommand extends AbstractValidateCommand {
|
||||
private ModelValidator modelValidator;
|
||||
|
||||
@Inject
|
||||
private FlowService flowService;
|
||||
private FlowValidationService flowValidationService;
|
||||
|
||||
@Inject
|
||||
private TenantIdSelectorService tenantIdSelectorService;
|
||||
@@ -39,13 +39,13 @@ public class FlowValidateCommand extends AbstractValidateCommand {
|
||||
(Object object) -> {
|
||||
FlowWithSource flow = (FlowWithSource) object;
|
||||
List<String> warnings = new ArrayList<>();
|
||||
warnings.addAll(flowService.deprecationPaths(flow).stream().map(deprecation -> deprecation + " is deprecated").toList());
|
||||
warnings.addAll(flowService.warnings(flow, tenantIdSelectorService.getTenantIdAndAllowEETenants(tenantId)));
|
||||
warnings.addAll(flowValidationService.deprecationPaths(flow).stream().map(deprecation -> deprecation + " is deprecated").toList());
|
||||
warnings.addAll(flowValidationService.warnings(flow, tenantIdSelectorService.getTenantIdAndAllowEETenants(tenantId)));
|
||||
return warnings;
|
||||
},
|
||||
(Object object) -> {
|
||||
FlowWithSource flow = (FlowWithSource) object;
|
||||
return flowService.relocations(flow.sourceOrGenerateIfNull()).stream().map(relocation -> relocation.from() + " is replaced by " + relocation.to()).toList();
|
||||
return flowValidationService.relocations(flow.sourceOrGenerateIfNull()).stream().map(relocation -> relocation.from() + " is replaced by " + relocation.to()).toList();
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,40 +0,0 @@
|
||||
package io.kestra.cli.commands.flows;
|
||||
|
||||
import com.google.common.io.Files;
|
||||
import lombok.SneakyThrows;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.nio.file.Path;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
@Deprecated
|
||||
public abstract class IncludeHelperExpander {
|
||||
|
||||
public static String expand(String value, Path directory) throws IOException {
|
||||
return value.lines()
|
||||
.map(line -> line.contains("[[>") && line.contains("]]") ? expandLine(line, directory) : line)
|
||||
.collect(Collectors.joining("\n"));
|
||||
}
|
||||
|
||||
@SneakyThrows
|
||||
private static String expandLine(String line, Path directory) {
|
||||
String prefix = line.substring(0, line.indexOf("[[>"));
|
||||
String suffix = line.substring(line.indexOf("]]") + 2, line.length());
|
||||
String file = line.substring(line.indexOf("[[>") + 3 , line.indexOf("]]")).strip();
|
||||
Path includePath = directory.resolve(file);
|
||||
List<String> include = Files.readLines(includePath.toFile(), Charset.defaultCharset());
|
||||
|
||||
// handle single line directly with the suffix (should be between quotes or double-quotes
|
||||
if(include.size() == 1) {
|
||||
String singleInclude = include.getFirst();
|
||||
return prefix + singleInclude + suffix;
|
||||
}
|
||||
|
||||
// multi-line will be expanded with the prefix but no suffix
|
||||
return include.stream()
|
||||
.map(includeLine -> prefix + includeLine)
|
||||
.collect(Collectors.joining("\n"));
|
||||
}
|
||||
}
|
||||
@@ -2,7 +2,6 @@ package io.kestra.cli.commands.flows.namespaces;
|
||||
|
||||
import io.kestra.cli.AbstractValidateCommand;
|
||||
import io.kestra.cli.commands.AbstractServiceNamespaceUpdateCommand;
|
||||
import io.kestra.cli.commands.flows.IncludeHelperExpander;
|
||||
import io.kestra.cli.services.TenantIdSelectorService;
|
||||
import io.kestra.core.serializers.YamlParser;
|
||||
import io.micronaut.core.type.Argument;
|
||||
@@ -21,6 +20,8 @@ import java.nio.charset.Charset;
|
||||
import java.nio.file.Files;
|
||||
import java.util.List;
|
||||
|
||||
import static io.kestra.core.utils.Rethrow.throwFunction;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "update",
|
||||
description = "Update flows in namespace",
|
||||
@@ -44,13 +45,7 @@ public class FlowNamespaceUpdateCommand extends AbstractServiceNamespaceUpdateCo
|
||||
List<String> flows = files
|
||||
.filter(Files::isRegularFile)
|
||||
.filter(YamlParser::isValidExtension)
|
||||
.map(path -> {
|
||||
try {
|
||||
return IncludeHelperExpander.expand(Files.readString(path, Charset.defaultCharset()), path.getParent());
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
})
|
||||
.map(throwFunction(path -> Files.readString(path, Charset.defaultCharset())))
|
||||
.toList();
|
||||
|
||||
String body = "";
|
||||
|
||||
@@ -13,7 +13,8 @@ import picocli.CommandLine;
|
||||
mixinStandardHelpOptions = true,
|
||||
subcommands = {
|
||||
TenantMigrationCommand.class,
|
||||
MetadataMigrationCommand.class
|
||||
MetadataMigrationCommand.class,
|
||||
V2TriggerMigrationCommand.class,
|
||||
}
|
||||
)
|
||||
@Slf4j
|
||||
|
||||
@@ -0,0 +1,58 @@
|
||||
package io.kestra.cli.commands.migrations;
|
||||
|
||||
import com.github.javaparser.utils.Log;
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.core.models.triggers.Trigger;
|
||||
import io.kestra.core.models.triggers.TriggerId;
|
||||
import io.kestra.core.repositories.TriggerRepositoryInterface;
|
||||
import io.kestra.core.scheduler.SchedulerConfiguration;
|
||||
import io.kestra.core.scheduler.model.TriggerState;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.inject.Inject;
|
||||
import picocli.CommandLine;
|
||||
import picocli.CommandLine.Command;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
@Command(
|
||||
name = "triggers",
|
||||
description = "migrate all triggers to Kestra 2.0."
|
||||
)
|
||||
public class V2TriggerMigrationCommand extends AbstractCommand {
|
||||
|
||||
@Inject
|
||||
private ApplicationContext applicationContext;
|
||||
|
||||
@CommandLine.Option(names = "--dry-run", description = "Preview only, do not update")
|
||||
boolean dryRun;
|
||||
|
||||
@SuppressWarnings("removal")
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
|
||||
if (dryRun) {
|
||||
System.out.println("🧪 Dry-run mode enabled. No changes will be applied.");
|
||||
}
|
||||
|
||||
Log.info("🔁 Starting trigger states migration...");
|
||||
TriggerRepositoryInterface repository = applicationContext.getBean(TriggerRepositoryInterface.class);
|
||||
SchedulerConfiguration configuration = applicationContext.getBean(SchedulerConfiguration.class);
|
||||
List<Trigger> triggers = repository.findAllForAllTenantsV1();
|
||||
Log.info("Found [{}] triggers to migrate.");
|
||||
triggers.forEach(trigger -> {
|
||||
try {
|
||||
TriggerState migrated = trigger.toTriggerState(configuration.vnodes());
|
||||
if (!dryRun) {
|
||||
repository.save(migrated);
|
||||
}
|
||||
System.out.println("✅ Migration complete for: " + TriggerId.of(trigger));
|
||||
} catch (Exception e) {
|
||||
System.err.println("❌ Migration failed for : " + TriggerId.of(trigger));
|
||||
e.printStackTrace();
|
||||
}
|
||||
});
|
||||
System.out.println("✅ Migration complete.");
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
@@ -4,7 +4,7 @@ import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.cli.services.TenantIdSelectorService;
|
||||
import io.kestra.core.models.ServerType;
|
||||
import io.kestra.core.repositories.LocalFlowRepositoryLoader;
|
||||
import io.kestra.core.runners.ExecutorInterface;
|
||||
import io.kestra.core.runners.Executor;
|
||||
import io.kestra.core.services.SkipExecutionService;
|
||||
import io.kestra.core.services.StartExecutorService;
|
||||
import io.kestra.core.utils.Await;
|
||||
@@ -87,7 +87,7 @@ public class ExecutorCommand extends AbstractServerCommand {
|
||||
}
|
||||
}
|
||||
|
||||
ExecutorInterface executorService = applicationContext.getBean(ExecutorInterface.class);
|
||||
Executor executorService = applicationContext.getBean(Executor.class);
|
||||
executorService.run();
|
||||
|
||||
Await.until(() -> !this.applicationContext.isRunning());
|
||||
|
||||
@@ -2,7 +2,7 @@ package io.kestra.cli.commands.servers;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.models.ServerType;
|
||||
import io.kestra.scheduler.AbstractScheduler;
|
||||
import io.kestra.core.runners.Scheduler;
|
||||
import io.kestra.core.utils.Await;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.inject.Inject;
|
||||
@@ -10,6 +10,7 @@ import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "scheduler",
|
||||
@@ -19,7 +20,10 @@ import java.util.Map;
|
||||
public class SchedulerCommand extends AbstractServerCommand {
|
||||
@Inject
|
||||
private ApplicationContext applicationContext;
|
||||
|
||||
|
||||
@CommandLine.Option(names = {"-t", "--max-threads"}, description = "The maximum number of threads used by the scheduler for evaluating triggers.")
|
||||
private Integer maxThread;
|
||||
|
||||
@SuppressWarnings("unused")
|
||||
public static Map<String, Object> propertiesOverrides() {
|
||||
return ImmutableMap.of(
|
||||
@@ -30,9 +34,9 @@ public class SchedulerCommand extends AbstractServerCommand {
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
|
||||
AbstractScheduler scheduler = applicationContext.getBean(AbstractScheduler.class);
|
||||
scheduler.run();
|
||||
|
||||
Scheduler scheduler = applicationContext.getBean(Scheduler.class);
|
||||
scheduler.start(Optional.ofNullable(this.maxThread).orElse(Scheduler.defaultMaxNumThreads()));
|
||||
|
||||
Await.until(() -> !this.applicationContext.isRunning());
|
||||
|
||||
|
||||
@@ -4,6 +4,7 @@ import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.core.models.flows.Flow;
|
||||
import io.kestra.core.models.flows.GenericFlow;
|
||||
import io.kestra.core.repositories.FlowRepositoryInterface;
|
||||
import io.kestra.core.services.FlowService;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
@@ -12,6 +13,8 @@ import picocli.CommandLine;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
import static io.kestra.core.utils.Rethrow.throwConsumer;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "reindex",
|
||||
description = "Reindex all records of a type: read them from the database then update them",
|
||||
@@ -31,12 +34,13 @@ public class ReindexCommand extends AbstractCommand {
|
||||
|
||||
if ("flow".equals(type)) {
|
||||
FlowRepositoryInterface flowRepository = applicationContext.getBean(FlowRepositoryInterface.class);
|
||||
FlowService flowService = applicationContext.getBean(FlowService.class);
|
||||
|
||||
List<Flow> allFlow = flowRepository.findAllForAllTenants();
|
||||
allFlow.stream()
|
||||
.map(flow -> flowRepository.findByIdWithSource(flow.getTenantId(), flow.getNamespace(), flow.getId()).orElse(null))
|
||||
.filter(Objects::nonNull)
|
||||
.forEach(flow -> flowRepository.update(GenericFlow.of(flow), flow));
|
||||
.forEach(throwConsumer(flow -> flowService.update(GenericFlow.of(flow), flow)));
|
||||
|
||||
stdOut("Successfully reindex " + allFlow.size() + " flow(s).");
|
||||
}
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
package io.kestra.cli.commands.sys;
|
||||
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.executor.command.ExecutionCommand;
|
||||
import io.kestra.core.executor.command.Unqueue;
|
||||
import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.queues.QueueFactoryInterface;
|
||||
import io.kestra.core.queues.QueueInterface;
|
||||
import io.kestra.core.runners.ExecutionQueued;
|
||||
import io.kestra.core.services.ConcurrencyLimitService;
|
||||
import io.kestra.jdbc.runner.AbstractJdbcExecutionQueuedStorage;
|
||||
import io.kestra.jdbc.runner.AbstractJdbcExecutionQueuedStateStore;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.inject.Named;
|
||||
@@ -28,8 +28,8 @@ public class SubmitQueuedCommand extends AbstractCommand {
|
||||
private ApplicationContext applicationContext;
|
||||
|
||||
@Inject
|
||||
@Named(QueueFactoryInterface.EXECUTION_NAMED)
|
||||
private QueueInterface<Execution> executionQueue;
|
||||
@Named(QueueFactoryInterface.EXECUTION_COMMAND_NAMED)
|
||||
private QueueInterface<ExecutionCommand> executionCommandQueue;
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
@@ -47,12 +47,11 @@ public class SubmitQueuedCommand extends AbstractCommand {
|
||||
return 1;
|
||||
}
|
||||
else if (queueType.get().equals("postgres") || queueType.get().equals("mysql") || queueType.get().equals("h2")) {
|
||||
var executionQueuedStorage = applicationContext.getBean(AbstractJdbcExecutionQueuedStorage.class);
|
||||
var concurrencyLimitService = applicationContext.getBean(ConcurrencyLimitService.class);
|
||||
var executionQueuedStorage = applicationContext.getBean(AbstractJdbcExecutionQueuedStateStore.class);
|
||||
|
||||
for (ExecutionQueued queued : executionQueuedStorage.getAllForAllTenants()) {
|
||||
Execution restart = concurrencyLimitService.unqueue(queued.getExecution(), State.Type.RUNNING);
|
||||
executionQueue.emit(restart);
|
||||
var executionCommand = Unqueue.from(queued.getExecution(), State.Type.RUNNING);
|
||||
executionCommandQueue.emit(executionCommand);
|
||||
cpt++;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
package io.kestra.cli.commands.sys;
|
||||
|
||||
import io.kestra.cli.commands.sys.database.DatabaseCommand;
|
||||
import io.kestra.cli.commands.sys.statestore.StateStoreCommand;
|
||||
import io.micronaut.configuration.picocli.PicocliRunner;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.cli.App;
|
||||
@@ -15,7 +15,6 @@ import picocli.CommandLine;
|
||||
ReindexCommand.class,
|
||||
DatabaseCommand.class,
|
||||
SubmitQueuedCommand.class,
|
||||
StateStoreCommand.class
|
||||
}
|
||||
)
|
||||
@Slf4j
|
||||
|
||||
@@ -1,24 +0,0 @@
|
||||
package io.kestra.cli.commands.sys.statestore;
|
||||
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.cli.App;
|
||||
import lombok.SneakyThrows;
|
||||
import picocli.CommandLine;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "state-store",
|
||||
description = "Manage Kestra State Store",
|
||||
mixinStandardHelpOptions = true,
|
||||
subcommands = {
|
||||
StateStoreMigrateCommand.class,
|
||||
}
|
||||
)
|
||||
public class StateStoreCommand extends AbstractCommand {
|
||||
@SneakyThrows
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
|
||||
return App.runCli(new String[]{"sys", "state-store", "--help"});
|
||||
}
|
||||
}
|
||||
@@ -1,73 +0,0 @@
|
||||
package io.kestra.cli.commands.sys.statestore;
|
||||
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.core.models.flows.Flow;
|
||||
import io.kestra.core.repositories.FlowRepositoryInterface;
|
||||
import io.kestra.core.runners.RunContext;
|
||||
import io.kestra.core.runners.RunContextFactory;
|
||||
import io.kestra.core.storages.StateStore;
|
||||
import io.kestra.core.storages.StorageInterface;
|
||||
import io.kestra.core.utils.Slugify;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.net.URI;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "migrate",
|
||||
description = "Migrate old state store files to use the new KV Store implementation.",
|
||||
mixinStandardHelpOptions = true
|
||||
)
|
||||
@Slf4j
|
||||
public class StateStoreMigrateCommand extends AbstractCommand {
|
||||
@Inject
|
||||
private ApplicationContext applicationContext;
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
|
||||
FlowRepositoryInterface flowRepository = this.applicationContext.getBean(FlowRepositoryInterface.class);
|
||||
StorageInterface storageInterface = this.applicationContext.getBean(StorageInterface.class);
|
||||
RunContextFactory runContextFactory = this.applicationContext.getBean(RunContextFactory.class);
|
||||
|
||||
flowRepository.findAllForAllTenants().stream().map(flow -> Map.entry(flow, List.of(
|
||||
URI.create("/" + flow.getNamespace().replace(".", "/") + "/" + Slugify.of(flow.getId()) + "/states"),
|
||||
URI.create("/" + flow.getNamespace().replace(".", "/") + "/states")
|
||||
))).map(potentialStateStoreUrisForAFlow -> Map.entry(potentialStateStoreUrisForAFlow.getKey(), potentialStateStoreUrisForAFlow.getValue().stream().flatMap(uri -> {
|
||||
try {
|
||||
return storageInterface.allByPrefix(potentialStateStoreUrisForAFlow.getKey().getTenantId(), potentialStateStoreUrisForAFlow.getKey().getNamespace(), uri, false).stream();
|
||||
} catch (IOException e) {
|
||||
return Stream.empty();
|
||||
}
|
||||
}).toList())).forEach(stateStoreFileUrisForAFlow -> stateStoreFileUrisForAFlow.getValue().forEach(stateStoreFileUri -> {
|
||||
Flow flow = stateStoreFileUrisForAFlow.getKey();
|
||||
String[] flowQualifierWithStateQualifiers = stateStoreFileUri.getPath().split("/states/");
|
||||
String[] statesUriPart = flowQualifierWithStateQualifiers[1].split("/");
|
||||
|
||||
String stateName = statesUriPart[0];
|
||||
String taskRunValue = statesUriPart.length > 2 ? statesUriPart[1] : null;
|
||||
String stateSubName = statesUriPart[statesUriPart.length - 1];
|
||||
boolean flowScoped = flowQualifierWithStateQualifiers[0].endsWith("/" + flow.getId());
|
||||
StateStore stateStore = new StateStore(runContextFactory.of(flow, Map.of()), false);
|
||||
|
||||
try (InputStream is = storageInterface.get(flow.getTenantId(), flow.getNamespace(), stateStoreFileUri)) {
|
||||
stateStore.putState(flowScoped, stateName, stateSubName, taskRunValue, is.readAllBytes());
|
||||
storageInterface.delete(flow.getTenantId(), flow.getNamespace(), stateStoreFileUri);
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}));
|
||||
|
||||
stdOut("Successfully ran the state-store migration.");
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
@@ -1,31 +0,0 @@
|
||||
package io.kestra.cli.commands.templates;
|
||||
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.cli.App;
|
||||
import io.kestra.cli.commands.templates.namespaces.TemplateNamespaceCommand;
|
||||
import io.kestra.core.models.templates.TemplateEnabled;
|
||||
import lombok.SneakyThrows;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "template",
|
||||
description = "Manage templates",
|
||||
mixinStandardHelpOptions = true,
|
||||
subcommands = {
|
||||
TemplateNamespaceCommand.class,
|
||||
TemplateValidateCommand.class,
|
||||
TemplateExportCommand.class,
|
||||
}
|
||||
)
|
||||
@Slf4j
|
||||
@TemplateEnabled
|
||||
public class TemplateCommand extends AbstractCommand {
|
||||
@SneakyThrows
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
|
||||
return App.runCli(new String[]{"template", "--help"});
|
||||
}
|
||||
}
|
||||
@@ -1,61 +0,0 @@
|
||||
package io.kestra.cli.commands.templates;
|
||||
|
||||
import io.kestra.cli.AbstractApiCommand;
|
||||
import io.kestra.cli.AbstractValidateCommand;
|
||||
import io.kestra.cli.services.TenantIdSelectorService;
|
||||
import io.kestra.core.models.templates.TemplateEnabled;
|
||||
import io.micronaut.http.HttpRequest;
|
||||
import io.micronaut.http.HttpResponse;
|
||||
import io.micronaut.http.MediaType;
|
||||
import io.micronaut.http.MutableHttpRequest;
|
||||
import io.micronaut.http.client.exceptions.HttpClientResponseException;
|
||||
import io.micronaut.http.client.netty.DefaultHttpClient;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "export",
|
||||
description = "Export templates to a ZIP file",
|
||||
mixinStandardHelpOptions = true
|
||||
)
|
||||
@Slf4j
|
||||
@TemplateEnabled
|
||||
public class TemplateExportCommand extends AbstractApiCommand {
|
||||
private static final String DEFAULT_FILE_NAME = "templates.zip";
|
||||
|
||||
@Inject
|
||||
private TenantIdSelectorService tenantService;
|
||||
|
||||
@CommandLine.Option(names = {"--namespace"}, description = "The namespace of templates to export")
|
||||
public String namespace;
|
||||
|
||||
@CommandLine.Parameters(index = "0", description = "The directory to export the file to")
|
||||
public Path directory;
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
|
||||
try(DefaultHttpClient client = client()) {
|
||||
MutableHttpRequest<Object> request = HttpRequest
|
||||
.GET(apiUri("/templates/export/by-query", tenantService.getTenantId(tenantId)) + (namespace != null ? "?namespace=" + namespace : ""))
|
||||
.accept(MediaType.APPLICATION_OCTET_STREAM);
|
||||
|
||||
HttpResponse<byte[]> response = client.toBlocking().exchange(this.requestOptions(request), byte[].class);
|
||||
Path zipFile = Path.of(directory.toString(), DEFAULT_FILE_NAME);
|
||||
zipFile.toFile().createNewFile();
|
||||
Files.write(zipFile, response.body());
|
||||
|
||||
stdOut("Exporting template(s) for namespace '" + namespace + "' successfully done !");
|
||||
} catch (HttpClientResponseException e) {
|
||||
AbstractValidateCommand.handleHttpException(e, "template");
|
||||
return 1;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
@@ -1,35 +0,0 @@
|
||||
package io.kestra.cli.commands.templates;
|
||||
|
||||
import io.kestra.cli.AbstractValidateCommand;
|
||||
import io.kestra.core.models.templates.Template;
|
||||
import io.kestra.core.models.templates.TemplateEnabled;
|
||||
import io.kestra.core.models.validations.ModelValidator;
|
||||
import jakarta.inject.Inject;
|
||||
import picocli.CommandLine;
|
||||
|
||||
import java.util.Collections;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "validate",
|
||||
description = "Validate a template"
|
||||
)
|
||||
@TemplateEnabled
|
||||
public class TemplateValidateCommand extends AbstractValidateCommand {
|
||||
|
||||
@Inject
|
||||
private ModelValidator modelValidator;
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
return this.call(
|
||||
Template.class,
|
||||
modelValidator,
|
||||
(Object object) -> {
|
||||
Template template = (Template) object;
|
||||
return template.getNamespace() + " / " + template.getId();
|
||||
},
|
||||
(Object object) -> Collections.emptyList(),
|
||||
(Object object) -> Collections.emptyList()
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -1,28 +0,0 @@
|
||||
package io.kestra.cli.commands.templates.namespaces;
|
||||
|
||||
import io.kestra.cli.AbstractCommand;
|
||||
import io.kestra.cli.App;
|
||||
import io.kestra.core.models.templates.TemplateEnabled;
|
||||
import lombok.SneakyThrows;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "namespace",
|
||||
description = "Manage namespace templates",
|
||||
mixinStandardHelpOptions = true,
|
||||
subcommands = {
|
||||
TemplateNamespaceUpdateCommand.class,
|
||||
}
|
||||
)
|
||||
@Slf4j
|
||||
@TemplateEnabled
|
||||
public class TemplateNamespaceCommand extends AbstractCommand {
|
||||
@SneakyThrows
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
|
||||
return App.runCli(new String[]{"template", "namespace", "--help"});
|
||||
}
|
||||
}
|
||||
@@ -1,74 +0,0 @@
|
||||
package io.kestra.cli.commands.templates.namespaces;
|
||||
|
||||
import io.kestra.cli.AbstractValidateCommand;
|
||||
import io.kestra.cli.commands.AbstractServiceNamespaceUpdateCommand;
|
||||
import io.kestra.cli.services.TenantIdSelectorService;
|
||||
import io.kestra.core.models.templates.Template;
|
||||
import io.kestra.core.models.templates.TemplateEnabled;
|
||||
import io.kestra.core.serializers.YamlParser;
|
||||
import io.micronaut.core.type.Argument;
|
||||
import io.micronaut.http.HttpRequest;
|
||||
import io.micronaut.http.MutableHttpRequest;
|
||||
import io.micronaut.http.client.exceptions.HttpClientResponseException;
|
||||
import io.micronaut.http.client.netty.DefaultHttpClient;
|
||||
import jakarta.inject.Inject;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import picocli.CommandLine;
|
||||
|
||||
import java.nio.file.Files;
|
||||
import java.util.List;
|
||||
|
||||
import jakarta.validation.ConstraintViolationException;
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "update",
|
||||
description = "Update namespace templates",
|
||||
mixinStandardHelpOptions = true
|
||||
)
|
||||
@Slf4j
|
||||
@TemplateEnabled
|
||||
public class TemplateNamespaceUpdateCommand extends AbstractServiceNamespaceUpdateCommand {
|
||||
|
||||
@Inject
|
||||
private TenantIdSelectorService tenantService;
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
super.call();
|
||||
|
||||
try (var files = Files.walk(directory)) {
|
||||
List<Template> templates = files
|
||||
.filter(Files::isRegularFile)
|
||||
.filter(YamlParser::isValidExtension)
|
||||
.map(path -> YamlParser.parse(path.toFile(), Template.class))
|
||||
.toList();
|
||||
|
||||
if (templates.isEmpty()) {
|
||||
stdOut("No template found on '{}'", directory.toFile().getAbsolutePath());
|
||||
}
|
||||
|
||||
try (DefaultHttpClient client = client()) {
|
||||
MutableHttpRequest<List<Template>> request = HttpRequest
|
||||
.POST(apiUri("/templates/", tenantService.getTenantIdAndAllowEETenants(tenantId)) + namespace + "?delete=" + delete, templates);
|
||||
|
||||
List<UpdateResult> updated = client.toBlocking().retrieve(
|
||||
this.requestOptions(request),
|
||||
Argument.listOf(UpdateResult.class)
|
||||
);
|
||||
|
||||
stdOut(updated.size() + " template(s) for namespace '" + namespace + "' successfully updated !");
|
||||
updated.forEach(template -> stdOut("- " + template.getNamespace() + "." + template.getId()));
|
||||
} catch (HttpClientResponseException e) {
|
||||
AbstractValidateCommand.handleHttpException(e, "template");
|
||||
|
||||
return 1;
|
||||
}
|
||||
} catch (ConstraintViolationException e) {
|
||||
AbstractValidateCommand.handleException(e, "template");
|
||||
|
||||
return 1;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
@@ -6,13 +6,17 @@ import io.kestra.core.models.flows.FlowWithPath;
|
||||
import io.kestra.core.models.flows.FlowWithSource;
|
||||
import io.kestra.core.models.flows.GenericFlow;
|
||||
import io.kestra.core.models.validations.ModelValidator;
|
||||
import io.kestra.core.queues.QueueFactoryInterface;
|
||||
import io.kestra.core.queues.QueueInterface;
|
||||
import io.kestra.core.repositories.FlowRepositoryInterface;
|
||||
import io.kestra.core.services.FlowListenersInterface;
|
||||
import io.kestra.core.services.FlowService;
|
||||
import io.kestra.core.services.PluginDefaultService;
|
||||
import io.micronaut.context.annotation.Requires;
|
||||
import io.micronaut.scheduling.io.watch.FileWatchConfiguration;
|
||||
import jakarta.annotation.Nullable;
|
||||
import jakarta.annotation.PreDestroy;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.inject.Named;
|
||||
import jakarta.inject.Singleton;
|
||||
import jakarta.validation.ConstraintViolationException;
|
||||
import java.util.concurrent.CopyOnWriteArrayList;
|
||||
@@ -25,6 +29,8 @@ import java.nio.file.attribute.BasicFileAttributes;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
import static io.kestra.core.utils.Rethrow.throwConsumer;
|
||||
|
||||
@Singleton
|
||||
@Slf4j
|
||||
@Requires(property = "micronaut.io.watch.enabled", value = "true")
|
||||
@@ -37,6 +43,9 @@ public class FileChangedEventListener {
|
||||
@Inject
|
||||
private FlowRepositoryInterface flowRepositoryInterface;
|
||||
|
||||
@Inject
|
||||
private FlowService flowService;
|
||||
|
||||
@Inject
|
||||
private PluginDefaultService pluginDefaultService;
|
||||
|
||||
@@ -44,13 +53,12 @@ public class FileChangedEventListener {
|
||||
private ModelValidator modelValidator;
|
||||
|
||||
@Inject
|
||||
protected FlowListenersInterface flowListeners;
|
||||
@Named(QueueFactoryInterface.FLOW_NAMED) private QueueInterface<FlowInterface> flowQueue;
|
||||
|
||||
FlowFilesManager flowFilesManager;
|
||||
private FlowFilesManager flowFilesManager;
|
||||
private Runnable cancellation;
|
||||
|
||||
private List<FlowWithPath> flows = new CopyOnWriteArrayList<>();
|
||||
|
||||
private boolean isStarted = false;
|
||||
private final List<FlowWithPath> flows = new CopyOnWriteArrayList<>();
|
||||
|
||||
@Inject
|
||||
public FileChangedEventListener(@Nullable FileWatchConfiguration fileWatchConfiguration, @Nullable WatchService watchService) {
|
||||
@@ -60,41 +68,38 @@ public class FileChangedEventListener {
|
||||
|
||||
public void startListeningFromConfig() throws IOException, InterruptedException {
|
||||
if (fileWatchConfiguration != null && fileWatchConfiguration.isEnabled()) {
|
||||
this.flowFilesManager = new LocalFlowFileWatcher(flowRepositoryInterface);
|
||||
this.flowFilesManager = new LocalFlowFileWatcher(flowRepositoryInterface, flowService);
|
||||
List<Path> paths = fileWatchConfiguration.getPaths();
|
||||
this.setup(paths);
|
||||
|
||||
flowListeners.run();
|
||||
// Init existing flows not already in files
|
||||
flowListeners.listen(flows -> {
|
||||
if (!isStarted) {
|
||||
for (FlowInterface flow : flows) {
|
||||
if (this.flows.stream().noneMatch(flowWithPath -> flowWithPath.uidWithoutRevision().equals(flow.uidWithoutRevision()))) {
|
||||
flowToFile(flow, this.buildPath(flow));
|
||||
this.flows.add(FlowWithPath.of(flow, this.buildPath(flow).toString()));
|
||||
}
|
||||
}
|
||||
this.isStarted = true;
|
||||
}
|
||||
flowRepositoryInterface.findAllForAllTenants().forEach(flow -> {
|
||||
flowToFile(flow, this.buildPath(flow));
|
||||
flows.add(FlowWithPath.of(flow, this.buildPath(flow).toString()));
|
||||
});
|
||||
|
||||
// Listen for new/updated/deleted flows
|
||||
flowListeners.listen((current, previous) -> {
|
||||
// If deleted
|
||||
if (current.isDeleted()) {
|
||||
this.flows.stream().filter(flowWithPath -> flowWithPath.uidWithoutRevision().equals(current.uidWithoutRevision())).findFirst()
|
||||
.ifPresent(flowWithPath -> {
|
||||
deleteFile(Paths.get(flowWithPath.getPath()));
|
||||
});
|
||||
this.flows.removeIf(flowWithPath -> flowWithPath.uidWithoutRevision().equals(current.uidWithoutRevision()));
|
||||
flowQueue.receive(either -> {
|
||||
if (either.isRight()) {
|
||||
log.error("Unable to deserialize a flow event: {}", either.getRight().getMessage());
|
||||
} else {
|
||||
// if updated/created
|
||||
Optional<FlowWithPath> flowWithPath = this.flows.stream().filter(fwp -> fwp.uidWithoutRevision().equals(current.uidWithoutRevision())).findFirst();
|
||||
if (flowWithPath.isPresent()) {
|
||||
flowToFile(current, Paths.get(flowWithPath.get().getPath()));
|
||||
FlowInterface current = either.getLeft();
|
||||
// If deleted
|
||||
if (current.isDeleted()) {
|
||||
this.flows.stream().filter(flowWithPath -> flowWithPath.uidWithoutRevision().equals(current.uidWithoutRevision())).findFirst()
|
||||
.ifPresent(flowWithPath -> {
|
||||
deleteFile(Paths.get(flowWithPath.getPath()));
|
||||
});
|
||||
this.flows.removeIf(flowWithPath -> flowWithPath.uidWithoutRevision().equals(current.uidWithoutRevision()));
|
||||
} else {
|
||||
flows.add(FlowWithPath.of(current, this.buildPath(current).toString()));
|
||||
flowToFile(current, null);
|
||||
// if updated/created
|
||||
Optional<FlowWithPath> flowWithPath = this.flows.stream().filter(fwp -> fwp.uidWithoutRevision().equals(current.uidWithoutRevision())).findFirst();
|
||||
if (flowWithPath.isPresent()) {
|
||||
flowToFile(current, Paths.get(flowWithPath.get().getPath()));
|
||||
} else {
|
||||
flows.add(FlowWithPath.of(current, this.buildPath(current).toString()));
|
||||
flowToFile(current, null);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
@@ -105,6 +110,11 @@ public class FileChangedEventListener {
|
||||
}
|
||||
}
|
||||
|
||||
@PreDestroy
|
||||
void close() {
|
||||
cancellation.run();
|
||||
}
|
||||
|
||||
public void startListening(List<Path> paths) throws IOException, InterruptedException {
|
||||
for (Path path : paths) {
|
||||
path.register(watchService, StandardWatchEventKinds.ENTRY_CREATE, StandardWatchEventKinds.ENTRY_DELETE, StandardWatchEventKinds.ENTRY_MODIFY);
|
||||
@@ -158,10 +168,10 @@ public class FileChangedEventListener {
|
||||
flows.stream()
|
||||
.filter(flow -> flow.getPath().equals(filePath.toString()))
|
||||
.findFirst()
|
||||
.ifPresent(flowWithPath -> {
|
||||
.ifPresent(throwConsumer(flowWithPath -> {
|
||||
flowFilesManager.deleteFlow(flowWithPath.getTenantId(), flowWithPath.getNamespace(), flowWithPath.getId());
|
||||
this.flows.removeIf(fwp -> fwp.uidWithoutRevision().equals(flowWithPath.uidWithoutRevision()));
|
||||
});
|
||||
}));
|
||||
} catch (IOException e) {
|
||||
log.error("Error reading file: {}", entry, e);
|
||||
}
|
||||
@@ -171,10 +181,10 @@ public class FileChangedEventListener {
|
||||
flows.stream()
|
||||
.filter(flow -> flow.getPath().equals(filePath.toString()))
|
||||
.findFirst()
|
||||
.ifPresent(flowWithPath -> {
|
||||
.ifPresent(throwConsumer(flowWithPath -> {
|
||||
flowFilesManager.deleteFlow(flowWithPath.getTenantId(), flowWithPath.getNamespace(), flowWithPath.getId());
|
||||
this.flows.removeIf(fwp -> fwp.uidWithoutRevision().equals(flowWithPath.uidWithoutRevision()));
|
||||
});
|
||||
}));
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
@@ -211,7 +221,11 @@ public class FileChangedEventListener {
|
||||
|
||||
if (flow.isPresent() && flows.stream().noneMatch(flowWithPath -> flowWithPath.uidWithoutRevision().equals(flow.get().uidWithoutRevision()))) {
|
||||
flows.add(FlowWithPath.of(flow.get(), file.toString()));
|
||||
flowFilesManager.createOrUpdateFlow(GenericFlow.fromYaml(getTenantIdFromPath(file), content));
|
||||
try {
|
||||
flowFilesManager.createOrUpdateFlow(GenericFlow.fromYaml(getTenantIdFromPath(file), content));
|
||||
} catch (Exception e) {
|
||||
log.error("Unexpected error while watching flows", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
return FileVisitResult.CONTINUE;
|
||||
|
||||
@@ -2,12 +2,13 @@ package io.kestra.cli.services;
|
||||
|
||||
import io.kestra.core.models.flows.FlowWithSource;
|
||||
import io.kestra.core.models.flows.GenericFlow;
|
||||
import io.kestra.core.queues.QueueException;
|
||||
|
||||
public interface FlowFilesManager {
|
||||
|
||||
FlowWithSource createOrUpdateFlow(GenericFlow flow);
|
||||
FlowWithSource createOrUpdateFlow(GenericFlow flow) throws Exception;
|
||||
|
||||
void deleteFlow(FlowWithSource toDelete);
|
||||
void deleteFlow(FlowWithSource toDelete) throws QueueException;
|
||||
|
||||
void deleteFlow(String tenantId, String namespace, String id);
|
||||
void deleteFlow(String tenantId, String namespace, String id) throws QueueException;
|
||||
}
|
||||
|
||||
@@ -2,33 +2,41 @@ package io.kestra.cli.services;
|
||||
|
||||
import io.kestra.core.models.flows.FlowWithSource;
|
||||
import io.kestra.core.models.flows.GenericFlow;
|
||||
import io.kestra.core.queues.QueueException;
|
||||
import io.kestra.core.repositories.FlowRepositoryInterface;
|
||||
import io.kestra.core.services.FlowService;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
import static io.kestra.core.utils.Rethrow.*;
|
||||
|
||||
@Slf4j
|
||||
public class LocalFlowFileWatcher implements FlowFilesManager {
|
||||
private final FlowRepositoryInterface flowRepository;
|
||||
private final FlowService flowService;
|
||||
|
||||
public LocalFlowFileWatcher(FlowRepositoryInterface flowRepository) {
|
||||
public LocalFlowFileWatcher(FlowRepositoryInterface flowRepository, FlowService flowService) {
|
||||
this.flowRepository = flowRepository;
|
||||
this.flowService = flowService;
|
||||
}
|
||||
|
||||
@Override
|
||||
public FlowWithSource createOrUpdateFlow(final GenericFlow flow) {
|
||||
public FlowWithSource createOrUpdateFlow(final GenericFlow flow) throws Exception {
|
||||
return flowRepository.findById(flow.getTenantId(), flow.getNamespace(), flow.getId())
|
||||
.map(previous -> flowRepository.update(flow, previous))
|
||||
.orElseGet(() -> flowRepository.create(flow));
|
||||
.map(throwFunction(previous -> flowService.update(flow, previous)))
|
||||
.orElseGet(throwSupplier(() -> flowService.create(flow)));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void deleteFlow(FlowWithSource toDelete) {
|
||||
flowRepository.findByIdWithSource(toDelete.getTenantId(), toDelete.getNamespace(), toDelete.getId()).ifPresent(flowRepository::delete);
|
||||
public void deleteFlow(FlowWithSource toDelete) throws QueueException {
|
||||
flowRepository.findByIdWithSource(toDelete.getTenantId(), toDelete.getNamespace(), toDelete.getId())
|
||||
.ifPresent(throwConsumer(flow -> flowService.delete(flow)));
|
||||
log.info("Flow {} has been deleted", toDelete.getId());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void deleteFlow(String tenantId, String namespace, String id) {
|
||||
flowRepository.findByIdWithSource(tenantId, namespace, id).ifPresent(flowRepository::delete);
|
||||
public void deleteFlow(String tenantId, String namespace, String id) throws QueueException {
|
||||
flowRepository.findByIdWithSource(tenantId, namespace, id)
|
||||
.ifPresent(throwConsumer(flow -> flowService.delete(flow)));
|
||||
log.info("Flow {} has been deleted", id);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -14,7 +14,7 @@ import static org.assertj.core.api.Assertions.assertThat;
|
||||
class FlowDotCommandTest {
|
||||
@Test
|
||||
void run() {
|
||||
URL directory = TemplateValidateCommandTest.class.getClassLoader().getResource("flows/same/first.yaml");
|
||||
URL directory = FlowDotCommandTest.class.getClassLoader().getResource("flows/same/first.yaml");
|
||||
ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
System.setOut(new PrintStream(out));
|
||||
|
||||
|
||||
@@ -1,41 +0,0 @@
|
||||
package io.kestra.cli.commands.flows;
|
||||
|
||||
import io.micronaut.configuration.picocli.PicocliRunner;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
class FlowExpandCommandTest {
|
||||
@SuppressWarnings("deprecation")
|
||||
@Test
|
||||
void run() {
|
||||
ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
System.setOut(new PrintStream(out));
|
||||
|
||||
try (ApplicationContext ctx = ApplicationContext.builder().deduceEnvironment(false).start()) {
|
||||
String[] args = {
|
||||
"src/test/resources/helper/include.yaml"
|
||||
};
|
||||
Integer call = PicocliRunner.call(FlowExpandCommand.class, ctx, args);
|
||||
|
||||
assertThat(call).isZero();
|
||||
assertThat(out.toString()).isEqualTo("id: include\n" +
|
||||
"namespace: io.kestra.cli\n" +
|
||||
"\n" +
|
||||
"# The list of tasks\n" +
|
||||
"tasks:\n" +
|
||||
"- id: t1\n" +
|
||||
" type: io.kestra.plugin.core.debug.Return\n" +
|
||||
" format: \"Lorem ipsum dolor sit amet\"\n" +
|
||||
"- id: t2\n" +
|
||||
" type: io.kestra.plugin.core.debug.Return\n" +
|
||||
" format: |\n" +
|
||||
" Lorem ipsum dolor sit amet\n" +
|
||||
" Lorem ipsum dolor sit amet\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -61,7 +61,6 @@ class FlowValidateCommandTest {
|
||||
|
||||
assertThat(call).isZero();
|
||||
assertThat(out.toString()).contains("✓ - system / warning");
|
||||
assertThat(out.toString()).contains("⚠ - tasks[0] is deprecated");
|
||||
assertThat(out.toString()).contains("ℹ - io.kestra.core.tasks.log.Log is replaced by io.kestra.plugin.core.log.Log");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,62 +0,0 @@
|
||||
package io.kestra.cli.commands.flows;
|
||||
|
||||
import io.micronaut.configuration.picocli.PicocliRunner;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import io.micronaut.context.env.Environment;
|
||||
import io.micronaut.runtime.server.EmbeddedServer;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
import java.net.URL;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
class TemplateValidateCommandTest {
|
||||
@Test
|
||||
void runLocal() {
|
||||
URL directory = TemplateValidateCommandTest.class.getClassLoader().getResource("invalids/empty.yaml");
|
||||
ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
System.setErr(new PrintStream(out));
|
||||
|
||||
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
|
||||
String[] args = {
|
||||
"--local",
|
||||
directory.getPath()
|
||||
};
|
||||
Integer call = PicocliRunner.call(FlowValidateCommand.class, ctx, args);
|
||||
|
||||
assertThat(call).isEqualTo(1);
|
||||
assertThat(out.toString()).contains("Unable to parse flow");
|
||||
assertThat(out.toString()).contains("must not be empty");
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void runServer() {
|
||||
URL directory = TemplateValidateCommandTest.class.getClassLoader().getResource("invalids/empty.yaml");
|
||||
ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
System.setErr(new PrintStream(out));
|
||||
|
||||
try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
|
||||
|
||||
EmbeddedServer embeddedServer = ctx.getBean(EmbeddedServer.class);
|
||||
embeddedServer.start();
|
||||
|
||||
String[] args = {
|
||||
"--plugins",
|
||||
"/tmp", // pass this arg because it can cause failure
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
"myuser:pass:word",
|
||||
directory.getPath()
|
||||
};
|
||||
Integer call = PicocliRunner.call(FlowValidateCommand.class, ctx, args);
|
||||
|
||||
assertThat(call).isEqualTo(1);
|
||||
assertThat(out.toString()).contains("Unable to parse flow");
|
||||
assertThat(out.toString()).contains("must not be empty");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,27 +0,0 @@
|
||||
package io.kestra.cli.commands.sys.statestore;
|
||||
|
||||
import io.kestra.cli.commands.sys.database.DatabaseCommand;
|
||||
import io.micronaut.configuration.picocli.PicocliRunner;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
class StateStoreCommandTest {
|
||||
@Test
|
||||
void runWithNoParam() {
|
||||
ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
System.setOut(new PrintStream(out));
|
||||
|
||||
try (ApplicationContext ctx = ApplicationContext.builder().deduceEnvironment(false).start()) {
|
||||
String[] args = {};
|
||||
Integer call = PicocliRunner.call(StateStoreCommand.class, ctx, args);
|
||||
|
||||
assertThat(call).isZero();
|
||||
assertThat(out.toString()).contains("Usage: kestra sys state-store");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,71 +0,0 @@
|
||||
package io.kestra.cli.commands.sys.statestore;
|
||||
|
||||
import io.kestra.core.exceptions.MigrationRequiredException;
|
||||
import io.kestra.core.exceptions.ResourceExpiredException;
|
||||
import io.kestra.core.models.flows.Flow;
|
||||
import io.kestra.core.models.flows.GenericFlow;
|
||||
import io.kestra.core.repositories.FlowRepositoryInterface;
|
||||
import io.kestra.core.runners.RunContext;
|
||||
import io.kestra.core.runners.RunContextFactory;
|
||||
import io.kestra.core.storages.StateStore;
|
||||
import io.kestra.core.storages.StorageInterface;
|
||||
import io.kestra.core.utils.Hashing;
|
||||
import io.kestra.core.utils.Slugify;
|
||||
import io.kestra.plugin.core.log.Log;
|
||||
import io.micronaut.configuration.picocli.PicocliRunner;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import org.junit.jupiter.api.Assertions;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.PrintStream;
|
||||
import java.net.URI;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
class StateStoreMigrateCommandTest {
|
||||
@Test
|
||||
void runMigration() throws IOException, ResourceExpiredException {
|
||||
ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
System.setOut(new PrintStream(out));
|
||||
|
||||
try (ApplicationContext ctx = ApplicationContext.builder().deduceEnvironment(false).environments("test").start()) {
|
||||
FlowRepositoryInterface flowRepository = ctx.getBean(FlowRepositoryInterface.class);
|
||||
|
||||
Flow flow = Flow.builder()
|
||||
.tenantId("my-tenant")
|
||||
.id("a-flow")
|
||||
.namespace("some.valid.namespace." + ((int) (Math.random() * 1000000)))
|
||||
.tasks(List.of(Log.builder().id("log").type(Log.class.getName()).message("logging").build()))
|
||||
.build();
|
||||
flowRepository.create(GenericFlow.of(flow));
|
||||
|
||||
StorageInterface storage = ctx.getBean(StorageInterface.class);
|
||||
String tenantId = flow.getTenantId();
|
||||
URI oldStateStoreUri = URI.create("/" + flow.getNamespace().replace(".", "/") + "/" + Slugify.of("a-flow") + "/states/my-state/" + Hashing.hashToString("my-taskrun-value") + "/sub-name");
|
||||
storage.put(
|
||||
tenantId,
|
||||
flow.getNamespace(),
|
||||
oldStateStoreUri,
|
||||
new ByteArrayInputStream("my-value".getBytes())
|
||||
);
|
||||
assertThat(storage.exists(tenantId, flow.getNamespace(), oldStateStoreUri)).isTrue();
|
||||
|
||||
RunContext runContext = ctx.getBean(RunContextFactory.class).of(flow, Map.of());
|
||||
StateStore stateStore = new StateStore(runContext, true);
|
||||
Assertions.assertThrows(MigrationRequiredException.class, () -> stateStore.getState(true, "my-state", "sub-name", "my-taskrun-value"));
|
||||
|
||||
String[] args = {};
|
||||
Integer call = PicocliRunner.call(StateStoreMigrateCommand.class, ctx, args);
|
||||
|
||||
assertThat(new String(stateStore.getState(true, "my-state", "sub-name", "my-taskrun-value").readAllBytes())).isEqualTo("my-value");
|
||||
assertThat(storage.exists(tenantId, flow.getNamespace(), oldStateStoreUri)).isFalse();
|
||||
|
||||
assertThat(call).isZero();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,65 +0,0 @@
|
||||
package io.kestra.cli.commands.templates;
|
||||
|
||||
import io.kestra.cli.commands.templates.namespaces.TemplateNamespaceUpdateCommand;
|
||||
import io.micronaut.configuration.picocli.PicocliRunner;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import io.micronaut.context.env.Environment;
|
||||
import io.micronaut.runtime.server.EmbeddedServer;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.PrintStream;
|
||||
import java.net.URL;
|
||||
import java.util.Map;
|
||||
import java.util.zip.ZipFile;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
class TemplateExportCommandTest {
|
||||
@Test
|
||||
void run() throws IOException {
|
||||
URL directory = TemplateExportCommandTest.class.getClassLoader().getResource("templates");
|
||||
ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
System.setOut(new PrintStream(out));
|
||||
|
||||
try (ApplicationContext ctx = ApplicationContext.run(Map.of("kestra.templates.enabled", "true"), Environment.CLI, Environment.TEST)) {
|
||||
|
||||
EmbeddedServer embeddedServer = ctx.getBean(EmbeddedServer.class);
|
||||
embeddedServer.start();
|
||||
|
||||
// we use the update command to add templates to extract
|
||||
String[] args = {
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
"myuser:pass:word",
|
||||
"io.kestra.tests",
|
||||
directory.getPath(),
|
||||
|
||||
};
|
||||
PicocliRunner.call(TemplateNamespaceUpdateCommand.class, ctx, args);
|
||||
assertThat(out.toString()).contains("3 template(s)");
|
||||
|
||||
// then we export them
|
||||
String[] exportArgs = {
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
"myuser:pass:word",
|
||||
"--namespace",
|
||||
"io.kestra.tests",
|
||||
"/tmp",
|
||||
};
|
||||
PicocliRunner.call(TemplateExportCommand.class, ctx, exportArgs);
|
||||
File file = new File("/tmp/templates.zip");
|
||||
assertThat(file.exists()).isTrue();
|
||||
ZipFile zipFile = new ZipFile(file);
|
||||
assertThat(zipFile.stream().count()).isEqualTo(3L);
|
||||
|
||||
file.delete();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,61 +0,0 @@
|
||||
package io.kestra.cli.commands.templates;
|
||||
|
||||
import io.micronaut.configuration.picocli.PicocliRunner;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import io.micronaut.context.env.Environment;
|
||||
import io.micronaut.runtime.server.EmbeddedServer;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
import java.net.URL;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
class TemplateValidateCommandTest {
|
||||
@Test
|
||||
void runLocal() {
|
||||
URL directory = TemplateValidateCommandTest.class.getClassLoader().getResource("invalidsTemplates/template.yml");
|
||||
ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
System.setErr(new PrintStream(out));
|
||||
|
||||
try (ApplicationContext ctx = ApplicationContext.run(Map.of("kestra.templates.enabled", "true"), Environment.CLI, Environment.TEST)) {
|
||||
String[] args = {
|
||||
"--local",
|
||||
directory.getPath()
|
||||
};
|
||||
Integer call = PicocliRunner.call(TemplateValidateCommand.class, ctx, args);
|
||||
|
||||
assertThat(call).isEqualTo(1);
|
||||
assertThat(out.toString()).contains("Unable to parse template");
|
||||
assertThat(out.toString()).contains("must not be empty");
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void runServer() {
|
||||
URL directory = TemplateValidateCommandTest.class.getClassLoader().getResource("invalidsTemplates/template.yml");
|
||||
ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
System.setErr(new PrintStream(out));
|
||||
|
||||
try (ApplicationContext ctx = ApplicationContext.run(Map.of("kestra.templates.enabled", "true"), Environment.CLI, Environment.TEST)) {
|
||||
|
||||
EmbeddedServer embeddedServer = ctx.getBean(EmbeddedServer.class);
|
||||
embeddedServer.start();
|
||||
|
||||
String[] args = {
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
"myuser:pass:word",
|
||||
directory.getPath()
|
||||
};
|
||||
Integer call = PicocliRunner.call(TemplateValidateCommand.class, ctx, args);
|
||||
|
||||
assertThat(call).isEqualTo(1);
|
||||
assertThat(out.toString()).contains("Unable to parse template");
|
||||
assertThat(out.toString()).contains("must not be empty");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,26 +0,0 @@
|
||||
package io.kestra.cli.commands.templates.namespaces;
|
||||
|
||||
import io.micronaut.configuration.picocli.PicocliRunner;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
class TemplateNamespaceCommandTest {
|
||||
@Test
|
||||
void runWithNoParam() {
|
||||
ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
System.setOut(new PrintStream(out));
|
||||
|
||||
try (ApplicationContext ctx = ApplicationContext.builder().deduceEnvironment(false).start()) {
|
||||
String[] args = {};
|
||||
Integer call = PicocliRunner.call(TemplateNamespaceCommand.class, ctx, args);
|
||||
|
||||
assertThat(call).isZero();
|
||||
assertThat(out.toString()).contains("Usage: kestra template namespace");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,112 +0,0 @@
|
||||
package io.kestra.cli.commands.templates.namespaces;
|
||||
|
||||
import io.micronaut.configuration.picocli.PicocliRunner;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import io.micronaut.context.env.Environment;
|
||||
import io.micronaut.runtime.server.EmbeddedServer;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.PrintStream;
|
||||
import java.net.URL;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
class TemplateNamespaceUpdateCommandTest {
|
||||
@Test
|
||||
void run() {
|
||||
URL directory = TemplateNamespaceUpdateCommandTest.class.getClassLoader().getResource("templates");
|
||||
ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
System.setOut(new PrintStream(out));
|
||||
|
||||
try (ApplicationContext ctx = ApplicationContext.run(Map.of("kestra.templates.enabled", "true"), Environment.CLI, Environment.TEST)) {
|
||||
|
||||
EmbeddedServer embeddedServer = ctx.getBean(EmbeddedServer.class);
|
||||
embeddedServer.start();
|
||||
|
||||
String[] args = {
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
"myuser:pass:word",
|
||||
"io.kestra.tests",
|
||||
directory.getPath(),
|
||||
|
||||
};
|
||||
PicocliRunner.call(TemplateNamespaceUpdateCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString()).contains("3 template(s)");
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void invalid() {
|
||||
URL directory = TemplateNamespaceUpdateCommandTest.class.getClassLoader().getResource("invalidsTemplates");
|
||||
ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
System.setErr(new PrintStream(out));
|
||||
|
||||
try (ApplicationContext ctx = ApplicationContext.run(Map.of("kestra.templates.enabled", "true"), Environment.CLI, Environment.TEST)) {
|
||||
|
||||
EmbeddedServer embeddedServer = ctx.getBean(EmbeddedServer.class);
|
||||
embeddedServer.start();
|
||||
|
||||
String[] args = {
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
"myuser:pass:word",
|
||||
"io.kestra.tests",
|
||||
directory.getPath(),
|
||||
|
||||
};
|
||||
Integer call = PicocliRunner.call(TemplateNamespaceUpdateCommand.class, ctx, args);
|
||||
|
||||
// assertThat(call, is(1));
|
||||
assertThat(out.toString()).contains("Unable to parse templates");
|
||||
assertThat(out.toString()).contains("must not be empty");
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void runNoDelete() {
|
||||
URL directory = TemplateNamespaceUpdateCommandTest.class.getClassLoader().getResource("templates");
|
||||
URL subDirectory = TemplateNamespaceUpdateCommandTest.class.getClassLoader().getResource("templates/templatesSubFolder");
|
||||
|
||||
ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
System.setOut(new PrintStream(out));
|
||||
|
||||
try (ApplicationContext ctx = ApplicationContext.run(Map.of("kestra.templates.enabled", "true"), Environment.CLI, Environment.TEST)) {
|
||||
|
||||
EmbeddedServer embeddedServer = ctx.getBean(EmbeddedServer.class);
|
||||
embeddedServer.start();
|
||||
|
||||
String[] args = {
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
"myuser:pass:word",
|
||||
"io.kestra.tests",
|
||||
directory.getPath(),
|
||||
|
||||
};
|
||||
PicocliRunner.call(TemplateNamespaceUpdateCommand.class, ctx, args);
|
||||
|
||||
assertThat(out.toString()).contains("3 template(s)");
|
||||
|
||||
String[] newArgs = {
|
||||
"--server",
|
||||
embeddedServer.getURL().toString(),
|
||||
"--user",
|
||||
"myuser:pass:word",
|
||||
"io.kestra.tests",
|
||||
subDirectory.getPath(),
|
||||
"--no-delete"
|
||||
|
||||
};
|
||||
PicocliRunner.call(TemplateNamespaceUpdateCommand.class, ctx, newArgs);
|
||||
|
||||
assertThat(out.toString()).contains("1 template(s)");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,12 +1,12 @@
|
||||
package io.kestra.cli.services;
|
||||
|
||||
import io.kestra.core.junit.annotations.FlakyTest;
|
||||
import io.kestra.core.junit.annotations.KestraTest;
|
||||
import io.kestra.core.models.flows.Flow;
|
||||
import io.kestra.core.models.flows.GenericFlow;
|
||||
import io.kestra.core.repositories.FlowRepositoryInterface;
|
||||
import io.kestra.core.utils.Await;
|
||||
import io.kestra.core.utils.TestsUtils;
|
||||
import io.micronaut.test.extensions.junit5.annotation.MicronautTest;
|
||||
import jakarta.inject.Inject;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.junit.jupiter.api.*;
|
||||
@@ -23,7 +23,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
|
||||
import static io.kestra.core.utils.Rethrow.throwRunnable;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
@MicronautTest(environments = {"test", "file-watch"}, transactional = false)
|
||||
@KestraTest(environments = {"test", "file-watch"})
|
||||
class FileChangedEventListenerTest {
|
||||
public static final String FILE_WATCH = "build/file-watch";
|
||||
@Inject
|
||||
|
||||
@@ -3,8 +3,8 @@ namespace: system
|
||||
|
||||
tasks:
|
||||
- id: deprecated
|
||||
type: io.kestra.plugin.core.debug.Echo
|
||||
format: Hello World
|
||||
type: io.kestra.plugin.core.log.Log
|
||||
message: Hello World
|
||||
- id: alias
|
||||
type: io.kestra.core.tasks.log.Log
|
||||
message: I'm an alias
|
||||
@@ -77,6 +77,7 @@ dependencies {
|
||||
testImplementation project(':worker')
|
||||
testImplementation project(':scheduler')
|
||||
testImplementation project(':executor')
|
||||
testImplementation project(':indexer')
|
||||
|
||||
testImplementation "io.micronaut:micronaut-http-client"
|
||||
testImplementation "io.micronaut:micronaut-http-server-netty"
|
||||
|
||||
@@ -4,7 +4,6 @@ import io.kestra.core.models.dashboards.Dashboard;
|
||||
import io.kestra.core.models.flows.Flow;
|
||||
import io.kestra.core.models.flows.PluginDefault;
|
||||
import io.kestra.core.models.tasks.Task;
|
||||
import io.kestra.core.models.templates.Template;
|
||||
import io.kestra.core.models.triggers.AbstractTrigger;
|
||||
import jakarta.inject.Singleton;
|
||||
|
||||
@@ -36,7 +35,6 @@ public class JsonSchemaCache {
|
||||
public JsonSchemaCache(final JsonSchemaGenerator jsonSchemaGenerator) {
|
||||
this.jsonSchemaGenerator = Objects.requireNonNull(jsonSchemaGenerator, "JsonSchemaGenerator cannot be null");
|
||||
registerClassForType(SchemaType.FLOW, Flow.class);
|
||||
registerClassForType(SchemaType.TEMPLATE, Template.class);
|
||||
registerClassForType(SchemaType.TASK, Task.class);
|
||||
registerClassForType(SchemaType.TRIGGER, AbstractTrigger.class);
|
||||
registerClassForType(SchemaType.PLUGINDEFAULT, PluginDefault.class);
|
||||
|
||||
@@ -23,7 +23,6 @@ import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.models.annotations.Plugin;
|
||||
import io.kestra.core.models.annotations.PluginProperty;
|
||||
import io.kestra.core.models.conditions.Condition;
|
||||
import io.kestra.core.models.conditions.ScheduleCondition;
|
||||
import io.kestra.core.models.dashboards.DataFilter;
|
||||
import io.kestra.core.models.dashboards.DataFilterKPI;
|
||||
import io.kestra.core.models.dashboards.charts.Chart;
|
||||
@@ -64,7 +63,7 @@ import static io.kestra.core.serializers.JacksonMapper.MAP_TYPE_REFERENCE;
|
||||
@Singleton
|
||||
@Slf4j
|
||||
public class JsonSchemaGenerator {
|
||||
|
||||
|
||||
private static final List<Class<?>> TYPES_RESOLVED_AS_STRING = List.of(Duration.class, LocalTime.class, LocalDate.class, LocalDateTime.class, ZonedDateTime.class, OffsetDateTime.class, OffsetTime.class);
|
||||
private static final List<Class<?>> SUBTYPE_RESOLUTION_EXCLUSION_FOR_PLUGIN_SCHEMA = List.of(Task.class, AbstractTrigger.class);
|
||||
|
||||
@@ -277,8 +276,8 @@ public class JsonSchemaGenerator {
|
||||
.with(Option.DEFINITION_FOR_MAIN_SCHEMA)
|
||||
.with(Option.PLAIN_DEFINITION_KEYS)
|
||||
.with(Option.ALLOF_CLEANUP_AT_THE_END);
|
||||
|
||||
// HACK: Registered a custom JsonUnwrappedDefinitionProvider prior to the JacksonModule
|
||||
|
||||
// HACK: Registered a custom JsonUnwrappedDefinitionProvider prior to the JacksonModule
|
||||
// to be able to return an CustomDefinition with an empty node when the ResolvedType can't be found.
|
||||
builder.forTypesInGeneral().withCustomDefinitionProvider(new JsonUnwrappedDefinitionProvider(){
|
||||
@Override
|
||||
@@ -320,7 +319,7 @@ public class JsonSchemaGenerator {
|
||||
// inline some type
|
||||
builder.forTypesInGeneral()
|
||||
.withCustomDefinitionProvider(new CustomDefinitionProviderV2() {
|
||||
|
||||
|
||||
@Override
|
||||
public CustomDefinition provideCustomSchemaDefinition(ResolvedType javaType, SchemaGenerationContext context) {
|
||||
if (javaType.isInstanceOf(Map.class) || javaType.isInstanceOf(Enum.class)) {
|
||||
@@ -688,15 +687,6 @@ public class JsonSchemaGenerator {
|
||||
.filter(Predicate.not(io.kestra.core.models.Plugin::isInternal))
|
||||
.flatMap(clz -> safelyResolveSubtype(declaredType, clz, typeContext).stream())
|
||||
.toList();
|
||||
} else if (declaredType.getErasedType() == ScheduleCondition.class) {
|
||||
return getRegisteredPlugins()
|
||||
.stream()
|
||||
.flatMap(registeredPlugin -> registeredPlugin.getConditions().stream())
|
||||
.filter(ScheduleCondition.class::isAssignableFrom)
|
||||
.filter(p -> allowedPluginTypes.isEmpty() || allowedPluginTypes.contains(p.getName()))
|
||||
.filter(Predicate.not(io.kestra.core.models.Plugin::isInternal))
|
||||
.flatMap(clz -> safelyResolveSubtype(declaredType, clz, typeContext).stream())
|
||||
.toList();
|
||||
} else if (declaredType.getErasedType() == TaskRunner.class) {
|
||||
return getRegisteredPlugins()
|
||||
.stream()
|
||||
|
||||
@@ -3,6 +3,7 @@ package io.kestra.core.docs;
|
||||
import io.kestra.core.models.annotations.PluginSubGroup;
|
||||
import io.kestra.core.plugins.RegisteredPlugin;
|
||||
import io.micronaut.core.annotation.Nullable;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
@@ -117,10 +118,17 @@ public class Plugin {
|
||||
.filter(not(io.kestra.core.models.Plugin::isInternal))
|
||||
.filter(clazzFilter)
|
||||
.filter(c -> !c.getName().startsWith("org.kestra."))
|
||||
.map(c -> new PluginElementMetadata(c.getName(), io.kestra.core.models.Plugin.isDeprecated(c) ? true : null))
|
||||
.map(c -> {
|
||||
Schema schema = c.getAnnotation(Schema.class);
|
||||
|
||||
var title = Optional.ofNullable(schema).map(Schema::title).filter(t -> !t.isEmpty()).orElse(null);
|
||||
var description = Optional.ofNullable(schema).map(Schema::description).filter(d -> !d.isEmpty()).orElse(null);
|
||||
var deprecated = io.kestra.core.models.Plugin.isDeprecated(c) ? true : null;
|
||||
|
||||
return new PluginElementMetadata(c.getName(), deprecated, title, description);
|
||||
})
|
||||
.toList();
|
||||
}
|
||||
|
||||
public record PluginElementMetadata(String cls, Boolean deprecated) {
|
||||
}
|
||||
public record PluginElementMetadata(String cls, Boolean deprecated, String title, String description) {}
|
||||
}
|
||||
|
||||
@@ -6,7 +6,6 @@ import io.kestra.core.utils.Enums;
|
||||
|
||||
public enum SchemaType {
|
||||
FLOW,
|
||||
TEMPLATE,
|
||||
TASK,
|
||||
TRIGGER,
|
||||
PLUGINDEFAULT,
|
||||
|
||||
85
core/src/main/java/io/kestra/core/events/EventId.java
Normal file
85
core/src/main/java/io/kestra/core/events/EventId.java
Normal file
@@ -0,0 +1,85 @@
|
||||
package io.kestra.core.events;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonCreator;
|
||||
import com.fasterxml.jackson.annotation.JsonRawValue;
|
||||
import com.fasterxml.jackson.annotation.JsonValue;
|
||||
import com.fasterxml.uuid.Generators;
|
||||
import com.fasterxml.uuid.impl.TimeBasedEpochGenerator;
|
||||
|
||||
import java.util.UUID;
|
||||
|
||||
/**
|
||||
* Strongly-typed wrapper around a UUIDv7 identifier used for Kestra events.
|
||||
* <p>
|
||||
* UUIDv7 values are time-ordered, which allows lexicographic and unsigned
|
||||
* 128-bit comparison to reflect chronological ordering.
|
||||
*/
|
||||
public record EventId(@JsonValue UUID value) implements Comparable<EventId> {
|
||||
|
||||
// Generator that generates UUID using version 7 (Unix Epoch time+random based).
|
||||
private static final TimeBasedEpochGenerator GENERATOR = Generators.timeBasedEpochGenerator();
|
||||
|
||||
public EventId {
|
||||
if (value == null) {
|
||||
throw new IllegalArgumentException("EventId UUID cannot be null");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Factory method for creating a new {@link EventId}.
|
||||
*
|
||||
* @return a new {@link EventId}.
|
||||
*/
|
||||
public static EventId create() {
|
||||
return new EventId(GENERATOR.generate());
|
||||
}
|
||||
|
||||
@JsonCreator
|
||||
public static EventId fromString(String str) {
|
||||
return new EventId(UUID.fromString(str));
|
||||
}
|
||||
|
||||
/**
|
||||
* Compares two UUIDv7 values chronologically. UUIDv7 ordering corresponds
|
||||
* to treating the UUID as a 128-bit unsigned integer.
|
||||
*
|
||||
* @param other the other {@code EventId} to compare against
|
||||
* @return a negative value if this ID is older; zero if equal; positive if newer
|
||||
*/
|
||||
@Override
|
||||
public int compareTo(EventId other) {
|
||||
int cmp = Long.compareUnsigned(this.value.getMostSignificantBits(), other.value.getMostSignificantBits());
|
||||
if (cmp != 0) return cmp;
|
||||
return Long.compareUnsigned(this.value.getLeastSignificantBits(), other.value.getLeastSignificantBits());
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks whether this ID is chronologically newer (greater) than the given ID.
|
||||
*
|
||||
* @param other the ID to compare against
|
||||
* @return {@code true} if this ID is newer; {@code false} otherwise
|
||||
*/
|
||||
public boolean isNewerThan(final EventId other) {
|
||||
return this.compareTo(other) > 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks whether this ID is chronologically older (less) than the given ID.
|
||||
*
|
||||
* @param other the ID to compare against
|
||||
* @return {@code true} if this ID is older; {@code false} otherwise
|
||||
*/
|
||||
public boolean isOlderThan(final EventId other) {
|
||||
return this.compareTo(other) < 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the string representation of the underlying UUID.
|
||||
*
|
||||
* @return the UUID string
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
return value.toString();
|
||||
}
|
||||
}
|
||||
@@ -1,9 +1,17 @@
|
||||
package io.kestra.core.exceptions;
|
||||
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
|
||||
import java.io.Serial;
|
||||
|
||||
/**
|
||||
* Exception that can be thrown when a Flow is not found.
|
||||
*/
|
||||
public class FlowNotFoundException extends NotFoundException {
|
||||
@Serial
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
private static final String FLOW_NOT_FOUND_MESSAGE = "Unable to find flow %s.%s.%s revision %s for execution %s";
|
||||
|
||||
/**
|
||||
* Creates a new {@link FlowNotFoundException} instance.
|
||||
@@ -20,4 +28,8 @@ public class FlowNotFoundException extends NotFoundException {
|
||||
public FlowNotFoundException(final String message) {
|
||||
super(message);
|
||||
}
|
||||
|
||||
public FlowNotFoundException(final Execution execution) {
|
||||
super(FLOW_NOT_FOUND_MESSAGE.formatted(execution.getTenantId(), execution.getNamespace(), execution.getFlowId(), execution.getFlowRevision(), execution.getId()));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,29 @@
|
||||
package io.kestra.core.executor.command;
|
||||
|
||||
import io.kestra.core.events.EventId;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.flows.State;
|
||||
|
||||
import java.time.Instant;
|
||||
|
||||
public record ChangeTaskRunState(String tenantId,
|
||||
String namespace,
|
||||
String flowId,
|
||||
String executionId,
|
||||
Instant timestamp,
|
||||
EventId eventId,
|
||||
String taskRunId,
|
||||
State.Type state) implements ExecutionCommand {
|
||||
public static ChangeTaskRunState from(Execution execution, String taskRunId, State.Type state) {
|
||||
return new ChangeTaskRunState(
|
||||
execution.getTenantId(),
|
||||
execution.getNamespace(),
|
||||
execution.getFlowId(),
|
||||
execution.getId(),
|
||||
Instant.now(),
|
||||
EventId.create(),
|
||||
taskRunId,
|
||||
state
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,105 @@
|
||||
package io.kestra.core.executor.command;
|
||||
|
||||
import com.fasterxml.jackson.annotation.*;
|
||||
import io.kestra.core.events.EventId;
|
||||
import io.kestra.core.models.HasUID;
|
||||
import io.kestra.core.utils.Enums;
|
||||
import io.kestra.core.utils.IdUtils;
|
||||
|
||||
import java.time.Instant;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
@JsonIgnoreProperties(ignoreUnknown = true)
|
||||
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.EXISTING_PROPERTY, property = "type", visible = true)
|
||||
@JsonSubTypes({
|
||||
@JsonSubTypes.Type(value = ChangeTaskRunState.class, name = "CHANGE_TASK_RUN_STATE"),
|
||||
@JsonSubTypes.Type(value = ForceRun.class, name = "FORCE_RUN"),
|
||||
@JsonSubTypes.Type(value = Pause.class, name = "PAUSE"),
|
||||
@JsonSubTypes.Type(value = Replay.class, name = "REPLAY"),
|
||||
@JsonSubTypes.Type(value = Restart.class, name = "RESTART"),
|
||||
@JsonSubTypes.Type(value = Resume.class, name = "RESUME"),
|
||||
@JsonSubTypes.Type(value = ResumeFromBreakpoint.class, name = "RESUME_FROM_BREAKPOINT"),
|
||||
@JsonSubTypes.Type(value = Unqueue.class, name = "UNQUEUE"),
|
||||
@JsonSubTypes.Type(value = UpdateLabels.class, name = "UPDATE_LABELS"),
|
||||
@JsonSubTypes.Type(value = UpdateStatus.class, name = "UPDATE_STATUS"),
|
||||
@JsonSubTypes.Type(value = ExecutionCommand.Invalid.class, name = "INVALID"),
|
||||
})
|
||||
public interface ExecutionCommand extends HasUID {
|
||||
/**
|
||||
* @return the tenant id
|
||||
*/
|
||||
String tenantId();
|
||||
|
||||
/**
|
||||
* @return the namespace
|
||||
*/
|
||||
String namespace();
|
||||
|
||||
/**
|
||||
* @return the flow id
|
||||
*/
|
||||
String flowId();
|
||||
|
||||
/**
|
||||
* @return the execution id
|
||||
*/
|
||||
String executionId();
|
||||
|
||||
/**
|
||||
* @return the event timestamp.
|
||||
*/
|
||||
Instant timestamp();
|
||||
|
||||
/**
|
||||
* The event unique identifier.
|
||||
* <p>
|
||||
* Can be used to de-duplicate events or to correlate the event with an executor event.
|
||||
*
|
||||
* @return the event identifier.
|
||||
*/
|
||||
EventId eventId();
|
||||
|
||||
/**
|
||||
* @return the event type
|
||||
*/
|
||||
@JsonProperty
|
||||
default ExecutionCommandType type() {
|
||||
return Enums.fromClassName(this, ExecutionCommandType.class);
|
||||
}
|
||||
|
||||
@JsonIgnore
|
||||
@Override
|
||||
default String uid() {
|
||||
return IdUtils.fromParts(this.tenantId(), this.namespace(), this.flowId(), this.executionId());
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents an invalid execution event.
|
||||
* Used for best effort deserialization of unexpected events due to serialization issue or removal of a supported event type.
|
||||
*/
|
||||
record Invalid(String tenantId,
|
||||
String namespace,
|
||||
String flowId,
|
||||
String executionId,
|
||||
Instant timestamp,
|
||||
EventId eventId,
|
||||
Map<String, Object> properties
|
||||
) implements ExecutionCommand {
|
||||
|
||||
@JsonCreator
|
||||
public Invalid(@JsonProperty("id") String tenantId,
|
||||
@JsonProperty("namespace") String namespace,
|
||||
@JsonProperty("flowId") String flowId,
|
||||
@JsonProperty("executionId") String executionId,
|
||||
@JsonProperty("timestamp") Instant timestamp,
|
||||
@JsonProperty("eventId") EventId eventId) {
|
||||
this(tenantId, namespace, flowId, executionId, timestamp, eventId, new HashMap<>());
|
||||
}
|
||||
|
||||
@JsonAnySetter
|
||||
public void addProperty(String key, Object value) {
|
||||
this.properties.put(key, value);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,24 @@
|
||||
package io.kestra.core.executor.command;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonCreator;
|
||||
import io.kestra.core.utils.Enums;
|
||||
|
||||
public enum ExecutionCommandType {
|
||||
CHANGE_TASK_RUN_STATE,
|
||||
FORCE_RUN,
|
||||
PAUSE,
|
||||
REPLAY,
|
||||
RESTART,
|
||||
RESUME,
|
||||
RESUME_FROM_BREAKPOINT,
|
||||
UNQUEUE,
|
||||
UPDATE_LABELS,
|
||||
UPDATE_STATUS,
|
||||
// ERROR
|
||||
INVALID;
|
||||
|
||||
@JsonCreator
|
||||
static ExecutionCommandType from(final String s) {
|
||||
return Enums.getForNameIgnoreCase(s, ExecutionCommandType.class, INVALID);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,25 @@
|
||||
package io.kestra.core.executor.command;
|
||||
|
||||
import io.kestra.core.events.EventId;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
|
||||
import java.time.Instant;
|
||||
|
||||
public record ForceRun(String tenantId,
|
||||
String namespace,
|
||||
String flowId,
|
||||
String executionId,
|
||||
Instant timestamp,
|
||||
EventId eventId) implements ExecutionCommand {
|
||||
|
||||
public static ForceRun from(Execution execution) {
|
||||
return new ForceRun(
|
||||
execution.getTenantId(),
|
||||
execution.getNamespace(),
|
||||
execution.getFlowId(),
|
||||
execution.getId(),
|
||||
Instant.now(),
|
||||
EventId.create()
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,24 @@
|
||||
package io.kestra.core.executor.command;
|
||||
|
||||
import io.kestra.core.events.EventId;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
|
||||
import java.time.Instant;
|
||||
|
||||
public record Pause(String tenantId,
|
||||
String namespace,
|
||||
String flowId,
|
||||
String executionId,
|
||||
Instant timestamp,
|
||||
EventId eventId) implements ExecutionCommand {
|
||||
public static Pause from(Execution execution) {
|
||||
return new Pause(
|
||||
execution.getTenantId(),
|
||||
execution.getNamespace(),
|
||||
execution.getFlowId(),
|
||||
execution.getId(),
|
||||
Instant.now(),
|
||||
EventId.create()
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,32 @@
|
||||
package io.kestra.core.executor.command;
|
||||
|
||||
import io.kestra.core.events.EventId;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import jakarta.annotation.Nullable;
|
||||
|
||||
import java.time.Instant;
|
||||
import java.util.Optional;
|
||||
|
||||
public record Replay(String tenantId,
|
||||
String namespace,
|
||||
String flowId,
|
||||
String executionId,
|
||||
Instant timestamp,
|
||||
EventId eventId,
|
||||
@Nullable String taskRunId,
|
||||
@Nullable Integer revision,
|
||||
Optional<String> breakpoints) implements ExecutionCommand {
|
||||
public static Replay from(Execution execution, @Nullable String taskRunId, @Nullable Integer revision, Optional<String> breakpoints) {
|
||||
return new Replay(
|
||||
execution.getTenantId(),
|
||||
execution.getNamespace(),
|
||||
execution.getFlowId(),
|
||||
execution.getId(),
|
||||
Instant.now(),
|
||||
EventId.create(),
|
||||
taskRunId,
|
||||
revision,
|
||||
breakpoints
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,27 @@
|
||||
package io.kestra.core.executor.command;
|
||||
|
||||
import io.kestra.core.events.EventId;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import jakarta.annotation.Nullable;
|
||||
|
||||
import java.time.Instant;
|
||||
|
||||
public record Restart(String tenantId,
|
||||
String namespace,
|
||||
String flowId,
|
||||
String executionId,
|
||||
Instant timestamp,
|
||||
EventId eventId,
|
||||
@Nullable Integer revision) implements ExecutionCommand {
|
||||
public static Restart from(Execution execution, Integer revision) {
|
||||
return new Restart(
|
||||
execution.getTenantId(),
|
||||
execution.getNamespace(),
|
||||
execution.getFlowId(),
|
||||
execution.getId(),
|
||||
Instant.now(),
|
||||
EventId.create(),
|
||||
revision
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,44 @@
|
||||
package io.kestra.core.executor.command;
|
||||
|
||||
import io.kestra.core.events.EventId;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.plugin.core.flow.Pause;
|
||||
import jakarta.annotation.Nullable;
|
||||
|
||||
import java.time.Instant;
|
||||
import java.util.Map;
|
||||
|
||||
public record Resume(String tenantId,
|
||||
String namespace,
|
||||
String flowId,
|
||||
String executionId,
|
||||
Instant timestamp,
|
||||
EventId eventId,
|
||||
Pause.Resumed resumed,
|
||||
@Nullable Map<String, Object> resumeInputs) implements ExecutionCommand {
|
||||
public static Resume from(Execution execution, Pause.Resumed resumed) {
|
||||
return new Resume(
|
||||
execution.getTenantId(),
|
||||
execution.getNamespace(),
|
||||
execution.getFlowId(),
|
||||
execution.getId(),
|
||||
Instant.now(),
|
||||
EventId.create(),
|
||||
resumed,
|
||||
null
|
||||
);
|
||||
}
|
||||
|
||||
public static Resume from(Execution execution, Pause.Resumed resumed, @Nullable Map<String, Object> resumeInputs) {
|
||||
return new Resume(
|
||||
execution.getTenantId(),
|
||||
execution.getNamespace(),
|
||||
execution.getFlowId(),
|
||||
execution.getId(),
|
||||
Instant.now(),
|
||||
EventId.create(),
|
||||
resumed,
|
||||
resumeInputs
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,27 @@
|
||||
package io.kestra.core.executor.command;
|
||||
|
||||
import io.kestra.core.events.EventId;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
|
||||
import java.time.Instant;
|
||||
import java.util.Optional;
|
||||
|
||||
public record ResumeFromBreakpoint(String tenantId,
|
||||
String namespace,
|
||||
String flowId,
|
||||
String executionId,
|
||||
Instant timestamp,
|
||||
EventId eventId,
|
||||
Optional<String> breakpoints) implements ExecutionCommand {
|
||||
public static ResumeFromBreakpoint from(Execution execution, Optional<String> breakpoints) {
|
||||
return new ResumeFromBreakpoint(
|
||||
execution.getTenantId(),
|
||||
execution.getNamespace(),
|
||||
execution.getFlowId(),
|
||||
execution.getId(),
|
||||
Instant.now(),
|
||||
EventId.create(),
|
||||
breakpoints
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,28 @@
|
||||
package io.kestra.core.executor.command;
|
||||
|
||||
import io.kestra.core.events.EventId;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.flows.State;
|
||||
import io.micronaut.core.annotation.Nullable;
|
||||
|
||||
import java.time.Instant;
|
||||
|
||||
public record Unqueue(String tenantId,
|
||||
String namespace,
|
||||
String flowId,
|
||||
String executionId,
|
||||
Instant timestamp,
|
||||
EventId eventId,
|
||||
@Nullable State.Type state) implements ExecutionCommand {
|
||||
public static Unqueue from(Execution execution, @Nullable State.Type state) {
|
||||
return new Unqueue(
|
||||
execution.getTenantId(),
|
||||
execution.getNamespace(),
|
||||
execution.getFlowId(),
|
||||
execution.getId(),
|
||||
Instant.now(),
|
||||
EventId.create(),
|
||||
state
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,28 @@
|
||||
package io.kestra.core.executor.command;
|
||||
|
||||
import io.kestra.core.events.EventId;
|
||||
import io.kestra.core.models.Label;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
|
||||
import java.time.Instant;
|
||||
import java.util.List;
|
||||
|
||||
public record UpdateLabels(String tenantId,
|
||||
String namespace,
|
||||
String flowId,
|
||||
String executionId,
|
||||
Instant timestamp,
|
||||
EventId eventId,
|
||||
List<Label> labels) implements ExecutionCommand {
|
||||
public static UpdateLabels from(Execution execution, List<Label> labels) {
|
||||
return new UpdateLabels(
|
||||
execution.getTenantId(),
|
||||
execution.getNamespace(),
|
||||
execution.getFlowId(),
|
||||
execution.getId(),
|
||||
Instant.now(),
|
||||
EventId.create(),
|
||||
labels
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,27 @@
|
||||
package io.kestra.core.executor.command;
|
||||
|
||||
import io.kestra.core.events.EventId;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.flows.State;
|
||||
|
||||
import java.time.Instant;
|
||||
|
||||
public record UpdateStatus(String tenantId,
|
||||
String namespace,
|
||||
String flowId,
|
||||
String executionId,
|
||||
Instant timestamp,
|
||||
EventId eventId,
|
||||
State.Type state) implements ExecutionCommand {
|
||||
public static UpdateStatus from(Execution execution, State.Type state) {
|
||||
return new UpdateStatus(
|
||||
execution.getTenantId(),
|
||||
execution.getNamespace(),
|
||||
execution.getFlowId(),
|
||||
execution.getId(),
|
||||
Instant.now(),
|
||||
EventId.create(),
|
||||
state
|
||||
);
|
||||
}
|
||||
}
|
||||
27
core/src/main/java/io/kestra/core/lock/Lock.java
Normal file
27
core/src/main/java/io/kestra/core/lock/Lock.java
Normal file
@@ -0,0 +1,27 @@
|
||||
package io.kestra.core.lock;
|
||||
|
||||
import io.kestra.core.models.HasUID;
|
||||
import io.kestra.core.utils.IdUtils;
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Getter;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
import java.time.Instant;
|
||||
import java.time.LocalDateTime;
|
||||
|
||||
@Getter
|
||||
@Builder
|
||||
@NoArgsConstructor
|
||||
@AllArgsConstructor
|
||||
public class Lock implements HasUID {
|
||||
private String category;
|
||||
private String id;
|
||||
private String owner;
|
||||
private Instant createdAt;
|
||||
|
||||
@Override
|
||||
public String uid() {
|
||||
return IdUtils.fromParts(this.category, this.id);
|
||||
}
|
||||
}
|
||||
13
core/src/main/java/io/kestra/core/lock/LockException.java
Normal file
13
core/src/main/java/io/kestra/core/lock/LockException.java
Normal file
@@ -0,0 +1,13 @@
|
||||
package io.kestra.core.lock;
|
||||
|
||||
import io.kestra.core.exceptions.KestraRuntimeException;
|
||||
|
||||
public class LockException extends KestraRuntimeException {
|
||||
public LockException(String message) {
|
||||
super(message);
|
||||
}
|
||||
|
||||
public LockException(Throwable cause) {
|
||||
super(cause);
|
||||
}
|
||||
}
|
||||
207
core/src/main/java/io/kestra/core/lock/LockService.java
Normal file
207
core/src/main/java/io/kestra/core/lock/LockService.java
Normal file
@@ -0,0 +1,207 @@
|
||||
package io.kestra.core.lock;

import io.kestra.core.repositories.LockRepositoryInterface;
import io.kestra.core.server.ServerInstance;
import io.kestra.core.utils.Disposable;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import lombok.extern.slf4j.Slf4j;

import java.time.Duration;
import java.time.Instant;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.Callable;

/**
 * This service provides facility for executing Runnable and Callable tasks inside a lock.
 * Note: it may be handy to provide a tryLock facility that, if locked, skips executing the Runnable or Callable and exits immediately.
 *
 * @implNote There is no expiry for locks, so a service may hold a lock infinitely until the service is restarted as the
 * liveness mechanism releases all locks when the service is unreachable.
 * This may be improved at some point by adding an expiry (for ex 30s) and running a thread that will periodically
 * increase the expiry for all existing locks. This should allow quicker recovery of zombie locks than relying on the liveness mechanism,
 * as a service wanting to lock an expired lock would be able to take it over.
 */
@Slf4j
@Singleton
public class LockService {
    // Default maximum time to wait for a contended lock: 5 minutes.
    private static final Duration DEFAULT_TIMEOUT = Duration.ofSeconds(300);
    // Pause between two acquisition attempts while waiting for a contended lock.
    private static final int DEFAULT_SLEEP_MS = 1;

    private final LockRepositoryInterface lockRepository;

    @Inject
    public LockService(LockRepositoryInterface lockRepository) {
        this.lockRepository = lockRepository;
    }

    /**
     * Executes a Runnable inside a lock.
     * If the lock is already taken, it will wait for at most the default lock timeout of 5 minutes.
     * @see #doInLock(String, String, Duration, Runnable)
     *
     * @param category lock category, ex 'executions'
     * @param id identifier of the lock identity inside the category, ex an execution ID
     *
     * @throws LockException if the lock cannot be held before the timeout or the thread is interrupted.
     */
    public void doInLock(String category, String id, Runnable runnable) {
        doInLock(category, id, DEFAULT_TIMEOUT, runnable);
    }

    /**
     * Executes a Runnable inside a lock.
     * If the lock is already taken, it will wait for at most the <code>timeout</code> duration.
     * @see #doInLock(String, String, Runnable)
     *
     * @param category lock category, ex 'executions'
     * @param id identifier of the lock identity inside the category, ex an execution ID
     * @param timeout how much time to wait for the lock if another process already holds the same lock
     *
     * @throws LockException if the lock cannot be held before the timeout or the thread is interrupted.
     */
    public void doInLock(String category, String id, Duration timeout, Runnable runnable) {
        if (!lock(category, id, timeout)) {
            throw new LockException("Unable to hold the lock inside the configured timeout of " + timeout);
        }

        try {
            runnable.run();
        } finally {
            // always release, even if the runnable throws
            unlock(category, id);
        }
    }

    /**
     * Acquires the lock only if it is not held by another process at the time of invocation.
     *
     * @param category the category of the lock, e.g., 'executions'
     * @param id the identifier of the lock within the specified category, e.g., an execution ID
     * @return an optional {@link Disposable} to release the lock.
     */
    public Optional<Disposable> tryLock(String category, String id) {
        return lock(category, id, Duration.ZERO) ? Optional.of(Disposable.of(() -> this.unlock(category, id))) : Optional.empty();
    }

    /**
     * Attempts to execute the provided {@code runnable} within a lock.
     * If the lock is already held by another process, the execution is skipped.
     *
     * @param category the category of the lock, e.g., 'executions'
     * @param id the identifier of the lock within the specified category, e.g., an execution ID
     * @param runnable the task to be executed if the lock is successfully acquired
     */
    public void tryLock(String category, String id, Runnable runnable) {
        if (lock(category, id, Duration.ZERO)) {
            try {
                runnable.run();
            } finally {
                unlock(category, id);
            }
        } else {
            log.debug("Lock '{}'.'{}' already held, skipping", category, id);
        }
    }

    /**
     * Executes a Callable inside a lock.
     * If the lock is already taken, it will wait for at most the default lock timeout of 5 minutes.
     *
     * @param category lock category, ex 'executions'
     * @param id identifier of the lock identity inside the category, ex an execution ID
     *
     * @throws LockException if the lock cannot be held before the timeout or the thread is interrupted.
     */
    public <T> T callInLock(String category, String id, Callable<T> callable) throws Exception {
        return callInLock(category, id, DEFAULT_TIMEOUT, callable);
    }

    /**
     * Executes a Callable inside a lock.
     * If the lock is already taken, it will wait for at most the <code>timeout</code> duration.
     *
     * @param category lock category, ex 'executions'
     * @param id identifier of the lock identity inside the category, ex an execution ID
     * @param timeout how much time to wait for the lock if another process already holds the same lock
     *
     * @throws LockException if the lock cannot be held before the timeout or the thread is interrupted.
     */
    public <T> T callInLock(String category, String id, Duration timeout, Callable<T> callable) throws Exception {
        if (!lock(category, id, timeout)) {
            throw new LockException("Unable to hold the lock inside the configured timeout of " + timeout);
        }

        try {
            return callable.call();
        } finally {
            // always release, even if the callable throws
            unlock(category, id);
        }
    }

    /**
     * Release all locks held by this service identifier.
     */
    public List<Lock> releaseAllLocks(String serviceId) {
        return lockRepository.deleteByOwner(serviceId);
    }

    /**
     * @return true if the lock identified by this category and identifier already exist.
     */
    public boolean isLocked(String category, String id) {
        return lockRepository.findById(category, id).isPresent();
    }

    /**
     * Tries to acquire the lock, polling the repository until it succeeds or the timeout elapses.
     *
     * @return true if the lock was acquired, false if it could not be acquired within the timeout.
     * @throws LockException if the waiting thread is interrupted.
     */
    private boolean lock(String category, String id, Duration timeout) throws LockException {
        log.debug("Locking '{}'.'{}'", category, id);
        long deadline = System.currentTimeMillis() + timeout.toMillis();
        do {
            Optional<Lock> existing = lockRepository.findById(category, id);
            if (existing.isEmpty()) {
                // we can try to lock!
                Lock newLock = new Lock(category, id, ServerInstance.INSTANCE_ID, Instant.now());
                if (lockRepository.create(newLock)) {
                    return true;
                } else {
                    // lost the race: another process created the lock between findById and create
                    log.debug("Cannot create the lock, it may have been created after we check for its existence and before we create it");
                }
            } else {
                log.debug("Already locked by: {}", existing.get().getOwner());
            }

            // fast path for when we don't want to wait for the lock
            if (timeout.isZero()) {
                return false;
            }

            try {
                Thread.sleep(DEFAULT_SLEEP_MS);
            } catch (InterruptedException e) {
                // restore the interrupt flag before surfacing the failure
                Thread.currentThread().interrupt();
                throw new LockException(e);
            }
        } while (System.currentTimeMillis() < deadline);

        // FIX: the previous message ("waiting for it to be released") was misleading —
        // at this point the wait is over and acquisition has failed.
        log.debug("Unable to acquire lock '{}'.'{}' within the timeout of {}", category, id, timeout);
        return false;
    }

    /**
     * Releases the lock if it exists and is still owned by this instance; otherwise logs and ignores.
     */
    private void unlock(String category, String id) {
        log.debug("Unlocking '{}'.'{}'", category, id);

        Optional<Lock> existing = lockRepository.findById(category, id);
        if (existing.isEmpty()) {
            log.warn("Try to unlock unknown lock '{}'.'{}', ignoring it", category, id);
            return;
        }

        if (!existing.get().getOwner().equals(ServerInstance.INSTANCE_ID)) {
            // the lock may have been reclaimed (e.g. released by the liveness mechanism and re-acquired)
            log.warn("Try to unlock a lock we no longer own '{}'.'{}', ignoring it", category, id);
            return;
        }

        lockRepository.deleteById(category, id);
    }
}
|
||||
@@ -4,9 +4,17 @@ import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.executions.ExecutionKilled;
|
||||
import io.kestra.core.models.tasks.Task;
|
||||
import io.kestra.core.models.triggers.AbstractTrigger;
|
||||
import io.kestra.core.models.triggers.TriggerContext;
|
||||
import io.kestra.core.runners.*;
|
||||
import io.micrometer.core.instrument.*;
|
||||
import io.kestra.core.models.triggers.TriggerId;
|
||||
import io.kestra.core.runners.SubflowExecutionResult;
|
||||
import io.kestra.core.runners.WorkerTask;
|
||||
import io.kestra.core.runners.WorkerTaskResult;
|
||||
import io.kestra.core.runners.WorkerTrigger;
|
||||
import io.micrometer.core.instrument.Counter;
|
||||
import io.micrometer.core.instrument.DistributionSummary;
|
||||
import io.micrometer.core.instrument.Gauge;
|
||||
import io.micrometer.core.instrument.MeterRegistry;
|
||||
import io.micrometer.core.instrument.Tags;
|
||||
import io.micrometer.core.instrument.Timer;
|
||||
import io.micrometer.core.instrument.binder.MeterBinder;
|
||||
import io.micrometer.core.instrument.search.Search;
|
||||
import jakarta.inject.Inject;
|
||||
@@ -116,6 +124,16 @@ public class MetricRegistry {
|
||||
public static final String METRIC_SCHEDULER_EXECUTION_MISSING_DURATION_DESCRIPTION = "Missing execution duration inside the Scheduler. A missing execution is an execution that was triggered by the Scheduler but not yet started by the Executor";
|
||||
public static final String METRIC_SCHEDULER_EVALUATION_LOOP_DURATION = "scheduler.evaluation.loop.duration";
|
||||
public static final String METRIC_SCHEDULER_EVALUATION_LOOP_DURATION_DESCRIPTION = "Trigger evaluation loop duration inside the Scheduler";
|
||||
public static final String METRIC_SCHEDULER_EVENTLOOP_THREAD_MAX = "scheduler.eventloop.thread.max";
|
||||
public static final String METRIC_SCHEDULER_EVENTLOOP_THREAD_MAX_DESCRIPTION = "The maximum number of event-loop threads.";
|
||||
public static final String METRIC_SCHEDULER_EVENTLOOP_TICK_DURATION = "scheduler.eventloop.tick.duration";
|
||||
public static final String METRIC_SCHEDULER_EVENTLOOP_TICK_DURATION_DESCRIPTION = "The duration of a single event-loop tick.";
|
||||
public static final String METRIC_SCHEDULER_EVENTLOOP_EVENT_RECEIVED_COUNT = "scheduler.eventloop.events.received.count";
|
||||
public static final String METRIC_SCHEDULER_EVENTLOOP_EVENT_RECEIVED_COUNT_DESCRIPTION = "The total number of events received by the event-loop.";
|
||||
public static final String METRIC_SCHEDULER_EVENTLOOP_EVENT_PROCESS_DURATION = "scheduler.eventloop.event.process.duration";
|
||||
public static final String METRIC_SCHEDULER_EVENTLOOP_EVENT_PROCESS_DURATION_DESCRIPTION = "The duration spent processing individual events within the event-loop.";
|
||||
public static final String METRIC_SCHEDULER_ASSIGNED_VNODES_COUNT = "scheduler.assigned.vnodes.count";
|
||||
public static final String METRIC_SCHEDULER_ASSIGNED_VNODES_COUNT_DESCRIPTION = "The number of virtual nodes assigned to the scheduler";
|
||||
|
||||
public static final String METRIC_STREAMS_STATE_COUNT = "stream.state.count";
|
||||
public static final String METRIC_STREAMS_STATE_COUNT_DESCRIPTION = "Number of Kafka Stream applications by state";
|
||||
@@ -127,6 +145,8 @@ public class MetricRegistry {
|
||||
public static final String METRIC_QUEUE_BIG_MESSAGE_COUNT_DESCRIPTION = "Total number of big messages";
|
||||
public static final String METRIC_QUEUE_PRODUCE_COUNT = "queue.produce.count";
|
||||
public static final String METRIC_QUEUE_PRODUCE_COUNT_DESCRIPTION = "Total number of produced messages";
|
||||
public static final String METRIC_QUEUE_RECEIVE_COUNT = "queue.receive.count";
|
||||
public static final String METRIC_QUEUE_RECEIVE_COUNT_DESCRIPTION = "Total number of received messages";
|
||||
public static final String METRIC_QUEUE_RECEIVE_DURATION = "queue.receive.duration";
|
||||
public static final String METRIC_QUEUE_RECEIVE_DURATION_DESCRIPTION = "Queue duration to receive and consume a batch of messages";
|
||||
public static final String METRIC_QUEUE_POLL_SIZE = "queue.poll.size";
|
||||
@@ -379,19 +399,19 @@ public class MetricRegistry {
|
||||
};
|
||||
return execution.getTenantId() == null ? baseTags : ArrayUtils.addAll(baseTags, TAG_TENANT_ID, execution.getTenantId());
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Return tags for current {@link TriggerContext}
|
||||
* Return tags for current {@link TriggerId}
|
||||
*
|
||||
* @param triggerContext the current TriggerContext
|
||||
* @param triggerId the trigger
|
||||
* @return tags to apply to metrics
|
||||
*/
|
||||
public String[] tags(TriggerContext triggerContext) {
|
||||
public String[] tags(TriggerId triggerId) {
|
||||
var baseTags = new String[]{
|
||||
TAG_FLOW_ID, triggerContext.getFlowId(),
|
||||
TAG_NAMESPACE_ID, triggerContext.getNamespace()
|
||||
TAG_FLOW_ID, triggerId.getFlowId(),
|
||||
TAG_NAMESPACE_ID, triggerId.getNamespace()
|
||||
};
|
||||
return triggerContext.getTenantId() == null ? baseTags : ArrayUtils.addAll(baseTags, TAG_TENANT_ID, triggerContext.getTenantId());
|
||||
return triggerId.getTenantId() == null ? baseTags : ArrayUtils.addAll(baseTags, TAG_TENANT_ID, triggerId.getTenantId());
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -4,13 +4,16 @@ import io.kestra.core.utils.MapUtils;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import jakarta.annotation.Nullable;
|
||||
import jakarta.validation.constraints.NotEmpty;
|
||||
import jakarta.validation.constraints.Pattern;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.function.Predicate;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
@Schema(description = "A key/value pair that can be attached to a Flow or Execution. Labels are often used to organize and categorize objects.")
|
||||
public record Label(@NotEmpty String key, @NotEmpty String value) {
|
||||
public record Label(
|
||||
@NotEmpty @Pattern(regexp = "^[\\p{Ll}][\\p{L}0-9._-]*$", message = "Invalid label key. A valid key contains only lowercase letters numbers hyphens (-) underscores (_) or periods (.) and must begin with a lowercase letter.") String key,
|
||||
@NotEmpty String value) {
|
||||
public static final String SYSTEM_PREFIX = "system.";
|
||||
|
||||
// system labels
|
||||
|
||||
@@ -94,7 +94,7 @@ public record QueryFilter(
|
||||
KIND("kind") {
|
||||
@Override
|
||||
public List<Op> supportedOp() {
|
||||
return List.of(Op.EQUALS,Op.NOT_EQUALS);
|
||||
return List.of(Op.EQUALS,Op.NOT_EQUALS, Op.IN, Op.NOT_IN);
|
||||
}
|
||||
},
|
||||
LABELS("labels") {
|
||||
@@ -106,7 +106,7 @@ public record QueryFilter(
|
||||
FLOW_ID("flowId") {
|
||||
@Override
|
||||
public List<Op> supportedOp() {
|
||||
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.REGEX);
|
||||
return List.of(Op.EQUALS, Op.NOT_EQUALS, Op.CONTAINS, Op.STARTS_WITH, Op.ENDS_WITH, Op.REGEX, Op.IN, Op.NOT_IN, Op.PREFIX);
|
||||
}
|
||||
},
|
||||
UPDATED("updated") {
|
||||
@@ -226,7 +226,7 @@ public record QueryFilter(
|
||||
FLOW {
|
||||
@Override
|
||||
public List<Field> supportedField() {
|
||||
return List.of(Field.LABELS, Field.NAMESPACE, Field.QUERY, Field.SCOPE);
|
||||
return List.of(Field.LABELS, Field.NAMESPACE, Field.QUERY, Field.SCOPE, Field.FLOW_ID);
|
||||
}
|
||||
},
|
||||
NAMESPACE {
|
||||
@@ -241,7 +241,7 @@ public record QueryFilter(
|
||||
return List.of(
|
||||
Field.QUERY, Field.SCOPE, Field.FLOW_ID, Field.START_DATE, Field.END_DATE,
|
||||
Field.STATE, Field.LABELS, Field.TRIGGER_EXECUTION_ID, Field.CHILD_FILTER,
|
||||
Field.NAMESPACE,Field.KIND
|
||||
Field.NAMESPACE, Field.KIND
|
||||
);
|
||||
}
|
||||
},
|
||||
|
||||
@@ -2,7 +2,11 @@ package io.kestra.core.models.conditions;
|
||||
|
||||
import io.kestra.core.exceptions.InternalException;
|
||||
|
||||
|
||||
/**
|
||||
* Conditions of type ScheduleCondition have a special behavior inside the {@link io.kestra.plugin.core.trigger.Schedule} trigger.
|
||||
* They are evaluated specifically and would be taken into account when computing the next evaluation date.
|
||||
* Only conditions based on date should be marked as ScheduleCondition.
|
||||
*/
|
||||
public interface ScheduleCondition {
|
||||
boolean test(ConditionContext conditionContext) throws InternalException;
|
||||
}
|
||||
|
||||
@@ -13,7 +13,6 @@ import io.kestra.core.models.HasUID;
|
||||
import io.kestra.core.models.annotations.PluginProperty;
|
||||
import io.kestra.core.models.flows.check.Check;
|
||||
import io.kestra.core.models.flows.sla.SLA;
|
||||
import io.kestra.core.models.listeners.Listener;
|
||||
import io.kestra.core.models.tasks.FlowableTask;
|
||||
import io.kestra.core.models.tasks.Task;
|
||||
import io.kestra.core.models.tasks.retrys.AbstractRetry;
|
||||
@@ -86,10 +85,6 @@ public class Flow extends AbstractFlow implements HasUID {
|
||||
return this._finally;
|
||||
}
|
||||
|
||||
@Valid
|
||||
@Deprecated
|
||||
List<Listener> listeners;
|
||||
|
||||
@Valid
|
||||
List<Task> afterExecution;
|
||||
|
||||
@@ -99,20 +94,6 @@ public class Flow extends AbstractFlow implements HasUID {
|
||||
@Valid
|
||||
List<PluginDefault> pluginDefaults;
|
||||
|
||||
@Valid
|
||||
List<PluginDefault> taskDefaults;
|
||||
|
||||
@Deprecated
|
||||
public void setTaskDefaults(List<PluginDefault> taskDefaults) {
|
||||
this.pluginDefaults = taskDefaults;
|
||||
this.taskDefaults = taskDefaults;
|
||||
}
|
||||
|
||||
@Deprecated
|
||||
public List<PluginDefault> getTaskDefaults() {
|
||||
return this.taskDefaults;
|
||||
}
|
||||
|
||||
@Valid
|
||||
Concurrency concurrency;
|
||||
|
||||
@@ -153,7 +134,7 @@ public class Flow extends AbstractFlow implements HasUID {
|
||||
this.tasks != null ? this.tasks : Collections.<Task>emptyList(),
|
||||
this.errors != null ? this.errors : Collections.<Task>emptyList(),
|
||||
this._finally != null ? this._finally : Collections.<Task>emptyList(),
|
||||
this.afterExecutionTasks()
|
||||
this.afterExecution != null ? this.afterExecution : Collections.<Task>emptyList()
|
||||
)
|
||||
.flatMap(Collection::stream);
|
||||
}
|
||||
@@ -254,55 +235,6 @@ public class Flow extends AbstractFlow implements HasUID {
|
||||
.orElse(null);
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated should not be used
|
||||
*/
|
||||
@Deprecated(forRemoval = true, since = "0.21.0")
|
||||
public Flow updateTask(String taskId, Task newValue) throws InternalException {
|
||||
Task task = this.findTaskByTaskId(taskId);
|
||||
Flow flow = this instanceof FlowWithSource flowWithSource ? flowWithSource.toFlow() : this;
|
||||
|
||||
Map<String, Object> map = NON_DEFAULT_OBJECT_MAPPER.convertValue(flow, JacksonMapper.MAP_TYPE_REFERENCE);
|
||||
|
||||
return NON_DEFAULT_OBJECT_MAPPER.convertValue(
|
||||
recursiveUpdate(map, task, newValue),
|
||||
Flow.class
|
||||
);
|
||||
}
|
||||
|
||||
private static Object recursiveUpdate(Object object, Task previous, Task newValue) {
|
||||
if (object instanceof Map<?, ?> value) {
|
||||
if (value.containsKey("id") && value.get("id").equals(previous.getId()) &&
|
||||
value.containsKey("type") && value.get("type").equals(previous.getType())
|
||||
) {
|
||||
return NON_DEFAULT_OBJECT_MAPPER.convertValue(newValue, JacksonMapper.MAP_TYPE_REFERENCE);
|
||||
} else {
|
||||
return value
|
||||
.entrySet()
|
||||
.stream()
|
||||
.map(e -> new AbstractMap.SimpleEntry<>(
|
||||
e.getKey(),
|
||||
recursiveUpdate(e.getValue(), previous, newValue)
|
||||
))
|
||||
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
|
||||
}
|
||||
} else if (object instanceof Collection<?> value) {
|
||||
return value
|
||||
.stream()
|
||||
.map(r -> recursiveUpdate(r, previous, newValue))
|
||||
.toList();
|
||||
} else {
|
||||
return object;
|
||||
}
|
||||
}
|
||||
|
||||
private List<Task> afterExecutionTasks() {
|
||||
return ListUtils.concat(
|
||||
ListUtils.emptyOnNull(this.getListeners()).stream().flatMap(listener -> listener.getTasks().stream()).toList(),
|
||||
this.getAfterExecution()
|
||||
);
|
||||
}
|
||||
|
||||
public boolean equalsWithoutRevision(FlowInterface o) {
|
||||
try {
|
||||
return WITHOUT_REVISION_OBJECT_MAPPER.writeValueAsString(this).equals(WITHOUT_REVISION_OBJECT_MAPPER.writeValueAsString(o));
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
package io.kestra.core.models.flows;
|
||||
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.triggers.Trigger;
|
||||
import io.kestra.core.models.triggers.TriggerId;
|
||||
import io.kestra.core.utils.IdUtils;
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.EqualsAndHashCode;
|
||||
@@ -39,7 +39,7 @@ public interface FlowId {
|
||||
return of(tenantId, namespace, id,null).toString();
|
||||
}
|
||||
|
||||
static String uid(Trigger trigger) {
|
||||
static String uid(TriggerId trigger) {
|
||||
return of(trigger.getTenantId(), trigger.getNamespace(), trigger.getFlowId(), null).toString();
|
||||
}
|
||||
|
||||
@@ -50,11 +50,20 @@ public interface FlowId {
|
||||
/**
|
||||
* Static helper method for constructing a new {@link FlowId}.
|
||||
*
|
||||
* @return a new {@link FlowId}.
|
||||
* @return a new {@link FlowId}.
|
||||
*/
|
||||
static FlowId of(String tenantId, String namespace, String id, Integer revision) {
|
||||
return new Default(tenantId, namespace, id, revision);
|
||||
}
|
||||
|
||||
/**
|
||||
* Static helper method for constructing a new {@link TriggerId}.
|
||||
*
|
||||
* @return a new {@link FlowId}.
|
||||
*/
|
||||
static FlowId of(TriggerId triggerId) {
|
||||
return new Default(triggerId.getTenantId(), triggerId.getNamespace(), triggerId.getFlowId(), null);
|
||||
}
|
||||
|
||||
@Getter
|
||||
@AllArgsConstructor
|
||||
|
||||
@@ -19,7 +19,6 @@ public class FlowWithSource extends Flow {
|
||||
|
||||
String source;
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
public Flow toFlow() {
|
||||
return Flow.builder()
|
||||
.tenantId(this.tenantId)
|
||||
@@ -34,7 +33,6 @@ public class FlowWithSource extends Flow {
|
||||
.tasks(this.tasks)
|
||||
.errors(this.errors)
|
||||
._finally(this._finally)
|
||||
.listeners(this.listeners)
|
||||
.afterExecution(this.afterExecution)
|
||||
.triggers(this.triggers)
|
||||
.pluginDefaults(this.pluginDefaults)
|
||||
@@ -61,7 +59,6 @@ public class FlowWithSource extends Flow {
|
||||
.build();
|
||||
}
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
public static FlowWithSource of(Flow flow, String source) {
|
||||
return FlowWithSource.builder()
|
||||
.tenantId(flow.tenantId)
|
||||
@@ -77,7 +74,6 @@ public class FlowWithSource extends Flow {
|
||||
.errors(flow.errors)
|
||||
._finally(flow._finally)
|
||||
.afterExecution(flow.afterExecution)
|
||||
.listeners(flow.listeners)
|
||||
.triggers(flow.triggers)
|
||||
.pluginDefaults(flow.pluginDefaults)
|
||||
.disabled(flow.disabled)
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
package io.kestra.core.models.flows;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonSetter;
|
||||
import com.fasterxml.jackson.annotation.JsonSubTypes;
|
||||
import com.fasterxml.jackson.annotation.JsonTypeInfo;
|
||||
import io.kestra.core.models.flows.input.*;
|
||||
@@ -26,7 +25,6 @@ import lombok.experimental.SuperBuilder;
|
||||
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", visible = true, include = JsonTypeInfo.As.EXISTING_PROPERTY)
|
||||
@JsonSubTypes({
|
||||
@JsonSubTypes.Type(value = ArrayInput.class, name = "ARRAY"),
|
||||
@JsonSubTypes.Type(value = BooleanInput.class, name = "BOOLEAN"),
|
||||
@JsonSubTypes.Type(value = BoolInput.class, name = "BOOL"),
|
||||
@JsonSubTypes.Type(value = DateInput.class, name = "DATE"),
|
||||
@JsonSubTypes.Type(value = DateTimeInput.class, name = "DATETIME"),
|
||||
@@ -37,7 +35,6 @@ import lombok.experimental.SuperBuilder;
|
||||
@JsonSubTypes.Type(value = JsonInput.class, name = "JSON"),
|
||||
@JsonSubTypes.Type(value = SecretInput.class, name = "SECRET"),
|
||||
@JsonSubTypes.Type(value = StringInput.class, name = "STRING"),
|
||||
@JsonSubTypes.Type(value = EnumInput.class, name = "ENUM"),
|
||||
@JsonSubTypes.Type(value = SelectInput.class, name = "SELECT"),
|
||||
@JsonSubTypes.Type(value = TimeInput.class, name = "TIME"),
|
||||
@JsonSubTypes.Type(value = URIInput.class, name = "URI"),
|
||||
@@ -55,9 +52,6 @@ public abstract class Input<T> implements Data {
|
||||
@Pattern(regexp="^[a-zA-Z0-9][.a-zA-Z0-9_-]*")
|
||||
String id;
|
||||
|
||||
@Deprecated
|
||||
String name;
|
||||
|
||||
@Schema(
|
||||
title = "The type of the input."
|
||||
)
|
||||
@@ -95,13 +89,4 @@ public abstract class Input<T> implements Data {
|
||||
String displayName;
|
||||
|
||||
public abstract void validate(T input) throws ConstraintViolationException;
|
||||
|
||||
@JsonSetter
|
||||
public void setName(String name) {
|
||||
if (this.id == null) {
|
||||
this.id = name;
|
||||
}
|
||||
|
||||
this.name = name;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -267,6 +267,10 @@ public class State {
|
||||
return this == Type.RUNNING || this == Type.KILLING;
|
||||
}
|
||||
|
||||
public boolean onlyRunning() {
|
||||
return this == Type.RUNNING;
|
||||
}
|
||||
|
||||
public boolean isFailed() {
|
||||
return this == Type.FAILED;
|
||||
}
|
||||
|
||||
@@ -9,11 +9,9 @@ import io.micronaut.core.annotation.Introspected;
|
||||
@Introspected
|
||||
public enum Type {
|
||||
STRING(StringInput.class.getName()),
|
||||
ENUM(EnumInput.class.getName()),
|
||||
SELECT(SelectInput.class.getName()),
|
||||
INT(IntInput.class.getName()),
|
||||
FLOAT(FloatInput.class.getName()),
|
||||
BOOLEAN(BooleanInput.class.getName()),
|
||||
BOOL(BoolInput.class.getName()),
|
||||
DATETIME(DateTimeInput.class.getName()),
|
||||
DATE(DateInput.class.getName()),
|
||||
|
||||
@@ -1,19 +0,0 @@
|
||||
package io.kestra.core.models.flows.input;
|
||||
|
||||
import io.kestra.core.models.flows.Input;
|
||||
import lombok.Getter;
|
||||
import lombok.NoArgsConstructor;
|
||||
import lombok.experimental.SuperBuilder;
|
||||
|
||||
import jakarta.validation.ConstraintViolationException;
|
||||
|
||||
@SuperBuilder
|
||||
@Getter
|
||||
@NoArgsConstructor
|
||||
@Deprecated
|
||||
public class BooleanInput extends Input<Boolean> {
|
||||
@Override
|
||||
public void validate(Boolean input) throws ConstraintViolationException {
|
||||
// no validation yet
|
||||
}
|
||||
}
|
||||
@@ -1,39 +0,0 @@
|
||||
package io.kestra.core.models.flows.input;
|
||||
|
||||
import io.kestra.core.models.flows.Input;
|
||||
import io.kestra.core.models.validations.ManualConstraintViolation;
|
||||
import io.kestra.core.validations.Regex;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import jakarta.validation.ConstraintViolationException;
|
||||
import jakarta.validation.constraints.NotNull;
|
||||
import lombok.Getter;
|
||||
import lombok.NoArgsConstructor;
|
||||
import lombok.experimental.SuperBuilder;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
@SuperBuilder
|
||||
@Getter
|
||||
@NoArgsConstructor
|
||||
@Deprecated
|
||||
public class EnumInput extends Input<String> {
|
||||
@Schema(
|
||||
title = "List of values.",
|
||||
description = "DEPRECATED; use 'SELECT' instead."
|
||||
)
|
||||
@NotNull
|
||||
List<@Regex String> values;
|
||||
|
||||
@Override
|
||||
public void validate(String input) throws ConstraintViolationException {
|
||||
if (!values.contains(input) && this.getRequired()) {
|
||||
throw ManualConstraintViolation.toConstraintViolationException(
|
||||
"it must match the values `" + values + "`",
|
||||
this,
|
||||
EnumInput.class,
|
||||
getId(),
|
||||
input
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -4,8 +4,6 @@ import java.util.Set;
|
||||
import io.kestra.core.models.flows.Input;
|
||||
import io.kestra.core.validations.FileInputValidation;
|
||||
import jakarta.validation.ConstraintViolationException;
|
||||
import jakarta.validation.constraints.NotNull;
|
||||
import lombok.Builder;
|
||||
import lombok.Getter;
|
||||
import lombok.NoArgsConstructor;
|
||||
import lombok.experimental.SuperBuilder;
|
||||
@@ -19,17 +17,14 @@ import java.util.List;
|
||||
@FileInputValidation
|
||||
public class FileInput extends Input<URI> {
|
||||
|
||||
private static final String DEFAULT_EXTENSION = ".upl";
|
||||
public static final String DEFAULT_EXTENSION = ".upl";
|
||||
|
||||
@Deprecated(since = "0.24", forRemoval = true)
|
||||
public String extension;
|
||||
|
||||
/**
|
||||
* List of allowed file extensions (e.g., [".csv", ".txt", ".pdf"]).
|
||||
* Each extension must start with a dot.
|
||||
*/
|
||||
private List<String> allowedFileExtensions;
|
||||
|
||||
|
||||
/**
|
||||
* Gets the file extension from the URI's path
|
||||
*/
|
||||
@@ -53,15 +48,4 @@ public class FileInput extends Input<URI> {
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
public static String findFileInputExtension(@NotNull final List<Input<?>> inputs, @NotNull final String fileName) {
|
||||
String res = inputs.stream()
|
||||
.filter(in -> in instanceof FileInput)
|
||||
.filter(in -> in.getId().equals(fileName))
|
||||
.filter(flowInput -> ((FileInput) flowInput).getExtension() != null)
|
||||
.map(flowInput -> ((FileInput) flowInput).getExtension())
|
||||
.findFirst()
|
||||
.orElse(FileInput.DEFAULT_EXTENSION);
|
||||
return res.startsWith(".") ? res : "." + res;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,12 +0,0 @@
|
||||
package io.kestra.core.models.flows.sla;
|
||||
|
||||
import java.time.Instant;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
public interface SLAMonitorStorage {
|
||||
void save(SLAMonitor slaMonitor);
|
||||
|
||||
void purge(String executionId);
|
||||
|
||||
void processExpired(Instant now, Consumer<SLAMonitor> consumer);
|
||||
}
|
||||
@@ -1,6 +1,7 @@
|
||||
package io.kestra.core.models.hierarchies;
|
||||
|
||||
import io.kestra.core.models.triggers.*;
|
||||
import io.kestra.core.scheduler.model.TriggerState;
|
||||
import io.micronaut.core.annotation.Introspected;
|
||||
import lombok.Getter;
|
||||
import lombok.Setter;
|
||||
@@ -12,9 +13,9 @@ import lombok.ToString;
|
||||
public abstract class AbstractGraphTrigger extends AbstractGraph {
|
||||
@Setter
|
||||
private TriggerInterface triggerDeclaration;
|
||||
private final Trigger trigger;
|
||||
private final TriggerState trigger;
|
||||
|
||||
public AbstractGraphTrigger(AbstractTrigger triggerDeclaration, Trigger trigger) {
|
||||
public AbstractGraphTrigger(AbstractTrigger triggerDeclaration, TriggerState trigger) {
|
||||
super();
|
||||
|
||||
this.triggerDeclaration = triggerDeclaration;
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
package io.kestra.core.models.hierarchies;
|
||||
|
||||
import io.kestra.core.models.triggers.AbstractTrigger;
|
||||
import io.kestra.core.models.triggers.Trigger;
|
||||
import io.kestra.core.scheduler.model.TriggerState;
|
||||
|
||||
|
||||
public class GraphTrigger extends AbstractGraphTrigger {
|
||||
public GraphTrigger(AbstractTrigger triggerDeclaration, Trigger trigger) {
|
||||
public GraphTrigger(AbstractTrigger triggerDeclaration, TriggerState trigger) {
|
||||
super(triggerDeclaration, trigger);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,25 +0,0 @@
|
||||
package io.kestra.core.models.listeners;
|
||||
|
||||
import io.micronaut.core.annotation.Introspected;
|
||||
import lombok.Builder;
|
||||
import lombok.Value;
|
||||
import io.kestra.core.models.conditions.Condition;
|
||||
import io.kestra.core.models.tasks.Task;
|
||||
|
||||
import java.util.List;
|
||||
import jakarta.validation.Valid;
|
||||
import jakarta.validation.constraints.NotEmpty;
|
||||
|
||||
@Value
|
||||
@Builder
|
||||
@Introspected
|
||||
public class Listener {
|
||||
String description;
|
||||
|
||||
@Valid
|
||||
List<Condition> conditions;
|
||||
|
||||
@Valid
|
||||
@NotEmpty
|
||||
List<Task> tasks;
|
||||
}
|
||||
@@ -54,12 +54,7 @@ public class Property<T> {
|
||||
private String expression;
|
||||
private T value;
|
||||
|
||||
/**
|
||||
* @deprecated use {@link #ofExpression(String)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
// Note: when not used, this constructor would not be deleted but made private so it can only be used by ofExpression(String) and the deserializer
|
||||
public Property(String expression) {
|
||||
private Property(String expression) {
|
||||
this(expression, false);
|
||||
}
|
||||
|
||||
@@ -93,7 +88,7 @@ public class Property<T> {
|
||||
* @return a new {@link Property} without a pre-rendered value
|
||||
*/
|
||||
public Property<T> skipCache() {
|
||||
return Property.ofExpression(expression);
|
||||
return new Property<>(expression, true);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -130,14 +125,6 @@ public class Property<T> {
|
||||
return p;
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated use {@link #ofValue(Object)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public static <V> Property<V> of(V value) {
|
||||
return ofValue(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Build a new Property object with a Pebble expression.<br>
|
||||
* This property object will not cache its rendered value.
|
||||
|
||||
@@ -15,31 +15,10 @@ public class TaskException extends Exception {
|
||||
|
||||
private transient AbstractLogConsumer logConsumer;
|
||||
|
||||
/**
|
||||
* This constructor will certainly be removed in 0.21 as we keep it only because all task runners must be impacted.
|
||||
* @deprecated use {@link #TaskException(int, AbstractLogConsumer)} instead.
|
||||
*/
|
||||
@Deprecated(forRemoval = true, since = "0.20.0")
|
||||
public TaskException(int exitCode, int stdOutCount, int stdErrCount) {
|
||||
this("Command failed with exit code " + exitCode, exitCode, stdOutCount, stdErrCount);
|
||||
}
|
||||
|
||||
public TaskException(int exitCode, AbstractLogConsumer logConsumer) {
|
||||
this("Command failed with exit code " + exitCode, exitCode, logConsumer);
|
||||
}
|
||||
|
||||
/**
|
||||
* This constructor will certainly be removed in 0.21 as we keep it only because all task runners must be impacted.
|
||||
* @deprecated use {@link #TaskException(String, int, AbstractLogConsumer)} instead.
|
||||
*/
|
||||
@Deprecated(forRemoval = true, since = "0.20.0")
|
||||
public TaskException(String message, int exitCode, int stdOutCount, int stdErrCount) {
|
||||
super(message);
|
||||
this.exitCode = exitCode;
|
||||
this.stdOutCount = stdOutCount;
|
||||
this.stdErrCount = stdErrCount;
|
||||
}
|
||||
|
||||
public TaskException(String message, int exitCode, AbstractLogConsumer logConsumer) {
|
||||
super(message);
|
||||
this.exitCode = exitCode;
|
||||
|
||||
@@ -1,156 +0,0 @@
|
||||
package io.kestra.core.models.templates;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore;
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.fasterxml.jackson.databind.introspect.AnnotatedMember;
|
||||
import com.fasterxml.jackson.databind.introspect.JacksonAnnotationIntrospector;
|
||||
import io.kestra.core.models.DeletedInterface;
|
||||
import io.kestra.core.models.HasUID;
|
||||
import io.kestra.core.models.TenantInterface;
|
||||
import io.kestra.core.models.tasks.Task;
|
||||
import io.kestra.core.models.validations.ManualConstraintViolation;
|
||||
import io.kestra.core.serializers.JacksonMapper;
|
||||
import io.kestra.core.utils.IdUtils;
|
||||
import io.micronaut.core.annotation.Introspected;
|
||||
import io.swagger.v3.oas.annotations.Hidden;
|
||||
import lombok.*;
|
||||
import lombok.experimental.SuperBuilder;
|
||||
|
||||
import java.util.*;
|
||||
import jakarta.validation.ConstraintViolation;
|
||||
import jakarta.validation.ConstraintViolationException;
|
||||
import jakarta.validation.Valid;
|
||||
import jakarta.validation.constraints.NotBlank;
|
||||
import jakarta.validation.constraints.NotEmpty;
|
||||
import jakarta.validation.constraints.NotNull;
|
||||
import jakarta.validation.constraints.Pattern;
|
||||
|
||||
@SuperBuilder(toBuilder = true)
|
||||
@Getter
|
||||
@AllArgsConstructor
|
||||
@NoArgsConstructor
|
||||
@Introspected
|
||||
@ToString
|
||||
@EqualsAndHashCode
|
||||
public class Template implements DeletedInterface, TenantInterface, HasUID {
|
||||
private static final ObjectMapper YAML_MAPPER = JacksonMapper.ofYaml().copy()
|
||||
.setAnnotationIntrospector(new JacksonAnnotationIntrospector() {
|
||||
@Override
|
||||
public boolean hasIgnoreMarker(final AnnotatedMember m) {
|
||||
List<String> exclusions = Arrays.asList("revision", "deleted", "source");
|
||||
return exclusions.contains(m.getName()) || super.hasIgnoreMarker(m);
|
||||
}
|
||||
})
|
||||
.setDefaultPropertyInclusion(JsonInclude.Include.NON_DEFAULT);
|
||||
|
||||
@Setter
|
||||
@Hidden
|
||||
@Pattern(regexp = "^[a-z0-9][a-z0-9_-]*")
|
||||
private String tenantId;
|
||||
|
||||
@NotNull
|
||||
@NotBlank
|
||||
@Pattern(regexp = "^[a-zA-Z0-9][a-zA-Z0-9._-]*")
|
||||
private String id;
|
||||
|
||||
@NotNull
|
||||
@Pattern(regexp="^[a-z0-9][a-z0-9._-]*")
|
||||
private String namespace;
|
||||
|
||||
String description;
|
||||
|
||||
@Valid
|
||||
@NotEmpty
|
||||
private List<Task> tasks;
|
||||
|
||||
@Valid
|
||||
private List<Task> errors;
|
||||
|
||||
@Valid
|
||||
@JsonProperty("finally")
|
||||
@Getter(AccessLevel.NONE)
|
||||
protected List<Task> _finally;
|
||||
|
||||
public List<Task> getFinally() {
|
||||
return this._finally;
|
||||
}
|
||||
|
||||
@NotNull
|
||||
@Builder.Default
|
||||
private final boolean deleted = false;
|
||||
|
||||
|
||||
/** {@inheritDoc **/
|
||||
@Override
|
||||
@JsonIgnore
|
||||
public String uid() {
|
||||
return Template.uid(
|
||||
this.getTenantId(),
|
||||
this.getNamespace(),
|
||||
this.getId()
|
||||
);
|
||||
}
|
||||
|
||||
@JsonIgnore
|
||||
public static String uid(String tenantId, String namespace, String id) {
|
||||
return IdUtils.fromParts(
|
||||
tenantId,
|
||||
namespace,
|
||||
id
|
||||
);
|
||||
}
|
||||
|
||||
public Optional<ConstraintViolationException> validateUpdate(Template updated) {
|
||||
Set<ConstraintViolation<?>> violations = new HashSet<>();
|
||||
|
||||
if (!updated.getId().equals(this.getId())) {
|
||||
violations.add(ManualConstraintViolation.of(
|
||||
"Illegal template id update",
|
||||
updated,
|
||||
Template.class,
|
||||
"template.id",
|
||||
updated.getId()
|
||||
));
|
||||
}
|
||||
|
||||
if (!updated.getNamespace().equals(this.getNamespace())) {
|
||||
violations.add(ManualConstraintViolation.of(
|
||||
"Illegal namespace update",
|
||||
updated,
|
||||
Template.class,
|
||||
"template.namespace",
|
||||
updated.getNamespace()
|
||||
));
|
||||
}
|
||||
|
||||
if (!violations.isEmpty()) {
|
||||
return Optional.of(new ConstraintViolationException(violations));
|
||||
} else {
|
||||
return Optional.empty();
|
||||
}
|
||||
}
|
||||
|
||||
public String generateSource() {
|
||||
try {
|
||||
return YAML_MAPPER.writeValueAsString(this);
|
||||
} catch (JsonProcessingException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
public Template toDeleted() {
|
||||
return new Template(
|
||||
this.tenantId,
|
||||
this.id,
|
||||
this.namespace,
|
||||
this.description,
|
||||
this.tasks,
|
||||
this.errors,
|
||||
this._finally,
|
||||
true
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -1,15 +0,0 @@
|
||||
package io.kestra.core.models.templates;
|
||||
|
||||
import io.micronaut.context.annotation.Requires;
|
||||
import io.micronaut.core.util.StringUtils;
|
||||
|
||||
import java.lang.annotation.*;
|
||||
|
||||
@Documented
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@Target({ElementType.PACKAGE, ElementType.TYPE})
|
||||
@Requires(property = "kestra.templates.enabled", value = StringUtils.TRUE, defaultValue = StringUtils.FALSE)
|
||||
@Inherited
|
||||
public @interface TemplateEnabled {
|
||||
|
||||
}
|
||||
@@ -1,18 +0,0 @@
|
||||
package io.kestra.core.models.templates;
|
||||
|
||||
import io.micronaut.core.annotation.Introspected;
|
||||
import lombok.*;
|
||||
import lombok.experimental.SuperBuilder;
|
||||
import lombok.extern.jackson.Jacksonized;
|
||||
|
||||
@SuperBuilder
|
||||
@Getter
|
||||
@AllArgsConstructor
|
||||
@NoArgsConstructor
|
||||
@Introspected
|
||||
@ToString
|
||||
@EqualsAndHashCode
|
||||
public class TemplateSource extends Template {
|
||||
String source;
|
||||
String exception;
|
||||
}
|
||||
@@ -82,6 +82,12 @@ abstract public class AbstractTrigger implements TriggerInterface {
|
||||
@PluginProperty(hidden = true, group = PluginProperty.CORE_GROUP)
|
||||
private boolean failOnTriggerError = false;
|
||||
|
||||
@PluginProperty(group = PluginProperty.CORE_GROUP)
|
||||
@Schema(
|
||||
title = "Specifies whether a trigger is allowed to start a new execution even if a previous run is still in progress."
|
||||
)
|
||||
private boolean allowConcurrent = false;
|
||||
|
||||
/**
|
||||
* For backward compatibility: we rename minLogLevel to logLevel.
|
||||
* @deprecated use {@link #logLevel} instead
|
||||
|
||||
@@ -66,15 +66,4 @@ public class Backfill {
|
||||
title = "The nextExecutionDate before the backfill was created."
|
||||
)
|
||||
ZonedDateTime previousNextExecutionDate;
|
||||
|
||||
public Backfill(ZonedDateTime start, ZonedDateTime end, ZonedDateTime currentDate, Boolean paused, Map<String, Object> inputs, List<Label> labels, ZonedDateTime previousNextExecutionDate) {
|
||||
this.start = start;
|
||||
this.end = end;
|
||||
this.currentDate = start;
|
||||
this.paused = paused != null ? paused : false;
|
||||
this.inputs = inputs;
|
||||
this.labels = labels;
|
||||
this.previousNextExecutionDate = previousNextExecutionDate;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@ import io.kestra.core.exceptions.InvalidTriggerConfigurationException;
|
||||
import io.kestra.core.models.annotations.PluginProperty;
|
||||
import io.kestra.core.models.conditions.ConditionContext;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.scheduler.SchedulerClock;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
|
||||
import java.time.DateTimeException;
|
||||
@@ -51,9 +52,9 @@ public interface PollingTriggerInterface extends WorkerTriggerInterface {
|
||||
Duration interval = this.getInterval();
|
||||
|
||||
try {
|
||||
return ZonedDateTime.now().plus(interval);
|
||||
return SchedulerClock.now().plus(interval);
|
||||
} catch (DateTimeException | ArithmeticException e) {
|
||||
throw new InvalidTriggerConfigurationException("Trigger interval too large", e);
|
||||
throw new InvalidTriggerConfigurationException("Trigger interval duration too large '" + interval + "'", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,12 +3,22 @@ package io.kestra.core.models.triggers;
|
||||
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
|
||||
import io.kestra.core.models.conditions.ConditionContext;
|
||||
import io.kestra.core.runners.RunContext;
|
||||
import io.kestra.core.validations.TimezoneId;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
|
||||
import java.time.ZonedDateTime;
|
||||
import java.util.TimeZone;
|
||||
|
||||
public interface Schedulable extends PollingTriggerInterface {
|
||||
|
||||
public interface Schedulable extends PollingTriggerInterface{
|
||||
String PLUGIN_PROPERTY_RECOVER_MISSED_SCHEDULES = "recoverMissedSchedules";
|
||||
|
||||
@TimezoneId
|
||||
@Schema(
|
||||
title = "The [time zone identifier](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) (i.e. the second column in [the Wikipedia table](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones#List)) to use for scheduling the trigger. Default value is the system time-zone."
|
||||
)
|
||||
String getTimezone();
|
||||
|
||||
/**
|
||||
* Compute the previous evaluation of a trigger.
|
||||
* This is used when a trigger misses some schedule to compute the next date to evaluate in the past.
|
||||
@@ -23,7 +33,7 @@ public interface Schedulable extends PollingTriggerInterface{
|
||||
default RecoverMissedSchedules defaultRecoverMissedSchedules(RunContext runContext) {
|
||||
return runContext
|
||||
.<String>pluginConfiguration(PLUGIN_PROPERTY_RECOVER_MISSED_SCHEDULES)
|
||||
.map(conf -> RecoverMissedSchedules.valueOf(conf))
|
||||
.map(RecoverMissedSchedules::valueOf)
|
||||
.orElse(RecoverMissedSchedules.ALL);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,24 +1,24 @@
|
||||
package io.kestra.core.models.triggers;
|
||||
|
||||
import io.kestra.core.exceptions.InvalidTriggerConfigurationException;
|
||||
import io.kestra.core.models.HasUID;
|
||||
import io.kestra.core.models.conditions.ConditionContext;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.flows.Flow;
|
||||
import io.kestra.core.models.flows.FlowId;
|
||||
import io.kestra.core.models.flows.FlowInterface;
|
||||
import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.utils.IdUtils;
|
||||
import io.kestra.plugin.core.trigger.Schedule;
|
||||
import io.kestra.core.scheduler.model.TriggerState;
|
||||
import io.kestra.core.scheduler.vnodes.VNodes;
|
||||
import io.micronaut.core.annotation.Nullable;
|
||||
import lombok.*;
|
||||
import lombok.EqualsAndHashCode;
|
||||
import lombok.Getter;
|
||||
import lombok.NoArgsConstructor;
|
||||
import lombok.Setter;
|
||||
import lombok.ToString;
|
||||
import lombok.experimental.SuperBuilder;
|
||||
|
||||
import java.time.Instant;
|
||||
import java.time.ZonedDateTime;
|
||||
import java.time.temporal.ChronoUnit;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
|
||||
/**
|
||||
* DON'T USE THIS CLASS - ONLY REQUIRED FOR 2.0 MIGRATION
|
||||
*/
|
||||
@Deprecated(forRemoval = true, since="2.0.0")
|
||||
@SuperBuilder(toBuilder = true)
|
||||
@ToString
|
||||
@EqualsAndHashCode(callSuper = true)
|
||||
@@ -38,6 +38,9 @@ public class Trigger extends TriggerContext implements HasUID {
|
||||
@Setter // it's unfortunate but neither toBuilder() not @With works so using @Setter here
|
||||
private String workerId;
|
||||
|
||||
@Nullable
|
||||
private Set<String> executions;
|
||||
|
||||
protected Trigger(TriggerBuilder<?, ?> b) {
|
||||
super(b);
|
||||
this.executionId = b.executionId;
|
||||
@@ -48,267 +51,34 @@ public class Trigger extends TriggerContext implements HasUID {
|
||||
public static TriggerBuilder<?, ?> builder() {
|
||||
return new TriggerBuilderImpl();
|
||||
}
|
||||
|
||||
|
||||
/** {@inheritDoc **/
|
||||
@Override
|
||||
public String uid() {
|
||||
return uid(this);
|
||||
}
|
||||
|
||||
public static String uid(Trigger trigger) {
|
||||
return IdUtils.fromParts(
|
||||
trigger.getTenantId(),
|
||||
trigger.getNamespace(),
|
||||
trigger.getFlowId(),
|
||||
trigger.getTriggerId()
|
||||
);
|
||||
}
|
||||
|
||||
public static String uid(Execution execution) {
|
||||
return IdUtils.fromParts(
|
||||
execution.getTenantId(),
|
||||
execution.getNamespace(),
|
||||
execution.getFlowId(),
|
||||
execution.getTrigger().getId()
|
||||
);
|
||||
}
|
||||
|
||||
public static String uid(FlowInterface flow, AbstractTrigger abstractTrigger) {
|
||||
return IdUtils.fromParts(
|
||||
flow.getTenantId(),
|
||||
flow.getNamespace(),
|
||||
flow.getId(),
|
||||
abstractTrigger.getId()
|
||||
);
|
||||
}
|
||||
|
||||
public String flowUid() {
|
||||
return FlowId.uidWithoutRevision(this.getTenantId(), this.getNamespace(), this.getFlowId());
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new Trigger with no execution information and no evaluation lock.
|
||||
*/
|
||||
public static Trigger of(FlowInterface flow, AbstractTrigger abstractTrigger) {
|
||||
return Trigger.builder()
|
||||
.tenantId(flow.getTenantId())
|
||||
.namespace(flow.getNamespace())
|
||||
.flowId(flow.getId())
|
||||
.triggerId(abstractTrigger.getId())
|
||||
.stopAfter(abstractTrigger.getStopAfter())
|
||||
.build();
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new Trigger from polling trigger with no execution information and no evaluation lock.
|
||||
*/
|
||||
public static Trigger of(TriggerContext triggerContext, ZonedDateTime nextExecutionDate) {
|
||||
return fromContext(triggerContext)
|
||||
.nextExecutionDate(nextExecutionDate)
|
||||
.build();
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new Trigger with execution information and specific nextExecutionDate.
|
||||
* This one is use when starting a schedule execution as the nextExecutionDate come from the execution variables
|
||||
* <p>
|
||||
* This is used to lock the trigger while an execution is running, it will also erase the evaluation lock.
|
||||
*/
|
||||
public static Trigger of(TriggerContext triggerContext, Execution execution, ZonedDateTime nextExecutionDate) {
|
||||
return fromContext(triggerContext)
|
||||
.executionId(execution.getId())
|
||||
.updatedDate(Instant.now())
|
||||
.nextExecutionDate(nextExecutionDate)
|
||||
.build();
|
||||
}
|
||||
|
||||
public static Trigger fromEvaluateFailed(TriggerContext triggerContext, ZonedDateTime nextExecutionDate) {
|
||||
return fromContext(triggerContext)
|
||||
.executionId(null)
|
||||
.updatedDate(Instant.now())
|
||||
.nextExecutionDate(nextExecutionDate)
|
||||
.build();
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new Trigger with execution information.
|
||||
* <p>
|
||||
* This is used to update the trigger with the execution information, it will also erase the trigger date.
|
||||
*/
|
||||
public static Trigger of(Execution execution, Trigger trigger) {
|
||||
return Trigger.builder()
|
||||
.tenantId(execution.getTenantId())
|
||||
.namespace(execution.getNamespace())
|
||||
.flowId(execution.getFlowId())
|
||||
.triggerId(execution.getTrigger().getId())
|
||||
.date(trigger.getDate())
|
||||
.nextExecutionDate(trigger.getNextExecutionDate())
|
||||
.executionId(execution.getId())
|
||||
.updatedDate(Instant.now())
|
||||
.backfill(trigger.getBackfill())
|
||||
.stopAfter(trigger.getStopAfter())
|
||||
.disabled(trigger.getDisabled())
|
||||
.build();
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new Trigger with an evaluate running date.
|
||||
* <p>
|
||||
* This is used to lock the trigger evaluation.
|
||||
*/
|
||||
public static Trigger of(Trigger trigger, ZonedDateTime evaluateRunningDate) {
|
||||
return fromContext(trigger)
|
||||
.nextExecutionDate(trigger.getNextExecutionDate())
|
||||
.evaluateRunningDate(evaluateRunningDate)
|
||||
.updatedDate(Instant.now())
|
||||
.build();
|
||||
}
|
||||
|
||||
// Used to update trigger in flowListeners
|
||||
public static Trigger of(FlowInterface flow, AbstractTrigger abstractTrigger, ConditionContext conditionContext, Optional<Trigger> lastTrigger) throws Exception {
|
||||
ZonedDateTime nextDate = null;
|
||||
boolean disabled = lastTrigger.map(TriggerContext::getDisabled).orElse(Boolean.FALSE);
|
||||
|
||||
if (abstractTrigger instanceof PollingTriggerInterface pollingTriggerInterface) {
|
||||
try {
|
||||
nextDate = pollingTriggerInterface.nextEvaluationDate(conditionContext, Optional.empty());
|
||||
} catch (InvalidTriggerConfigurationException e) {
|
||||
disabled = true;
|
||||
}
|
||||
}
|
||||
|
||||
return Trigger.builder()
|
||||
.tenantId(flow.getTenantId())
|
||||
.namespace(flow.getNamespace())
|
||||
.flowId(flow.getId())
|
||||
.triggerId(abstractTrigger.getId())
|
||||
.date(ZonedDateTime.now().truncatedTo(ChronoUnit.SECONDS))
|
||||
.nextExecutionDate(nextDate)
|
||||
.stopAfter(abstractTrigger.getStopAfter())
|
||||
.disabled(disabled)
|
||||
.backfill(null)
|
||||
.build();
|
||||
}
|
||||
|
||||
public Trigger resetExecution(Flow flow, Execution execution, ConditionContext conditionContext) {
|
||||
boolean disabled = this.getStopAfter() != null ? this.getStopAfter().contains(execution.getState().getCurrent()) : this.getDisabled();
|
||||
if (!disabled) {
|
||||
AbstractTrigger abstractTrigger = flow.findTriggerByTriggerId(this.getTriggerId());
|
||||
if (abstractTrigger == null) {
|
||||
throw new IllegalArgumentException("Unable to find trigger with id '" + this.getTriggerId() + "'");
|
||||
}
|
||||
// If trigger is a schedule and execution ended after the next execution date
|
||||
else if (abstractTrigger instanceof Schedule schedule &&
|
||||
this.getNextExecutionDate() != null &&
|
||||
execution.getState().getEndDate().get().isAfter(this.getNextExecutionDate().toInstant())
|
||||
) {
|
||||
RecoverMissedSchedules recoverMissedSchedules = Optional.ofNullable(schedule.getRecoverMissedSchedules())
|
||||
.orElseGet(() -> schedule.defaultRecoverMissedSchedules(conditionContext.getRunContext()));
|
||||
|
||||
ZonedDateTime previousDate = schedule.previousEvaluationDate(conditionContext);
|
||||
|
||||
if (recoverMissedSchedules.equals(RecoverMissedSchedules.LAST)) {
|
||||
return resetExecution(execution.getState().getCurrent(), previousDate);
|
||||
} else if (recoverMissedSchedules.equals(RecoverMissedSchedules.NONE)) {
|
||||
return resetExecution(execution.getState().getCurrent(), schedule.nextEvaluationDate(conditionContext, Optional.empty()));
|
||||
}
|
||||
}
|
||||
}
|
||||
return resetExecution(execution.getState().getCurrent());
|
||||
}
|
||||
|
||||
public Trigger resetExecution(State.Type executionEndState) {
|
||||
return resetExecution(executionEndState, this.getNextExecutionDate());
|
||||
}
|
||||
|
||||
public Trigger resetExecution(State.Type executionEndState, ZonedDateTime nextExecutionDate) {
|
||||
// switch disabled automatically if the executionEndState is one of the stopAfter states
|
||||
Boolean disabled = this.getStopAfter() != null ? this.getStopAfter().contains(executionEndState) : this.getDisabled();
|
||||
|
||||
return Trigger.builder()
|
||||
.tenantId(this.getTenantId())
|
||||
.namespace(this.getNamespace())
|
||||
.flowId(this.getFlowId())
|
||||
.triggerId(this.getTriggerId())
|
||||
.date(this.getDate())
|
||||
.nextExecutionDate(nextExecutionDate)
|
||||
.stopAfter(this.getStopAfter())
|
||||
.backfill(this.getBackfill())
|
||||
.disabled(disabled)
|
||||
.evaluateRunningDate(this.getEvaluateRunningDate())
|
||||
.build();
|
||||
}
|
||||
|
||||
public Trigger unlock() {
|
||||
return Trigger.builder()
|
||||
.tenantId(this.getTenantId())
|
||||
.namespace(this.getNamespace())
|
||||
.flowId(this.getFlowId())
|
||||
.triggerId(this.getTriggerId())
|
||||
.date(this.getDate())
|
||||
.nextExecutionDate(this.getNextExecutionDate())
|
||||
.backfill(this.getBackfill())
|
||||
.stopAfter(this.getStopAfter())
|
||||
.disabled(this.getDisabled())
|
||||
.build();
|
||||
}
|
||||
|
||||
public Trigger withBackfill(final Backfill backfill) {
|
||||
Trigger updated = this;
|
||||
// If a backfill is created, we update the trigger
|
||||
// and set the nextExecutionDate() as the previous one
|
||||
if (backfill != null) {
|
||||
updated = this.toBuilder()
|
||||
.backfill(
|
||||
backfill
|
||||
.toBuilder()
|
||||
.end(backfill.getEnd() != null ? backfill.getEnd() : ZonedDateTime.now())
|
||||
.currentDate(backfill.getStart())
|
||||
.previousNextExecutionDate(this.getNextExecutionDate())
|
||||
.build())
|
||||
.build();
|
||||
}
|
||||
return updated;
|
||||
}
|
||||
|
||||
// if the next date is after the backfill end, we remove the backfill
|
||||
// if not, we update the backfill with the next Date
|
||||
// which will be the base date to calculate the next one
|
||||
public Trigger checkBackfill() {
|
||||
if (this.getBackfill() != null && !this.getBackfill().getPaused()) {
|
||||
Backfill backfill = this.getBackfill();
|
||||
if (this.getNextExecutionDate().isAfter(backfill.getEnd())) {
|
||||
|
||||
return this.toBuilder().nextExecutionDate(backfill.getPreviousNextExecutionDate()).backfill(null).build();
|
||||
} else {
|
||||
|
||||
return this.toBuilder()
|
||||
.backfill(
|
||||
backfill.toBuilder().currentDate(this.getNextExecutionDate()).build()
|
||||
)
|
||||
.build();
|
||||
}
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
// Add this line and all is good
|
||||
|
||||
private static TriggerBuilder<?, ?> fromContext(TriggerContext triggerContext) {
|
||||
return Trigger.builder()
|
||||
.tenantId(triggerContext.getTenantId())
|
||||
.namespace(triggerContext.getNamespace())
|
||||
.flowId(triggerContext.getFlowId())
|
||||
.triggerId(triggerContext.getTriggerId())
|
||||
.date(triggerContext.getDate())
|
||||
.backfill(triggerContext.getBackfill())
|
||||
.stopAfter(triggerContext.getStopAfter())
|
||||
.disabled(triggerContext.getDisabled());
|
||||
}
|
||||
|
||||
|
||||
// This is a hack to make JavaDoc working as annotation processor didn't run before JavaDoc.
|
||||
// See https://stackoverflow.com/questions/51947791/javadoc-cannot-find-symbol-error-when-using-lomboks-builder-annotation
|
||||
public static abstract class TriggerBuilder<C extends Trigger, B extends TriggerBuilder<C, B>> extends TriggerContextBuilder<C, B> {
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts this trigger to {@link TriggerState}.
|
||||
*
|
||||
* @param vNodes the number of virtual nodes.
|
||||
* @return the new {@link TriggerState}.
|
||||
*/
|
||||
public TriggerState toTriggerState(int vNodes) {
|
||||
return TriggerState
|
||||
.builder()
|
||||
.tenantId(getTenantId())
|
||||
.namespace(getNamespace())
|
||||
.flowId(getFlowId())
|
||||
.triggerId(getTriggerId())
|
||||
.updatedAt(getUpdatedDate())
|
||||
.evaluatedAt(getDate().toInstant())
|
||||
.nextEvaluationDate(getNextExecutionDate().toInstant())
|
||||
.backfill(getBackfill())
|
||||
.stopAfter(getStopAfter())
|
||||
.disabled(getDisabled())
|
||||
.workerId(getWorkerId())
|
||||
.vnode(VNodes.computeVNodeFromTrigger(this, vNodes))
|
||||
.locked(getExecutionId() != null)
|
||||
.build();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
package io.kestra.core.models.triggers;
|
||||
|
||||
import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.utils.IdUtils;
|
||||
import io.micronaut.core.annotation.Introspected;
|
||||
import io.micronaut.core.annotation.Nullable;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
@@ -21,7 +20,7 @@ import java.util.List;
|
||||
@Getter
|
||||
@NoArgsConstructor
|
||||
@Introspected
|
||||
public class TriggerContext {
|
||||
public class TriggerContext implements TriggerId {
|
||||
@Setter
|
||||
@Pattern(regexp = "^[a-z0-9][a-z0-9_-]")
|
||||
private String tenantId;
|
||||
@@ -34,7 +33,10 @@ public class TriggerContext {
|
||||
|
||||
@NotNull
|
||||
private String triggerId;
|
||||
|
||||
|
||||
/**
|
||||
* The timestamp when this trigger was last executed.
|
||||
*/
|
||||
@NotNull
|
||||
private ZonedDateTime date;
|
||||
|
||||
@@ -46,7 +48,7 @@ public class TriggerContext {
|
||||
|
||||
@Nullable
|
||||
private List<State.Type> stopAfter;
|
||||
|
||||
|
||||
@Schema(defaultValue = "false")
|
||||
private Boolean disabled = Boolean.FALSE;
|
||||
|
||||
@@ -65,20 +67,7 @@ public class TriggerContext {
|
||||
public static TriggerContextBuilder<?, ?> builder() {
|
||||
return new TriggerContextBuilderImpl();
|
||||
}
|
||||
|
||||
public String uid() {
|
||||
return uid(this);
|
||||
}
|
||||
|
||||
public static String uid(TriggerContext trigger) {
|
||||
return IdUtils.fromParts(
|
||||
trigger.getTenantId(),
|
||||
trigger.getNamespace(),
|
||||
trigger.getFlowId(),
|
||||
trigger.getTriggerId()
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
public Boolean getDisabled() {
|
||||
return this.disabled != null ? this.disabled : Boolean.FALSE;
|
||||
}
|
||||
|
||||
@@ -0,0 +1,80 @@
|
||||
package io.kestra.core.models.triggers;
|
||||
|
||||
import io.kestra.core.models.HasUID;
|
||||
import io.kestra.core.models.flows.FlowId;
|
||||
import io.kestra.core.utils.IdUtils;
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.EqualsAndHashCode;
|
||||
import lombok.Getter;
|
||||
|
||||
/**
|
||||
* Represents a unique and global identifier for a trigger.
|
||||
*/
|
||||
public interface TriggerId extends HasUID {
|
||||
|
||||
String getTenantId();
|
||||
|
||||
String getNamespace();
|
||||
|
||||
String getFlowId();
|
||||
|
||||
String getTriggerId();
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
@Override
|
||||
default String uid() {
|
||||
return IdUtils.fromParts(
|
||||
getTenantId(),
|
||||
getNamespace(),
|
||||
getFlowId(),
|
||||
getTriggerId()
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Static helper method for constructing a new {@link TriggerId}.
|
||||
*
|
||||
* @return a new {@link TriggerId}.
|
||||
*/
|
||||
static TriggerId of(String tenantId, String namespace, String flowId, String triggerId) {
|
||||
return new TriggerId.Default(tenantId, namespace, flowId, triggerId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Static helper method for constructing a new {@link TriggerId}.
|
||||
*
|
||||
* @param flowId a {@link FlowId}
|
||||
* @param trigger an {@link AbstractTrigger}.
|
||||
* @return a new {@link TriggerId}.
|
||||
*/
|
||||
static TriggerId of(FlowId flowId, AbstractTrigger trigger) {
|
||||
return new Default(flowId.getTenantId(), flowId.getNamespace(), flowId.getId(), trigger.getId());
|
||||
}
|
||||
|
||||
/**
|
||||
* Static helper method for constructing a new {@link TriggerId}.
|
||||
*
|
||||
* @param triggerId a {@link TriggerId}
|
||||
* @return a new {@link TriggerId}.
|
||||
*/
|
||||
static TriggerId of(TriggerId triggerId) {
|
||||
return new Default(triggerId.getTenantId(), triggerId.getNamespace(), triggerId.getFlowId(), triggerId.getTriggerId());
|
||||
}
|
||||
|
||||
@Getter
|
||||
@AllArgsConstructor
|
||||
@EqualsAndHashCode
|
||||
class Default implements TriggerId {
|
||||
private final String tenantId;
|
||||
private final String namespace;
|
||||
private final String flowId;
|
||||
private final String triggerId;
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "[tenant=" + tenantId +", namespace=" + namespace + ", flow=" + flowId + ", trigger=" + triggerId + "]";
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -82,23 +82,14 @@ public abstract class TriggerService {
|
||||
.build();
|
||||
|
||||
Map<String, Object> allInputs = new HashMap<>();
|
||||
// add flow inputs with default value
|
||||
var flow = conditionContext.getFlow();
|
||||
if (flow.getInputs() != null) {
|
||||
flow.getInputs().stream()
|
||||
.filter(input -> input.getDefaults() != null)
|
||||
.forEach(input -> allInputs.put(input.getId(), input.getDefaults()));
|
||||
}
|
||||
|
||||
if (inputs != null) {
|
||||
allInputs.putAll(inputs);
|
||||
}
|
||||
|
||||
// add inputs and inject defaults
|
||||
if (!allInputs.isEmpty()) {
|
||||
FlowInputOutput flowInputOutput = ((DefaultRunContext)runContext).getApplicationContext().getBean(FlowInputOutput.class);
|
||||
execution = execution.withInputs(flowInputOutput.readExecutionInputs(conditionContext.getFlow(), execution, allInputs));
|
||||
}
|
||||
// add inputs and inject defaults (FlowInputOutput handles defaults internally)
|
||||
FlowInputOutput flowInputOutput = ((DefaultRunContext)runContext).getApplicationContext().getBean(FlowInputOutput.class);
|
||||
execution = execution.withInputs(flowInputOutput.readExecutionInputs(conditionContext.getFlow(), execution, allInputs));
|
||||
|
||||
return execution;
|
||||
}
|
||||
|
||||
@@ -1,36 +1,35 @@
|
||||
package io.kestra.core.queues;
|
||||
|
||||
import io.kestra.core.executor.command.ExecutionCommand;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.executions.ExecutionKilled;
|
||||
import io.kestra.core.models.executions.LogEntry;
|
||||
import io.kestra.core.models.executions.MetricEntry;
|
||||
import io.kestra.core.models.flows.FlowInterface;
|
||||
import io.kestra.core.models.templates.Template;
|
||||
import io.kestra.core.models.triggers.Trigger;
|
||||
import io.kestra.core.runners.*;
|
||||
|
||||
public interface QueueFactoryInterface {
|
||||
String EXECUTION_NAMED = "executionQueue";
|
||||
String EXECUTOR_NAMED = "executorQueue";
|
||||
String EXECUTION_EVENT_NAMED = "executionEventQueue";
|
||||
String WORKERJOB_NAMED = "workerJobQueue";
|
||||
String WORKERTASKRESULT_NAMED = "workerTaskResultQueue";
|
||||
String WORKERTRIGGERRESULT_NAMED = "workerTriggerResultQueue";
|
||||
String FLOW_NAMED = "flowQueue";
|
||||
String TEMPLATE_NAMED = "templateQueue";
|
||||
String WORKERTASKLOG_NAMED = "workerTaskLogQueue";
|
||||
String METRIC_QUEUE = "workerTaskMetricQueue";
|
||||
String KILL_NAMED = "executionKilledQueue";
|
||||
String WORKERINSTANCE_NAMED = "workerInstanceQueue";
|
||||
String WORKERJOBRUNNING_NAMED = "workerJobRunningQueue";
|
||||
String TRIGGER_NAMED = "triggerQueue";
|
||||
String SUBFLOWEXECUTIONRESULT_NAMED = "subflowExecutionResultQueue";
|
||||
String CLUSTER_EVENT_NAMED = "clusterEventQueue";
|
||||
String SUBFLOWEXECUTIONEND_NAMED = "subflowExecutionEndQueue";
|
||||
String MULTIPLE_CONDITION_EVENT_NAMED = "multipleConditionEventQueue";
|
||||
String EXECUTION_COMMAND_NAMED = "executionCommandQueue";
|
||||
|
||||
QueueInterface<Execution> execution();
|
||||
|
||||
QueueInterface<Executor> executor();
|
||||
QueueInterface<ExecutionEvent> executionEvent();
|
||||
|
||||
QueueInterface<ExecutionCommand> executionCommand();
|
||||
|
||||
WorkerJobQueueInterface workerJob();
|
||||
|
||||
@@ -46,14 +45,8 @@ public interface QueueFactoryInterface {
|
||||
|
||||
QueueInterface<ExecutionKilled> kill();
|
||||
|
||||
QueueInterface<Template> template();
|
||||
|
||||
QueueInterface<WorkerInstance> workerInstance();
|
||||
|
||||
QueueInterface<WorkerJobRunning> workerJobRunning();
|
||||
|
||||
QueueInterface<Trigger> trigger();
|
||||
|
||||
QueueInterface<SubflowExecutionResult> subflowExecutionResult();
|
||||
|
||||
QueueInterface<SubflowExecutionEnd> subflowExecutionEnd();
|
||||
|
||||
@@ -35,6 +35,24 @@ public interface QueueInterface<T> extends Closeable, Pauseable {
|
||||
|
||||
void delete(String consumerGroup, T message) throws QueueException;
|
||||
|
||||
/**
|
||||
* Delete all messages of the queue for this key.
|
||||
* This is used to purge a queue for a specific key.
|
||||
* A queue implementation may omit to implement it and purge records differently.
|
||||
*/
|
||||
default void deleteByKey(String key) throws QueueException {
|
||||
// by default do nothing
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete all messages of the queue for a set of keys.
|
||||
* This is used to purge a queue for specific keys.
|
||||
* A queue implementation may omit to implement it and purge records differently.
|
||||
*/
|
||||
default void deleteByKeys(List<String> keys) throws QueueException {
|
||||
// by default do nothing
|
||||
}
|
||||
|
||||
default Runnable receive(Consumer<Either<T, DeserializationException>> consumer) {
|
||||
return receive(null, consumer, false);
|
||||
}
|
||||
@@ -54,4 +72,20 @@ public interface QueueInterface<T> extends Closeable, Pauseable {
|
||||
}
|
||||
|
||||
Runnable receive(String consumerGroup, Class<?> queueType, Consumer<Either<T, DeserializationException>> consumer, boolean forUpdate);
|
||||
|
||||
default Runnable receiveBatch(Class<?> queueType, Consumer<List<Either<T, DeserializationException>>> consumer) {
|
||||
return receiveBatch(null, queueType, consumer);
|
||||
}
|
||||
|
||||
default Runnable receiveBatch(String consumerGroup, Class<?> queueType, Consumer<List<Either<T, DeserializationException>>> consumer) {
|
||||
return receiveBatch(consumerGroup, queueType, consumer, true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Consumer a batch of messages.
|
||||
* By default, it consumes a single message, a queue implementation may implement it to support batch consumption.
|
||||
*/
|
||||
default Runnable receiveBatch(String consumerGroup, Class<?> queueType, Consumer<List<Either<T, DeserializationException>>> consumer, boolean forUpdate) {
|
||||
return receive(consumerGroup, either -> consumer.accept(List.of(either)), forUpdate);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -19,12 +19,8 @@ public class QueueService {
|
||||
return ((SubflowExecution<?>) object).getExecution().getId();
|
||||
} else if (object.getClass() == SubflowExecutionResult.class) {
|
||||
return ((SubflowExecutionResult) object).getExecutionId();
|
||||
} else if (object.getClass() == ExecutorState.class) {
|
||||
return ((ExecutorState) object).getExecutionId();
|
||||
} else if (object.getClass() == Setting.class) {
|
||||
return ((Setting) object).getKey();
|
||||
} else if (object.getClass() == Executor.class) {
|
||||
return ((Executor) object).getExecution().getId();
|
||||
} else if (object.getClass() == MetricEntry.class) {
|
||||
return null;
|
||||
} else if (object.getClass() == SubflowExecutionEnd.class) {
|
||||
|
||||
@@ -0,0 +1,25 @@
|
||||
package io.kestra.core.repositories;

import io.kestra.core.runners.ConcurrencyLimit;
import jakarta.validation.constraints.NotNull;

import java.util.List;
import java.util.Optional;

/**
 * Repository contract for reading and administering flow concurrency limits.
 */
public interface ConcurrencyLimitRepositoryInterface {
    /**
     * Update a concurrency limit.
     * WARNING: this is inherently unsafe and must only be used for administration.
     *
     * @param concurrencyLimit the concurrency limit to persist
     * @return the updated concurrency limit
     */
    ConcurrencyLimit update(ConcurrencyLimit concurrencyLimit);

    /**
     * Returns all concurrency limits from the database for a given tenant.
     *
     * @param tenantId the tenant identifier
     * @return every concurrency limit belonging to the tenant
     */
    List<ConcurrencyLimit> find(String tenantId);

    /**
     * Find a concurrency limit by its id.
     *
     * @param tenantId  the tenant identifier
     * @param namespace the flow namespace
     * @param flowId    the flow identifier
     * @return the matching concurrency limit, or empty if none exists
     */
    Optional<ConcurrencyLimit> findById(@NotNull String tenantId, @NotNull String namespace, @NotNull String flowId);
}
|
||||
@@ -1,5 +1,6 @@
|
||||
package io.kestra.core.repositories;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import io.kestra.core.models.QueryFilter;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.executions.statistics.DailyExecutionStatistics;
|
||||
@@ -7,6 +8,7 @@ import io.kestra.core.models.executions.statistics.ExecutionCount;
|
||||
import io.kestra.core.models.executions.statistics.Flow;
|
||||
import io.kestra.core.models.flows.FlowScope;
|
||||
import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.models.triggers.TriggerId;
|
||||
import io.kestra.core.utils.DateUtils;
|
||||
import io.kestra.plugin.core.dashboard.data.Executions;
|
||||
import io.micronaut.data.model.Pageable;
|
||||
@@ -23,7 +25,7 @@ import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.function.Function;
|
||||
|
||||
public interface ExecutionRepositoryInterface extends SaveRepositoryInterface<Execution>, QueryBuilderInterface<Executions.Fields> {
|
||||
public interface ExecutionRepositoryInterface extends QueryBuilderInterface<Executions.Fields> {
|
||||
default Optional<Execution> findById(String tenantId, String id) {
|
||||
return findById(tenantId, id, false);
|
||||
}
|
||||
@@ -35,7 +37,7 @@ public interface ExecutionRepositoryInterface extends SaveRepositoryInterface<Ex
|
||||
ArrayListTotal<Execution> findByFlowId(String tenantId, String namespace, String id, Pageable pageable);
|
||||
|
||||
/**
|
||||
* Finds all the executions that was triggered by the given execution id.
|
||||
* Finds all the executions that were triggered by the given execution id.
|
||||
*
|
||||
* @param tenantId the tenant id.
|
||||
* @param triggerExecutionId the id of the execution trigger.
|
||||
@@ -43,6 +45,14 @@ public interface ExecutionRepositoryInterface extends SaveRepositoryInterface<Ex
|
||||
*/
|
||||
Flux<Execution> findAllByTriggerExecutionId(String tenantId, String triggerExecutionId);
|
||||
|
||||
/**
|
||||
* Finds all the executions that were triggered by the given trigger.
|
||||
*
|
||||
* @param triggerId the trigger id.
|
||||
* @return a {@link Flux} of one or more executions.
|
||||
*/
|
||||
Flux<Execution> findAllByTrigger(TriggerId triggerId);
|
||||
|
||||
/**
|
||||
* Finds the latest execution for the given flow and state (NOTE(review): original sentence was truncated — confirm intended wording).
|
||||
*
|
||||
@@ -95,6 +105,10 @@ public interface ExecutionRepositoryInterface extends SaveRepositoryInterface<Ex
|
||||
|
||||
Flux<Execution> findAsync(String tenantId, List<QueryFilter> filters);
|
||||
|
||||
/**
|
||||
* WARNING: this method is only intended to be used in tests.
|
||||
*/
|
||||
@VisibleForTesting
|
||||
Execution delete(Execution execution);
|
||||
|
||||
Integer purge(Execution execution);
|
||||
@@ -140,10 +154,11 @@ public interface ExecutionRepositoryInterface extends SaveRepositoryInterface<Ex
|
||||
@Nullable ZonedDateTime endDate,
|
||||
@Nullable List<String> namespaces);
|
||||
|
||||
/**
|
||||
* WARNING: this method is only intended to be used in tests or inside the BackupService.
|
||||
*/
|
||||
Execution save(Execution execution);
|
||||
|
||||
Execution update(Execution execution);
|
||||
|
||||
default Function<String, String> sortMapping() throws IllegalArgumentException {
|
||||
return s -> s;
|
||||
}
|
||||
|
||||
@@ -161,10 +161,22 @@ public interface FlowRepositoryInterface extends QueryBuilderInterface<Flows.Fie
|
||||
|
||||
Flux<Flow> findAsync(String tenantId, List<QueryFilter> filters);
|
||||
|
||||
/**
|
||||
* Create a flow.
|
||||
* It should not be called directly but instead <code>FlowService.create(GenericFlow flow)</code> should be used as it re-computes topology and triggers.
|
||||
*/
|
||||
FlowWithSource create(GenericFlow flow);
|
||||
|
||||
/**
|
||||
* Update a flow.
|
||||
* It should not be called directly but instead <code>FlowService.update(GenericFlow flow)</code> should be used as it re-computes topology and triggers.
|
||||
*/
|
||||
FlowWithSource update(GenericFlow flow, FlowInterface previous) throws ConstraintViolationException;
|
||||
|
||||
/**
|
||||
* Delete a flow.
|
||||
* It should not be called directly but instead <code>FlowService.delete(GenericFlow flow)</code> should be used as it re-computes topology and triggers.
|
||||
*/
|
||||
FlowWithSource delete(FlowInterface flow);
|
||||
|
||||
Boolean existAnyNoAcl(String tenantId);
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user