Mirror of https://github.com/kestra-io/kestra.git (synced 2025-12-26 14:00:23 -05:00)

Compare commits — 73 Commits
timeline-f ... try-to-tes
| SHA1 |
|---|
| 55458a0428 |
| b294457953 |
| 02d9c589fb |
| 6340d1c72f |
| f439bd53d7 |
| e54e3d5308 |
| d084f2cd26 |
| 015960c78e |
| 39a09ecb67 |
| 45ce878d65 |
| 3ee647b9a8 |
| 7a7cb006bf |
| 911e6d5705 |
| bf1458dde7 |
| bd31e0eebd |
| de02e4dd70 |
| ec235b91fc |
| ff1efa9958 |
| e43c8ce387 |
| 2bd4e82b42 |
| e63d6d1d86 |
| a9752e65f2 |
| 151c56f3de |
| d562ce3e65 |
| 681386a05b |
| 51ddfaf155 |
| caee0a293f |
| ba92880fa3 |
| 36b27510fb |
| da2907e096 |
| 9b40665e64 |
| 0d35b5b355 |
| 339eb79854 |
| 0ee753529b |
| 84668fdfb9 |
| 9802f046e8 |
| 848b4d6577 |
| 1159bc5eb9 |
| 9b7ef37d14 |
| 89dfd18658 |
| 13ed2252bc |
| c73b103bb3 |
| 396a077942 |
| 68e6fa2a4c |
| a18748b3b2 |
| 236fcff7b4 |
| cbbd697732 |
| 6b84737651 |
| 6ee7ecbd6b |
| 504f925085 |
| 7d37d2be93 |
| 94751a3b21 |
| ba83b91680 |
| 56f62fb89f |
| a0efe4b1f3 |
| 9af6338ae5 |
| d53b933bdf |
| a35c2816c5 |
| d88eb9974c |
| af3d5a384a |
| e9ad352ccf |
| 1a95b83fb7 |
| 095939ff7a |
| 094f523874 |
| c7efb2514a |
| 887537d8c1 |
| 0630b741b9 |
| d2b7e723e1 |
| 080ceadf37 |
| a89d902bc2 |
| e2ef7d412a |
| 54c667ec4b |
| 1c53758d33 |
32 · .github/workflows/kestra-devtools-test.yml (vendored)
@@ -1,32 +0,0 @@
name: kestra-devtools test

on:
  pull_request:
    branches:
      - develop
    paths:
      - 'dev-tools/kestra-devtools/**'

env:
  # to save corepack from itself
  COREPACK_INTEGRITY_KEYS: 0

jobs:
  test:
    name: kestra-devtools tests
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v5

      - name: Npm - install
        working-directory: 'dev-tools/kestra-devtools'
        run: npm ci

      - name: Run tests
        working-directory: 'dev-tools/kestra-devtools'
        run: npm run test

      - name: Npm - Run build
        working-directory: 'dev-tools/kestra-devtools'
        run: npm run build

1 · .github/workflows/pull-request.yml (vendored)
@@ -4,6 +4,7 @@ on:
  pull_request:
    branches:
      - develop
      - releases/*

concurrency:
  group: ${{ github.workflow }}-${{ github.ref_name }}-pr
87 · .github/workflows/workflow-backend-test.yml (vendored)
@@ -64,88 +64,11 @@ jobs:
        if: ${{ !cancelled() && github.event_name == 'pull_request' }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_AUTH_TOKEN }}
        run: |
          export KESTRA_PWD=$(pwd) && sh -c 'cd dev-tools/kestra-devtools && npm ci && npm run build && node dist/kestra-devtools-cli.cjs generateTestReportSummary --only-errors --ci $KESTRA_PWD' > report.md
          cat report.md
        run: npx --yes @kestra-io/kestra-devtools generateTestReportSummary --only-errors --ci $(pwd)

      # report test
      - name: Test - Publish Test Results
        uses: dorny/test-reporter@v2
        if: always()
        with:
          name: Java Tests Report
          reporter: java-junit
          path: '**/build/test-results/test/TEST-*.xml'
          list-suites: 'failed'
          list-tests: 'failed'
          fail-on-error: 'false'
          token: ${{ secrets.GITHUB_AUTH_TOKEN }}

      # Sonar
      - name: Test - Analyze with Sonar
        if: env.SONAR_TOKEN != ''
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_AUTH_TOKEN }}
          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
        shell: bash
        run: ./gradlew sonar --info

      # GCP
      - name: GCP - Auth with unit test account
        id: auth
        if: always() && env.GOOGLE_SERVICE_ACCOUNT != ''
        continue-on-error: true
        uses: "google-github-actions/auth@v3"
        with:
          credentials_json: "${{ secrets.GOOGLE_SERVICE_ACCOUNT }}"

      - name: GCP - Setup Cloud SDK
        if: env.GOOGLE_SERVICE_ACCOUNT != ''
        uses: "google-github-actions/setup-gcloud@v3"

      # Allure check
      - uses: rlespinasse/github-slug-action@v5
        name: Allure - Generate slug variables

      - name: Allure - Publish report
        uses: andrcuns/allure-publish-action@v2.9.0
        if: always() && env.GOOGLE_SERVICE_ACCOUNT != ''
        continue-on-error: true
        env:
          GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_AUTH_TOKEN }}
          JAVA_HOME: /usr/lib/jvm/default-jvm/
        with:
          storageType: gcs
          resultsGlob: "**/build/allure-results"
          bucket: internal-kestra-host
          baseUrl: "https://internal.dev.kestra.io"
          prefix: ${{ format('{0}/{1}', github.repository, 'allure/java') }}
          copyLatest: true
          ignoreMissingResults: true

      # Jacoco
      - name: Jacoco - Copy reports
        if: env.GOOGLE_SERVICE_ACCOUNT != ''
        continue-on-error: true
        shell: bash
        run: |
          mv build/reports/jacoco/testCodeCoverageReport build/reports/jacoco/test/
          mv build/reports/jacoco/test/testCodeCoverageReport.xml build/reports/jacoco/test/jacocoTestReport.xml
          gsutil -m rsync -d -r build/reports/jacoco/test/ gs://internal-kestra-host/${{ format('{0}/{1}', github.repository, 'jacoco') }}

      # Codecov
      - name: Codecov - Upload coverage reports
        uses: codecov/codecov-action@v5
      # Report Java
      - name: Report - Java
        uses: kestra-io/actions/composite/report-java@main
        if: ${{ !cancelled() }}
        continue-on-error: true
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          flags: backend

      - name: Codecov - Upload test results
        uses: codecov/test-results-action@v1
        if: ${{ !cancelled() }}
        continue-on-error: true
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          flags: backend
    secrets: ${{ toJSON(secrets) }}
@@ -33,10 +33,10 @@
<p align="center">
  <a href="https://go.kestra.io/video/product-overview" target="_blank">
    <img src="https://kestra.io/startvideo.png" alt="Get started in 4 minutes with Kestra" width="640px" />
    <img src="https://kestra.io/startvideo.png" alt="Get started in 3 minutes with Kestra" width="640px" />
  </a>
</p>
<p align="center" style="color:grey;"><i>Click on the image to learn how to get started with Kestra in 4 minutes.</i></p>
<p align="center" style="color:grey;"><i>Click on the image to learn how to get started with Kestra in 3 minutes.</i></p>

## 🌟 What is Kestra?
@@ -232,7 +232,7 @@ subprojects {subProj ->
environment 'ENV_TEST2', "Pass by env"

if (subProj.name == 'core') {
if (subProj.name == 'core' || subProj.name == 'jdbc-h2' || subProj.name == 'jdbc-mysql' || subProj.name == 'jdbc-postgres') {
// JUnit 5 parallel settings
systemProperty 'junit.jupiter.execution.parallel.enabled', 'true'
systemProperty 'junit.jupiter.execution.parallel.mode.default', 'concurrent'
@@ -49,7 +49,7 @@ import java.util.concurrent.Callable;
@Introspected
public class App implements Callable<Integer> {
public static void main(String[] args) {
execute(App.class, args);
execute(App.class, new String [] { Environment.CLI }, args);
}

@Override
@@ -57,13 +57,13 @@ public class App implements Callable<Integer> {
return PicocliRunner.call(App.class, "--help");
}

protected static void execute(Class<?> cls, String... args) {
protected static void execute(Class<?> cls, String[] environments, String... args) {
// Log Bridge
SLF4JBridgeHandler.removeHandlersForRootLogger();
SLF4JBridgeHandler.install();

// Init ApplicationContext
ApplicationContext applicationContext = App.applicationContext(cls, args);
ApplicationContext applicationContext = App.applicationContext(cls, environments, args);

// Call Picocli command
int exitCode = 0;
@@ -80,17 +80,6 @@ public class App implements Callable<Integer> {
System.exit(Objects.requireNonNullElse(exitCode, 0));
}

/**
 * Create an {@link ApplicationContext} with additional properties based on configuration files (--config) and
 * forced Properties from current command.
 *
 * @param args args passed to java app
 * @return the application context created
 */
protected static ApplicationContext applicationContext(Class<?> mainClass,
String[] args) {
return applicationContext(mainClass, new String [] { Environment.CLI }, args);
}

/**
 * Create an {@link ApplicationContext} with additional properties based on configuration files (--config) and
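The `execute` overload above now takes an explicit array of Micronaut environment names and forwards it when the `ApplicationContext` is created, with `Environment.CLI` used as the default. A minimal sketch of what activating that environment looks like with Micronaut's standard API (class name and printed output are illustrative, not taken from the repository):

```java
import io.micronaut.context.ApplicationContext;
import io.micronaut.context.env.Environment;

public class CliEnvironmentSketch {
    public static void main(String[] args) {
        // Start a context explicitly scoped to Micronaut's CLI environment,
        // which is what App.execute(cls, new String[]{Environment.CLI}, args) now forces by default.
        try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI)) {
            System.out.println(ctx.getEnvironment().getActiveNames()); // contains "cli"
        }
    }
}
```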
@@ -167,6 +167,8 @@ kestra:
open-urls:
  - "/ping"
  - "/api/v1/executions/webhook/"
  - "/api/v1/main/executions/webhook/"
  - "/api/v1/*/executions/webhook/"

preview:
  initial-rows: 100
@@ -37,7 +37,7 @@ class AppTest {
final String[] args = new String[]{"server", serverType, "--help"};

try (ApplicationContext ctx = App.applicationContext(App.class, args)) {
try (ApplicationContext ctx = App.applicationContext(App.class, new String [] { Environment.CLI }, args)) {
new CommandLine(App.class, new MicronautFactory(ctx)).execute(args);

assertTrue(ctx.getProperty("kestra.server-type", ServerType.class).isEmpty());
@@ -52,7 +52,7 @@ class AppTest {
final String[] argsWithMissingParams = new String[]{"flow", "namespace", "update"};

try (ApplicationContext ctx = App.applicationContext(App.class, argsWithMissingParams)) {
try (ApplicationContext ctx = App.applicationContext(App.class, new String [] { Environment.CLI }, argsWithMissingParams)) {
new CommandLine(App.class, new MicronautFactory(ctx)).execute(argsWithMissingParams);

assertThat(out.toString()).startsWith("Missing required parameters: ");
@@ -254,19 +254,7 @@ public record QueryFilter(
 *
 * @return List of {@code ResourceField} with resource names, fields, and operations.
 */
public static List<ResourceField> asResourceList() {
return Arrays.stream(values())
.map(Resource::toResourceField)
.toList();
}

private static ResourceField toResourceField(Resource resource) {
List<FieldOp> fieldOps = resource.supportedField().stream()
.map(Resource::toFieldInfo)
.toList();
return new ResourceField(resource.name().toLowerCase(), fieldOps);
}

private static FieldOp toFieldInfo(Field field) {
List<Operation> operations = field.supportedOp().stream()
.map(Resource::toOperation)
@@ -279,9 +267,6 @@ public record QueryFilter(
}
}

public record ResourceField(String name, List<FieldOp> fields) {
}

public record FieldOp(String name, String value, List<Operation> operations) {
}
@@ -17,31 +17,12 @@ import java.util.List;
@Introspected
public class ExecutionUsage {
private final List<DailyExecutionStatistics> dailyExecutionsCount;
private final List<DailyExecutionStatistics> dailyTaskRunsCount;

public static ExecutionUsage of(final String tenantId,
final ExecutionRepositoryInterface executionRepository,
final ZonedDateTime from,
final ZonedDateTime to) {

List<DailyExecutionStatistics> dailyTaskRunsCount = null;

try {
dailyTaskRunsCount = executionRepository.dailyStatistics(
null,
tenantId,
null,
null,
null,
from,
to,
DateUtils.GroupType.DAY,
null,
true);
} catch (UnsupportedOperationException ignored) {

}

return ExecutionUsage.builder()
.dailyExecutionsCount(executionRepository.dailyStatistics(
null,
@@ -52,28 +33,13 @@ public class ExecutionUsage {
from,
to,
DateUtils.GroupType.DAY,
null,
false))
.dailyTaskRunsCount(dailyTaskRunsCount)
null))
.build();
}

public static ExecutionUsage of(final ExecutionRepositoryInterface repository,
final ZonedDateTime from,
final ZonedDateTime to) {
List<DailyExecutionStatistics> dailyTaskRunsCount = null;
try {
dailyTaskRunsCount = repository.dailyStatisticsForAllTenants(
null,
null,
null,
from,
to,
DateUtils.GroupType.DAY,
true
);
} catch (UnsupportedOperationException ignored) {}

return ExecutionUsage.builder()
.dailyExecutionsCount(repository.dailyStatisticsForAllTenants(
null,
@@ -81,10 +47,8 @@ public class ExecutionUsage {
null,
from,
to,
DateUtils.GroupType.DAY,
false
DateUtils.GroupType.DAY
))
.dailyTaskRunsCount(dailyTaskRunsCount)
.build();
}
}
@@ -185,34 +185,6 @@ public class Trigger extends TriggerContext implements HasUID {
.build();
}

public static Trigger update(Trigger currentTrigger, Trigger newTrigger, ZonedDateTime nextExecutionDate) throws Exception {
Trigger updated = currentTrigger;

// If a backfill is created, we update the currentTrigger
// and set the nextExecutionDate() as the previous one
if (newTrigger.getBackfill() != null) {
updated = currentTrigger.toBuilder()
.backfill(
newTrigger
.getBackfill()
.toBuilder()
.end(newTrigger.getBackfill().getEnd() != null ? newTrigger.getBackfill().getEnd() : ZonedDateTime.now())
.currentDate(
newTrigger.getBackfill().getStart()
)
.previousNextExecutionDate(
currentTrigger.getNextExecutionDate())
.build())
.build();
}

return updated.toBuilder()
.nextExecutionDate(newTrigger.getDisabled() ?
null : nextExecutionDate)
.disabled(newTrigger.getDisabled())
.build();
}

public Trigger resetExecution(Flow flow, Execution execution, ConditionContext conditionContext) {
boolean disabled = this.getStopAfter() != null ? this.getStopAfter().contains(execution.getState().getCurrent()) : this.getDisabled();
if (!disabled) {
@@ -276,27 +248,22 @@ public class Trigger extends TriggerContext implements HasUID {
.build();
}

public Trigger initBackfill(Trigger newTrigger) {
// If a backfill is created, we update the currentTrigger
public Trigger withBackfill(final Backfill backfill) {
Trigger updated = this;
// If a backfill is created, we update the trigger
// and set the nextExecutionDate() as the previous one
if (newTrigger.getBackfill() != null) {

return this.toBuilder()
if (backfill != null) {
updated = this.toBuilder()
.backfill(
newTrigger
.getBackfill()
backfill
.toBuilder()
.end(newTrigger.getBackfill().getEnd() != null ? newTrigger.getBackfill().getEnd() : ZonedDateTime.now())
.currentDate(
newTrigger.getBackfill().getStart()
)
.previousNextExecutionDate(
this.getNextExecutionDate())
.end(backfill.getEnd() != null ? backfill.getEnd() : ZonedDateTime.now())
.currentDate(backfill.getStart())
.previousNextExecutionDate(this.getNextExecutionDate())
.build())
.build();
}

return this;
return updated;
}

// if the next date is after the backfill end, we remove the backfill
@@ -144,7 +144,7 @@ public final class PluginDeserializer<T extends Plugin> extends JsonDeserializer
static String extractPluginRawIdentifier(final JsonNode node, final boolean isVersioningSupported) {
String type = Optional.ofNullable(node.get(TYPE)).map(JsonNode::textValue).orElse(null);
String version = Optional.ofNullable(node.get(VERSION)).map(JsonNode::textValue).orElse(null);
String version = Optional.ofNullable(node.get(VERSION)).map(JsonNode::asText).orElse(null);

if (type == null || type.isEmpty()) {
return null;
@@ -25,8 +25,6 @@ import java.util.Optional;
import java.util.function.Function;

public interface ExecutionRepositoryInterface extends SaveRepositoryInterface<Execution>, QueryBuilderInterface<Executions.Fields> {
Boolean isTaskRunEnabled();

default Optional<Execution> findById(String tenantId, String id) {
return findById(tenantId, id, false);
}
@@ -96,12 +94,6 @@ public interface ExecutionRepositoryInterface extends SaveRepositoryInterface<Ex
Flux<Execution> findAllAsync(@Nullable String tenantId);

ArrayListTotal<TaskRun> findTaskRun(
Pageable pageable,
@Nullable String tenantId,
List<QueryFilter> filters
);

Execution delete(Execution execution);

Integer purge(Execution execution);
@@ -112,8 +104,7 @@ public interface ExecutionRepositoryInterface extends SaveRepositoryInterface<Ex
@Nullable String flowId,
@Nullable ZonedDateTime startDate,
@Nullable ZonedDateTime endDate,
@Nullable DateUtils.GroupType groupBy,
boolean isTaskRun
@Nullable DateUtils.GroupType groupBy
);

List<DailyExecutionStatistics> dailyStatistics(
@@ -125,8 +116,7 @@ public interface ExecutionRepositoryInterface extends SaveRepositoryInterface<Ex
@Nullable ZonedDateTime startDate,
@Nullable ZonedDateTime endDate,
@Nullable DateUtils.GroupType groupBy,
List<State.Type> state,
boolean isTaskRun
List<State.Type> state
);

@Getter
@@ -45,7 +45,7 @@ final class Secret {
for (var entry: data.entrySet()) {
if (entry.getValue() instanceof Map map) {
// if some value are of type EncryptedString we decode them and replace the object
if (EncryptedString.TYPE.equalsIgnoreCase((String)map.get("type"))) {
if (map.get("type") instanceof String typeStr && EncryptedString.TYPE.equalsIgnoreCase(typeStr)) {
try {
String decoded = decrypt((String) map.get("value"));
decryptedMap.put(entry.getKey(), decoded);
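The change above replaces an unchecked `(String)` cast with a pattern-matching `instanceof`, so a non-string `type` entry no longer throws `ClassCastException` during decryption. A self-contained sketch of the difference, using an illustrative `"ENCRYPTED"` literal in place of `EncryptedString.TYPE`:

```java
import java.util.Map;

class PatternMatchSketch {
    static boolean isEncrypted(Map<String, ?> map) {
        // Unchecked cast: throws ClassCastException when "type" is not a String.
        // return "ENCRYPTED".equalsIgnoreCase((String) map.get("type"));

        // Pattern-matching instanceof: evaluates to false for non-String values instead of throwing.
        return map.get("type") instanceof String typeStr && "ENCRYPTED".equalsIgnoreCase(typeStr);
    }

    public static void main(String[] args) {
        System.out.println(isEncrypted(Map.of("type", "encrypted"))); // true
        System.out.println(isEncrypted(Map.of("type", 42)));          // false, no exception
    }
}
```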
@@ -168,6 +168,7 @@ public class Extension extends AbstractExtension {
functions.put("randomPort", new RandomPortFunction());
functions.put("fileExists", fileExistsFunction);
functions.put("isFileEmpty", isFileEmptyFunction);
functions.put("nanoId", new NanoIDFunction());
functions.put("tasksWithState", new TasksWithStateFunction());
functions.put(HttpFunction.NAME, httpFunction);
return functions;
@@ -0,0 +1,66 @@
package io.kestra.core.runners.pebble.functions;

import io.pebbletemplates.pebble.error.PebbleException;
import io.pebbletemplates.pebble.extension.Function;
import io.pebbletemplates.pebble.template.EvaluationContext;
import io.pebbletemplates.pebble.template.PebbleTemplate;

import java.security.SecureRandom;
import java.util.List;
import java.util.Map;

public class NanoIDFunction implements Function {

private static final int DEFAULT_LENGTH = 21;
private static final char[] DEFAULT_ALPHABET = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-_".toCharArray();
private static final SecureRandom secureRandom = new SecureRandom();

private static final String LENGTH = "length";
private static final String ALPHABET = "alphabet";

private static final int MAX_LENGTH = 1000;

@Override
public Object execute(
Map<String, Object> args, PebbleTemplate self, EvaluationContext context, int lineNumber) {
int length = DEFAULT_LENGTH;
if (args.containsKey(LENGTH) && (args.get(LENGTH) instanceof Long)) {
length = parseLength(args, self, lineNumber);
}
char[] alphabet = DEFAULT_ALPHABET;
if (args.containsKey(ALPHABET) && (args.get(ALPHABET) instanceof String)) {
alphabet = ((String) args.get(ALPHABET)).toCharArray();
}
return createNanoID(length, alphabet);
}

private static int parseLength(Map<String, Object> args, PebbleTemplate self, int lineNumber) {
var value = (Long) args.get(LENGTH);
if(value > MAX_LENGTH) {
throw new PebbleException(
null,
"The 'nanoId()' function field 'length' must be lower than: " + MAX_LENGTH,
lineNumber,
self.getName());
}
return Math.toIntExact(value);
}

@Override
public List<String> getArgumentNames() {
return List.of(LENGTH,ALPHABET);
}

String createNanoID(int length, char[] alphabet){
final char[] data = new char[length];
final byte[] bytes = new byte[length];
final int mask = alphabet.length-1;
secureRandom.nextBytes(bytes);
for (int i = 0; i < length; ++i) {
data[i] = alphabet[bytes[i] & mask];
}
return String.valueOf(data);
}

}
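The new function is registered under the name `nanoId` in the `Extension` hunk above, so it becomes callable from expressions such as `{{ nanoId() }}`. Its generator simply masks random bytes into the alphabet. A standalone sketch of the same generation logic, runnable without Pebble (all names here are mine, not the repository's):

```java
import java.security.SecureRandom;

class NanoIdSketch {
    private static final char[] ALPHABET =
        "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-_".toCharArray();
    private static final SecureRandom RANDOM = new SecureRandom();

    // Same masking approach as NanoIDFunction#createNanoID: each random byte is reduced to an
    // index with "& (alphabet.length - 1)", which stays uniform because the default alphabet
    // has 64 characters (a power of two).
    static String nanoId(int length) {
        byte[] bytes = new byte[length];
        RANDOM.nextBytes(bytes);
        char[] out = new char[length];
        for (int i = 0; i < length; i++) {
            out[i] = ALPHABET[bytes[i] & (ALPHABET.length - 1)];
        }
        return String.valueOf(out);
    }

    public static void main(String[] args) {
        System.out.println(nanoId(21)); // e.g. "V1StGXR8_Z5jdHi6B-myT"
    }
}
```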
@@ -163,21 +163,21 @@ public final class JacksonMapper {
.build();
}

public static Pair<JsonNode, JsonNode> getBiDirectionalDiffs(Object previous, Object current) {
JsonNode previousJson = MAPPER.valueToTree(previous);
JsonNode newJson = MAPPER.valueToTree(current);
public static Pair<JsonNode, JsonNode> getBiDirectionalDiffs(Object before, Object after) {
JsonNode beforeNode = MAPPER.valueToTree(before);
JsonNode afterNode = MAPPER.valueToTree(after);

JsonNode patchPrevToNew = JsonDiff.asJson(previousJson, newJson);
JsonNode patchNewToPrev = JsonDiff.asJson(newJson, previousJson);
JsonNode patch = JsonDiff.asJson(beforeNode, afterNode);
JsonNode revert = JsonDiff.asJson(afterNode, beforeNode);

return Pair.of(patchPrevToNew, patchNewToPrev);
return Pair.of(patch, revert);
}

public static JsonNode applyPatchesOnJsonNode(JsonNode jsonObject, List<JsonNode> patches) {
for (JsonNode patch : patches) {
try {
// Required for ES
if (patch.findValue("value") == null) {
if (patch.findValue("value") == null && !patch.isEmpty()) {
((ObjectNode) patch.get(0)).set("value", null);
}
jsonObject = JsonPatch.fromJson(patch).apply(jsonObject);
@@ -1,5 +1,6 @@
package io.kestra.core.storages.kv;

import lombok.EqualsAndHashCode;
import lombok.Getter;

import java.time.Duration;
@@ -9,6 +10,7 @@ import java.util.Map;
import java.util.Optional;

@Getter
@EqualsAndHashCode
public class KVMetadata {
private String description;
private Instant expirationDate;
@@ -17,14 +19,18 @@
if (ttl != null && ttl.isNegative()) {
throw new IllegalArgumentException("ttl cannot be negative");
}

this.description = description;
if (ttl != null) {
this.expirationDate = Instant.now().plus(ttl);
}
}

public KVMetadata(String description, Instant expirationDate) {
this.description = description;
this.expirationDate = expirationDate;
}

public KVMetadata(Map<String, String> metadata) {
if (metadata == null) {
return;
@@ -46,4 +52,9 @@ public class KVMetadata {
}
return map;
}

@Override
public String toString() {
return "[description=" + description + ", expirationDate=" + expirationDate + "]";
}
}
@@ -4,7 +4,9 @@ import io.kestra.core.exceptions.ResourceExpiredException;
import io.kestra.core.storages.StorageContext;

import java.io.IOException;
import java.io.UncheckedIOException;
import java.net.URI;
import java.time.Duration;
import java.util.List;
import java.util.Optional;
import java.util.regex.Pattern;
@@ -104,8 +106,33 @@ public interface KVStore {
default boolean exists(String key) throws IOException {
return list().stream().anyMatch(kvEntry -> kvEntry.key().equals(key));
}

/**
 * Finds a KV entry with associated metadata for a given key.
 *
 * @param key the KV entry key.
 * @return an optional of {@link KVValueAndMetadata}.
 *
 * @throws UncheckedIOException if an error occurred while executing the operation on the K/V store.
 */
default Optional<KVValueAndMetadata> findMetadataAndValue(final String key) throws UncheckedIOException {
try {
return get(key).flatMap(entry ->
{
try {
return getValue(entry.key()).map(current -> new KVValueAndMetadata(new KVMetadata(entry.description(), entry.expirationDate()), current.value()));
} catch (IOException e) {
throw new UncheckedIOException(e);
} catch (ResourceExpiredException e) {
return Optional.empty();
}
}
);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}

Pattern KEY_VALIDATOR_PATTERN = Pattern.compile("[a-zA-Z0-9][a-zA-Z0-9._-]*");

/**
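The new `findMetadataAndValue` default method has to call IO-throwing methods from inside `Optional.flatMap`, where checked exceptions are not allowed, so it rethrows `IOException` as `UncheckedIOException`. A minimal, self-contained sketch of that wrapping pattern (the `readValue` helper is a stand-in, not part of the Kestra API):

```java
import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.Optional;

class UncheckedIoSketch {
    // Stand-in for a K/V lookup that declares a checked IOException.
    static Optional<String> readValue(String key) throws IOException {
        return Optional.of(key.toUpperCase());
    }

    // Lambdas passed to Optional.flatMap cannot throw checked exceptions, so the checked
    // IOException is rethrown as UncheckedIOException inside the lambda — the same shape
    // used by KVStore#findMetadataAndValue.
    static Optional<String> find(String key) {
        return Optional.of(key).flatMap(k -> {
            try {
                return readValue(k);
            } catch (IOException e) {
                throw new UncheckedIOException(e);
            }
        });
    }

    public static void main(String[] args) {
        System.out.println(find("my-key")); // Optional[MY-KEY]
    }
}
```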
@@ -51,8 +51,8 @@ public class Version implements Comparable<Version> {
strVersion.split("\\.");
try {
final int majorVersion = Integer.parseInt(versions[0]);
final int minorVersion = versions.length > 1 ? Integer.parseInt(versions[1]) : 0;
final int incrementalVersion = versions.length > 2 ? Integer.parseInt(versions[2]) : 0;
final Integer minorVersion = versions.length > 1 ? Integer.parseInt(versions[1]) : null;
final Integer incrementalVersion = versions.length > 2 ? Integer.parseInt(versions[2]) : null;

return new Version(
majorVersion,
@@ -67,44 +67,48 @@ public class Version implements Comparable<Version> {
}

/**
 * Returns the most recent stable version compatible with the given {@link Version}.
 * Resolves the most appropriate stable version from a collection, based on a given input version.
 * <p>
 * The matching rules are:
 * <ul>
 *   <li>If {@code from} specifies only a major version (e.g. {@code 1}), return the latest stable version
 *   with the same major (e.g. {@code 1.2.3}).</li>
 *   <li>If {@code from} specifies a major and minor version only (e.g. {@code 1.2}), return the latest
 *   stable version with the same major and minor (e.g. {@code 1.2.3}).</li>
 *   <li>If {@code from} specifies a full version with major, minor, and patch (e.g. {@code 1.2.2}),
 *   then only return it if it is exactly present (and stable) in {@code versions}.
 *   No "upgrade" is performed in this case.</li>
 *   <li>If no suitable version is found, returns {@code null}.</li>
 * </ul>
 *
 * <p>Resolution strategy:</p>
 * <ol>
 *   <li>If {@code from} is present in {@code versions}, return it directly.</li>
 *   <li>Otherwise, return the latest version with the same major and minor.</li>
 *   <li>If none found and {@code from.majorVersion() > 0}, return the latest with the same major.</li>
 *   <li>Return {@code null} if no compatible version is found.</li>
 * </ol>
 *
 * @param from the current version
 * @param versions available versions
 * @return the most recent compatible stable version, or {@code null} if none
 * @param from the reference version (may specify only major, or major+minor, or major+minor+patch).
 * @param versions the collection of candidate versions to resolve against.
 * @return the best matching stable version, or {@code null} if none match.
 */
public static Version getStable(final Version from, final Collection<Version> versions) {
// Case 1: "from" is only a major (e.g. 1)
if (from.hasOnlyMajor()) {
List<Version> sameMajor = versions.stream()
.filter(v -> v.majorVersion() == from.majorVersion())
.toList();
return sameMajor.isEmpty() ? null : Version.getLatest(sameMajor);
}

// Case 2: "from" is major+minor only (e.g. 1.2)
if (from.hasMajorAndMinorOnly()) {
List<Version> sameMinor = versions.stream()
.filter(v -> v.majorVersion() == from.majorVersion()
&& v.minorVersion() == from.minorVersion())
.toList();
return sameMinor.isEmpty() ? null : Version.getLatest(sameMinor);
}

// Case 3: "from" is full version (major+minor+patch)
if (versions.contains(from)) {
return from;
}

// Prefer same major+minor stable versions
List<Version> sameMinorStable = versions.stream()
.filter(v -> v.majorVersion() == from.majorVersion() && v.minorVersion() == from.minorVersion())
.toList();

if (!sameMinorStable.isEmpty()) {
return Version.getLatest(sameMinorStable);
}

if (from.majorVersion() > 0) {
// Fallback: any stable version with the same major
List<Version> sameMajorStable = versions.stream()
.filter(v -> v.majorVersion() == from.majorVersion())
.toList();

if (!sameMajorStable.isEmpty()) {
return Version.getLatest(sameMajorStable);
}
}
// No match
return null;
}

@@ -155,8 +159,8 @@ public class Version implements Comparable<Version> {
}

private final int majorVersion;
private final int minorVersion;
private final int incrementalVersion;
private final Integer minorVersion;
private final Integer patchVersion;
private final Qualifier qualifier;

private final String originalVersion;
@@ -166,14 +170,14 @@ public class Version implements Comparable<Version> {
 *
 * @param majorVersion the major version (must be superior or equal to 0).
 * @param minorVersion the minor version (must be superior or equal to 0).
 * @param incrementalVersion the incremental version (must be superior or equal to 0).
 * @param patchVersion the incremental version (must be superior or equal to 0).
 * @param qualifier the qualifier.
 */
public Version(final int majorVersion,
final int minorVersion,
final int incrementalVersion,
final int patchVersion,
final String qualifier) {
this(majorVersion, minorVersion, incrementalVersion, qualifier, null);
this(majorVersion, minorVersion, patchVersion, qualifier, null);
}

/**
@@ -181,25 +185,25 @@ public class Version implements Comparable<Version> {
 *
 * @param majorVersion the major version (must be superior or equal to 0).
 * @param minorVersion the minor version (must be superior or equal to 0).
 * @param incrementalVersion the incremental version (must be superior or equal to 0).
 * @param patchVersion the incremental version (must be superior or equal to 0).
 * @param qualifier the qualifier.
 * @param originalVersion the original string version.
 */
private Version(final int majorVersion,
final int minorVersion,
final int incrementalVersion,
private Version(final Integer majorVersion,
final Integer minorVersion,
final Integer patchVersion,
final String qualifier,
final String originalVersion) {
this.majorVersion = requirePositive(majorVersion, "major");
this.minorVersion = requirePositive(minorVersion, "minor");
this.incrementalVersion = requirePositive(incrementalVersion, "incremental");
this.patchVersion = requirePositive(patchVersion, "incremental");
this.qualifier = qualifier != null ? new Qualifier(qualifier) : null;
this.originalVersion = originalVersion;
}

private static int requirePositive(int version, final String message) {
if (version < 0) {
private static Integer requirePositive(Integer version, final String message) {
if (version != null && version < 0) {
throw new IllegalArgumentException(String.format("The '%s' version must super or equal to 0", message));
}
return version;
@@ -210,11 +214,11 @@ public class Version implements Comparable<Version> {
}

public int minorVersion() {
return minorVersion;
return minorVersion != null ? minorVersion : 0;
}

public int incrementalVersion() {
return incrementalVersion;
public int patchVersion() {
return patchVersion != null ? patchVersion : 0;
}

public Qualifier qualifier() {
@@ -229,9 +233,9 @@ public class Version implements Comparable<Version> {
if (this == o) return true;
if (!(o instanceof Version)) return false;
Version version = (Version) o;
return majorVersion == version.majorVersion &&
minorVersion == version.minorVersion &&
incrementalVersion == version.incrementalVersion &&
return Objects.equals(majorVersion,version.majorVersion) &&
Objects.equals(minorVersion, version.minorVersion) &&
Objects.equals(patchVersion,version.patchVersion) &&
Objects.equals(qualifier, version.qualifier);
}

@@ -240,7 +244,7 @@ public class Version implements Comparable<Version> {
 */
@Override
public int hashCode() {
return Objects.hash(majorVersion, minorVersion, incrementalVersion, qualifier);
return Objects.hash(majorVersion, minorVersion, patchVersion, qualifier);
}

/**
@@ -250,7 +254,7 @@ public class Version implements Comparable<Version> {
public String toString() {
if (originalVersion != null) return originalVersion;

String version = majorVersion + "." + minorVersion + "." + incrementalVersion;
String version = majorVersion + "." + minorVersion + "." + patchVersion;
return (qualifier != null) ? version +"-" + qualifier : version;
}

@@ -270,7 +274,7 @@ public class Version implements Comparable<Version> {
return compareMinor;
}

int compareIncremental = Integer.compare(that.incrementalVersion, this.incrementalVersion);
int compareIncremental = Integer.compare(that.patchVersion, this.patchVersion);
if (compareIncremental != 0) {
return compareIncremental;
}
@@ -285,6 +289,21 @@ public class Version implements Comparable<Version> {

return this.qualifier.compareTo(that.qualifier);
}

/**
 * @return true if only major is specified (e.g. "1")
 */
private boolean hasOnlyMajor() {
return minorVersion == null && patchVersion == null;
}

/**
 * @return true if major+minor are specified, but no patch (e.g. "1.2")
 */
private boolean hasMajorAndMinorOnly() {
return minorVersion != null && patchVersion == null;
}

/**
 * Checks whether this version is before the given one.
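The rewritten `getStable` javadoc above spells out three matching rules. A small self-contained sketch of the expected outcomes, using plain `{major, minor, patch}` arrays instead of the actual `Version` class (candidate versions and helper names are made up for illustration):

```java
import java.util.Comparator;
import java.util.List;

class StableResolutionSketch {
    // Candidate versions as {major, minor, patch}.
    static final List<int[]> AVAILABLE = List.of(
        new int[]{1, 1, 0}, new int[]{1, 2, 2}, new int[]{1, 2, 3}, new int[]{2, 0, 1});

    // Rule 1: "1"     (major only)    -> latest 1.x.y    => 1.2.3
    // Rule 2: "1.2"   (major+minor)   -> latest 1.2.y    => 1.2.3
    // Rule 3: "1.2.2" (full version)  -> exact match only => 1.2.2, never a silent upgrade
    static int[] latest(List<int[]> candidates) {
        return candidates.stream()
            .max(Comparator.<int[]>comparingInt(v -> v[0])
                .thenComparingInt(v -> v[1])
                .thenComparingInt(v -> v[2]))
            .orElse(null);
    }

    public static void main(String[] args) {
        // Major only: every candidate with major == 1.
        int[] r1 = latest(AVAILABLE.stream().filter(v -> v[0] == 1).toList());
        System.out.println(r1[0] + "." + r1[1] + "." + r1[2]); // 1.2.3
        // Major + minor: every candidate with major == 1 and minor == 2.
        int[] r2 = latest(AVAILABLE.stream().filter(v -> v[0] == 1 && v[1] == 2).toList());
        System.out.println(r2[0] + "." + r2[1] + "." + r2[2]); // 1.2.3
    }
}
```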
@@ -46,16 +46,19 @@ public class VersionProvider {
this.date = loadTime(gitProperties);
this.version = loadVersion(buildProperties, gitProperties);

// check the version in the settings and update if needed, we did't use it would allow us to detect incompatible update later if needed
if (settingRepository.isPresent()) {
Optional<Setting> versionSetting = settingRepository.get().findByKey(Setting.INSTANCE_VERSION);
if (versionSetting.isEmpty() || !versionSetting.get().getValue().equals(this.version)) {
settingRepository.get().save(Setting.builder()
.key(Setting.INSTANCE_VERSION)
.value(this.version)
.build()
);
}
// check the version in the settings and update if needed, we didn't use it would allow us to detect incompatible update later if needed
settingRepository.ifPresent(
settingRepositoryInterface -> persistVersion(settingRepositoryInterface, version));
}

private static synchronized void persistVersion(SettingRepositoryInterface settingRepositoryInterface, String version) {
Optional<Setting> versionSetting = settingRepositoryInterface.findByKey(Setting.INSTANCE_VERSION);
if (versionSetting.isEmpty() || !versionSetting.get().getValue().equals(version)) {
settingRepositoryInterface.save(Setting.builder()
.key(Setting.INSTANCE_VERSION)
.value(version)
.build()
);
}
}
@@ -21,6 +21,7 @@ import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.constraints.NotNull;
import lombok.*;
import lombok.experimental.SuperBuilder;
import lombok.extern.slf4j.Slf4j;

import java.util.Optional;

@@ -68,6 +69,7 @@
)
}
)
@Slf4j
public class Exit extends Task implements ExecutionUpdatableTask {
@NotNull
@Schema(
@@ -104,12 +106,13 @@ public class Exit extends Task implements ExecutionUpdatableTask {
// ends all parents
while (newTaskRun.getParentTaskRunId() != null) {
newTaskRun = newExecution.findTaskRunByTaskRunId(newTaskRun.getParentTaskRunId()).withState(exitState);
newExecution = execution.withTaskRun(newTaskRun);
newExecution = newExecution.withTaskRun(newTaskRun);
}
return newExecution;
} catch (InternalException e) {
// in case we cannot update the last not terminated task run, we ignore it
return execution;
log.warn("Unable to update the taskrun state", e);
return execution.withState(exitState);
}
})
.orElse(execution)
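The one-line fix in the `Exit` task above replaces `execution.withTaskRun(...)` with `newExecution.withTaskRun(...)`, so each loop iteration builds on the previously updated execution instead of discarding it. A tiny self-contained sketch of why accumulating on the updated value matters with immutable objects:

```java
import java.util.ArrayList;
import java.util.List;

class AccumulateSketch {
    record Doc(List<String> entries) {
        Doc with(String entry) {
            var copy = new ArrayList<>(entries);
            copy.add(entry);
            return new Doc(List.copyOf(copy));
        }
    }

    public static void main(String[] args) {
        Doc base = new Doc(List.of());

        // Buggy shape: always derive from "base", so earlier updates are lost.
        Doc buggy = base;
        for (String e : List.of("a", "b", "c")) {
            buggy = base.with(e);
        }
        System.out.println(buggy.entries()); // [c]

        // Fixed shape: derive from the accumulator, as the Exit task now does with
        // newExecution.withTaskRun(...) instead of execution.withTaskRun(...).
        Doc fixed = base;
        for (String e : List.of("a", "b", "c")) {
            fixed = fixed.with(e);
        }
        System.out.println(fixed.entries()); // [a, b, c]
    }
}
```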
@@ -216,49 +216,46 @@ public class Subflow extends Task implements ExecutableTask<Subflow.Output>, Chi
VariablesService variablesService = ((DefaultRunContext) runContext).getApplicationContext().getBean(VariablesService.class);
if (this.wait) { // we only compute outputs if we wait for the subflow
boolean isOutputsAllowed = runContext
.<Boolean>pluginConfiguration(PLUGIN_FLOW_OUTPUTS_ENABLED)
.orElse(true);

List<io.kestra.core.models.flows.Output> subflowOutputs = flow.getOutputs();

// region [deprecated] Subflow outputs feature
if (subflowOutputs == null && isOutputsAllowed && this.getOutputs() != null) {
subflowOutputs = this.getOutputs().entrySet().stream()
.<io.kestra.core.models.flows.Output>map(entry -> io.kestra.core.models.flows.Output
.builder()
.id(entry.getKey())
.value(entry.getValue())
.required(true)
.build()
)
.toList();
if (subflowOutputs == null && this.getOutputs() != null) {
boolean isOutputsAllowed = runContext
.<Boolean>pluginConfiguration(PLUGIN_FLOW_OUTPUTS_ENABLED)
.orElse(true);
if (isOutputsAllowed) {
try {
subflowOutputs = this.getOutputs().entrySet().stream()
.<io.kestra.core.models.flows.Output>map(entry -> io.kestra.core.models.flows.Output
.builder()
.id(entry.getKey())
.value(entry.getValue())
.required(true)
.build()
)
.toList();
} catch (Exception e) {
Variables variables = variablesService.of(StorageContext.forTask(taskRun), builder.build());
return failSubflowDueToOutput(runContext, taskRun, execution, e, variables);
}
} else {
runContext.logger().warn("Defining outputs inside the Subflow task is not allowed.");
}
}
//endregion

if (subflowOutputs != null && !subflowOutputs.isEmpty()) {
try {
Map<String, Object> outputs = FlowInputOutput.renderFlowOutputs(subflowOutputs, runContext);
Map<String, Object> rOutputs = FlowInputOutput.renderFlowOutputs(subflowOutputs, runContext);

FlowInputOutput flowInputOutput = ((DefaultRunContext)runContext).getApplicationContext().getBean(FlowInputOutput.class); // this is hacking
if (flow.getOutputs() != null && flowInputOutput != null) {
outputs = flowInputOutput.typedOutputs(flow, execution, outputs);
rOutputs = flowInputOutput.typedOutputs(flow, execution, rOutputs);
}
builder.outputs(outputs);
builder.outputs(rOutputs);
} catch (Exception e) {
runContext.logger().warn("Failed to extract outputs with the error: '{}'", e.getLocalizedMessage(), e);
var state = State.Type.fail(this);
Variables variables = variablesService.of(StorageContext.forTask(taskRun), builder.build());
taskRun = taskRun
.withState(state)
.withAttempts(Collections.singletonList(TaskRunAttempt.builder().state(new State().withState(state)).build()))
.withOutputs(variables);

return Optional.of(SubflowExecutionResult.builder()
.executionId(execution.getId())
.state(State.Type.FAILED)
.parentTaskRun(taskRun)
.build());
return failSubflowDueToOutput(runContext, taskRun, execution, e, variables);
}
}
}
@@ -282,6 +279,21 @@ public class Subflow extends Task implements ExecutableTask<Subflow.Output>, Chi
return Optional.of(ExecutableUtils.subflowExecutionResult(taskRun, execution));
}

private Optional<SubflowExecutionResult> failSubflowDueToOutput(RunContext runContext, TaskRun taskRun, Execution execution, Exception e, Variables outputs) {
runContext.logger().error("Failed to extract outputs with the error: '{}'", e.getLocalizedMessage(), e);
var state = State.Type.fail(this);
taskRun = taskRun
.withState(state)
.withAttempts(Collections.singletonList(TaskRunAttempt.builder().state(new State().withState(state)).build()))
.withOutputs(outputs);

return Optional.of(SubflowExecutionResult.builder()
.executionId(execution.getId())
.state(State.Type.FAILED)
.parentTaskRun(taskRun)
.build());
}

@Override
public boolean waitForExecution() {
return this.wait;
@@ -7,12 +7,11 @@ import io.kestra.core.junit.annotations.ExecuteFlow;
import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.junit.annotations.LoadFlows;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.triggers.Trigger;
import io.kestra.core.queues.QueueException;
import io.kestra.core.repositories.TriggerRepositoryInterface;
import io.kestra.core.runners.RunnerUtils;
import io.kestra.core.runners.TestRunnerUtils;
import io.kestra.core.serializers.YamlParser;
import io.kestra.core.services.GraphService;
import io.kestra.core.utils.GraphUtils;
@@ -45,7 +44,7 @@ class FlowGraphTest {
private TriggerRepositoryInterface triggerRepositoryInterface;

@Inject
private RunnerUtils runnerUtils;
private TestRunnerUtils runnerUtils;

@Test
void simple() throws IllegalVariableEvaluationException, IOException {
@@ -3,6 +3,7 @@ package io.kestra.core.models.triggers.multipleflows;
import com.google.common.collect.ImmutableMap;
import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.models.property.Property;
import io.kestra.core.utils.TestsUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.junit.jupiter.api.Test;
import io.kestra.plugin.core.condition.ExecutionFlow;
@@ -33,8 +34,9 @@ public abstract class AbstractMultipleConditionStorageTest {
@Test
void allDefault() {
MultipleConditionStorageInterface multipleConditionStorage = multipleConditionStorage();
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());

Pair<Flow, MultipleCondition> pair = mockFlow(TimeWindow.builder().build());
Pair<Flow, MultipleCondition> pair = mockFlow(tenant, TimeWindow.builder().build());

MultipleConditionWindow window = multipleConditionStorage.getOrCreate(pair.getKey(), pair.getRight(), Collections.emptyMap());

@@ -50,8 +52,9 @@ public abstract class AbstractMultipleConditionStorageTest {
@Test
void daily() {
MultipleConditionStorageInterface multipleConditionStorage = multipleConditionStorage();
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());

Pair<Flow, MultipleCondition> pair = mockFlow(TimeWindow.builder().window(Duration.ofDays(1)).windowAdvance(Duration.ofSeconds(0)).build());
Pair<Flow, MultipleCondition> pair = mockFlow(tenant, TimeWindow.builder().window(Duration.ofDays(1)).windowAdvance(Duration.ofSeconds(0)).build());

MultipleConditionWindow window = multipleConditionStorage.getOrCreate(pair.getKey(), pair.getRight(), Collections.emptyMap());

@@ -67,8 +70,9 @@ public abstract class AbstractMultipleConditionStorageTest {
@Test
void dailyAdvance() {
MultipleConditionStorageInterface multipleConditionStorage = multipleConditionStorage();
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());

Pair<Flow, MultipleCondition> pair = mockFlow(TimeWindow.builder().window(Duration.ofDays(1)).windowAdvance(Duration.ofHours(4).negated()).build());
Pair<Flow, MultipleCondition> pair = mockFlow(tenant, TimeWindow.builder().window(Duration.ofDays(1)).windowAdvance(Duration.ofHours(4).negated()).build());

MultipleConditionWindow window = multipleConditionStorage.getOrCreate(pair.getKey(), pair.getRight(), Collections.emptyMap());

@@ -84,8 +88,9 @@ public abstract class AbstractMultipleConditionStorageTest {
@Test
void hourly() {
MultipleConditionStorageInterface multipleConditionStorage = multipleConditionStorage();
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());

Pair<Flow, MultipleCondition> pair = mockFlow(TimeWindow.builder().window(Duration.ofHours(1)).windowAdvance(Duration.ofHours(4).negated()).build());
Pair<Flow, MultipleCondition> pair = mockFlow(tenant, TimeWindow.builder().window(Duration.ofHours(1)).windowAdvance(Duration.ofHours(4).negated()).build());

MultipleConditionWindow window = multipleConditionStorage.getOrCreate(pair.getKey(), pair.getRight(), Collections.emptyMap());

@@ -102,8 +107,9 @@ public abstract class AbstractMultipleConditionStorageTest {
@Test
void minutely() {
MultipleConditionStorageInterface multipleConditionStorage = multipleConditionStorage();
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());

Pair<Flow, MultipleCondition> pair = mockFlow(TimeWindow.builder().window(Duration.ofMinutes(15)).windowAdvance(Duration.ofMinutes(5).negated()).build());
Pair<Flow, MultipleCondition> pair = mockFlow(tenant, TimeWindow.builder().window(Duration.ofMinutes(15)).windowAdvance(Duration.ofMinutes(5).negated()).build());

MultipleConditionWindow window = multipleConditionStorage.getOrCreate(pair.getKey(), pair.getRight(), Collections.emptyMap());

@@ -115,8 +121,9 @@ public abstract class AbstractMultipleConditionStorageTest {
@Test
void expiration() throws Exception {
MultipleConditionStorageInterface multipleConditionStorage = multipleConditionStorage();
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());

Pair<Flow, MultipleCondition> pair = mockFlow(TimeWindow.builder().window(Duration.ofSeconds(2)).windowAdvance(Duration.ofMinutes(0).negated()).build());
Pair<Flow, MultipleCondition> pair = mockFlow(tenant, TimeWindow.builder().window(Duration.ofSeconds(2)).windowAdvance(Duration.ofMinutes(0).negated()).build());

MultipleConditionWindow window = multipleConditionStorage.getOrCreate(pair.getKey(), pair.getRight(), Collections.emptyMap());
this.save(multipleConditionStorage, pair.getLeft(), Collections.singletonList(window.with(ImmutableMap.of("a", true))));
@@ -136,8 +143,9 @@ public abstract class AbstractMultipleConditionStorageTest {
@Test
void expired() throws Exception {
MultipleConditionStorageInterface multipleConditionStorage = multipleConditionStorage();
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());

Pair<Flow, MultipleCondition> pair = mockFlow(TimeWindow.builder().window(Duration.ofSeconds(2)).windowAdvance(Duration.ofMinutes(0).negated()).build());
Pair<Flow, MultipleCondition> pair = mockFlow(tenant, TimeWindow.builder().window(Duration.ofSeconds(2)).windowAdvance(Duration.ofMinutes(0).negated()).build());

MultipleConditionWindow window = multipleConditionStorage.getOrCreate(pair.getKey(), pair.getRight(), Collections.emptyMap());
this.save(multipleConditionStorage, pair.getLeft(), Collections.singletonList(window.with(ImmutableMap.of("a", true))));
@@ -146,20 +154,21 @@

assertThat(window.getResults().get("a")).isTrue();

List<MultipleConditionWindow> expired = multipleConditionStorage.expired(null);
List<MultipleConditionWindow> expired = multipleConditionStorage.expired(tenant);
assertThat(expired.size()).isZero();

Thread.sleep(2005);

expired = multipleConditionStorage.expired(null);
expired = multipleConditionStorage.expired(tenant);
assertThat(expired.size()).isEqualTo(1);
}

@Test
void dailyTimeDeadline() throws Exception {
MultipleConditionStorageInterface multipleConditionStorage = multipleConditionStorage();
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());

Pair<Flow, MultipleCondition> pair = mockFlow(TimeWindow.builder().type(Type.DAILY_TIME_DEADLINE).deadline(LocalTime.now().plusSeconds(2)).build());
Pair<Flow, MultipleCondition> pair = mockFlow(tenant, TimeWindow.builder().type(Type.DAILY_TIME_DEADLINE).deadline(LocalTime.now().plusSeconds(2)).build());

MultipleConditionWindow window = multipleConditionStorage.getOrCreate(pair.getKey(), pair.getRight(), Collections.emptyMap());
this.save(multipleConditionStorage, pair.getLeft(), Collections.singletonList(window.with(ImmutableMap.of("a", true))));
@@ -168,20 +177,21 @@

assertThat(window.getResults().get("a")).isTrue();

List<MultipleConditionWindow> expired = multipleConditionStorage.expired(null);
List<MultipleConditionWindow> expired = multipleConditionStorage.expired(tenant);
assertThat(expired.size()).isZero();

Thread.sleep(2005);

expired = multipleConditionStorage.expired(null);
expired = multipleConditionStorage.expired(tenant);
assertThat(expired.size()).isEqualTo(1);
}

@Test
void dailyTimeDeadline_Expired() throws Exception {
MultipleConditionStorageInterface multipleConditionStorage = multipleConditionStorage();
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());

Pair<Flow, MultipleCondition> pair = mockFlow(TimeWindow.builder().type(Type.DAILY_TIME_DEADLINE).deadline(LocalTime.now().minusSeconds(1)).build());
Pair<Flow, MultipleCondition> pair = mockFlow(tenant, TimeWindow.builder().type(Type.DAILY_TIME_DEADLINE).deadline(LocalTime.now().minusSeconds(1)).build());

MultipleConditionWindow window = multipleConditionStorage.getOrCreate(pair.getKey(), pair.getRight(), Collections.emptyMap());
this.save(multipleConditionStorage, pair.getLeft(), Collections.singletonList(window.with(ImmutableMap.of("a", true))));
@@ -190,16 +200,17 @@

assertThat(window.getResults()).isEmpty();

List<MultipleConditionWindow> expired = multipleConditionStorage.expired(null);
List<MultipleConditionWindow> expired = multipleConditionStorage.expired(tenant);
assertThat(expired.size()).isEqualTo(1);
}

@Test
void dailyTimeWindow() throws Exception {
void dailyTimeWindow() {
MultipleConditionStorageInterface multipleConditionStorage = multipleConditionStorage();
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());

LocalTime startTime = LocalTime.now().truncatedTo(ChronoUnit.MINUTES);
Pair<Flow, MultipleCondition> pair = mockFlow(TimeWindow.builder().type(Type.DAILY_TIME_WINDOW).startTime(startTime).endTime(startTime.plusMinutes(5)).build());
Pair<Flow, MultipleCondition> pair = mockFlow(tenant, TimeWindow.builder().type(Type.DAILY_TIME_WINDOW).startTime(startTime).endTime(startTime.plusMinutes(5)).build());

MultipleConditionWindow window = multipleConditionStorage.getOrCreate(pair.getKey(), pair.getRight(), Collections.emptyMap());
this.save(multipleConditionStorage, pair.getLeft(), Collections.singletonList(window.with(ImmutableMap.of("a", true))));
@@ -208,15 +219,16 @@

assertThat(window.getResults().get("a")).isTrue();

List<MultipleConditionWindow> expired = multipleConditionStorage.expired(null);
List<MultipleConditionWindow> expired = multipleConditionStorage.expired(tenant);
assertThat(expired.size()).isZero();
}

@Test
void slidingWindow() throws Exception {
MultipleConditionStorageInterface multipleConditionStorage = multipleConditionStorage();
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());

Pair<Flow, MultipleCondition> pair = mockFlow(TimeWindow.builder().type(Type.SLIDING_WINDOW).window(Duration.ofHours(1)).build());
Pair<Flow, MultipleCondition> pair = mockFlow(tenant, TimeWindow.builder().type(Type.SLIDING_WINDOW).window(Duration.ofHours(1)).build());

MultipleConditionWindow window = multipleConditionStorage.getOrCreate(pair.getKey(), pair.getRight(), Collections.emptyMap());
this.save(multipleConditionStorage, pair.getLeft(), Collections.singletonList(window.with(ImmutableMap.of("a", true))));
@@ -225,13 +237,13 @@

assertThat(window.getResults().get("a")).isTrue();

List<MultipleConditionWindow> expired = multipleConditionStorage.expired(null);
List<MultipleConditionWindow> expired = multipleConditionStorage.expired(tenant);
assertThat(expired.size()).isZero();
}

private static Pair<Flow, MultipleCondition> mockFlow(TimeWindow sla) {
private static Pair<Flow, MultipleCondition> mockFlow(String tenantId, TimeWindow sla) {
var multipleCondition = MultipleCondition.builder()
.id("condition-multiple")
.id("condition-multiple-%s".formatted(tenantId))
.conditions(ImmutableMap.of(
"flow-a", ExecutionFlow.builder()
.flowId(Property.ofValue("flow-a"))
@@ -248,6 +260,7 @@
Flow flow = Flow.builder()
.namespace(NAMESPACE)
.id("multiple-flow")
.tenantId(tenantId)
.revision(1)
.triggers(Collections.singletonList(io.kestra.plugin.core.trigger.Flow.builder()
.id("trigger-flow")
@@ -13,21 +13,20 @@ import static org.assertj.core.api.Assertions.assertThat;

@KestraTest
public abstract class AbstractFeatureUsageReportTest {

@Inject
FeatureUsageReport featureUsageReport;

@Test
public void shouldGetReport() {
// When
Instant now = Instant.now();
FeatureUsageReport.UsageEvent event = featureUsageReport.report(
now,
now,
Reportable.TimeInterval.of(now.minus(Duration.ofDays(1)).atZone(ZoneId.systemDefault()), now.atZone(ZoneId.systemDefault()))
);

// Then
assertThat(event.getExecutions().getDailyExecutionsCount().size()).isGreaterThan(0);
assertThat(event.getExecutions().getDailyTaskRunsCount()).isNull();
}
}
@@ -25,6 +25,7 @@ import io.kestra.core.models.tasks.ResolvedTask;
import io.kestra.core.repositories.ExecutionRepositoryInterface.ChildFilter;
import io.kestra.core.utils.IdUtils;
import io.kestra.core.utils.NamespaceUtils;
import io.kestra.core.utils.TestsUtils;
import io.kestra.plugin.core.dashboard.data.Executions;
import io.kestra.plugin.core.debug.Return;
import io.micronaut.data.model.Pageable;
@@ -48,7 +49,6 @@ import java.util.stream.Collectors;
import java.util.stream.Stream;

import static io.kestra.core.models.flows.FlowScope.USER;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.mockito.Mockito.doReturn;
@@ -62,17 +62,17 @@ public abstract class AbstractExecutionRepositoryTest {
@Inject
protected ExecutionRepositoryInterface executionRepository;

public static Execution.ExecutionBuilder builder(State.Type state, String flowId) {
return builder(state, flowId, NAMESPACE);
public static Execution.ExecutionBuilder builder(String tenantId, State.Type state, String flowId) {
return builder(tenantId, state, flowId, NAMESPACE);
}

public static Execution.ExecutionBuilder builder(State.Type state, String flowId, String namespace) {
public static Execution.ExecutionBuilder builder(String tenantId, State.Type state, String flowId, String namespace) {
State finalState = randomDuration(state);

Execution.ExecutionBuilder execution = Execution.builder()
.id(FriendlyId.createFriendlyId())
.namespace(namespace)
.tenantId(MAIN_TENANT)
.tenantId(tenantId)
.flowId(flowId == null ? FLOW : flowId)
.flowRevision(1)
.state(finalState);
@@ -126,11 +126,11 @@ public abstract class AbstractExecutionRepositoryTest {
return finalState;
}

protected void inject() {
inject(null);
protected void inject(String tenantId) {
inject(tenantId, null);
}

protected void inject(String executionTriggerId) {
protected void inject(String tenantId, String executionTriggerId) {
ExecutionTrigger executionTrigger = null;

if (executionTriggerId != null) {
@@ -139,7 +139,7 @@ public abstract class AbstractExecutionRepositoryTest {
.build();
}

executionRepository.save(builder(State.Type.RUNNING, null)
executionRepository.save(builder(tenantId, State.Type.RUNNING, null)
.labels(List.of(
new Label("key", "value"),
new Label("key2", "value2")
@@ -149,6 +149,7 @@ public abstract class AbstractExecutionRepositoryTest {
);
for (int i = 1; i < 28; i++) {
executionRepository.save(builder(
tenantId,
i < 5 ? State.Type.RUNNING : (i < 8 ? State.Type.FAILED : State.Type.SUCCESS),
i < 15 ? null : "second"
).trigger(executionTrigger).build());
@@ -156,6 +157,7 @@ public abstract class AbstractExecutionRepositoryTest {

// add a test execution, this should be ignored in search & statistics
executionRepository.save(builder(
tenantId,
State.Type.SUCCESS,
null
)
@@ -167,9 +169,10 @@ public abstract class AbstractExecutionRepositoryTest {
@ParameterizedTest
@MethodSource("filterCombinations")
void should_find_all(QueryFilter filter, int expectedSize){
inject("executionTriggerId");
var tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
inject(tenant, "executionTriggerId");

ArrayListTotal<Execution> entries = executionRepository.find(Pageable.UNPAGED, MAIN_TENANT, List.of(filter));
ArrayListTotal<Execution> entries = executionRepository.find(Pageable.UNPAGED, tenant, List.of(filter));

assertThat(entries).hasSize(expectedSize);
}
@@ -192,7 +195,8 @@ public abstract class AbstractExecutionRepositoryTest {
@ParameterizedTest
@MethodSource("errorFilterCombinations")
void should_fail_to_find_all(QueryFilter filter){
assertThrows(InvalidQueryFiltersException.class, () -> executionRepository.find(Pageable.UNPAGED, MAIN_TENANT, List.of(filter)));
var tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
assertThrows(InvalidQueryFiltersException.class, () -> executionRepository.find(Pageable.UNPAGED, tenant, List.of(filter)));
}

static Stream<QueryFilter> errorFilterCombinations() {
@@ -208,9 +212,10 @@ public abstract class AbstractExecutionRepositoryTest {

@Test
protected void find() {
inject();
var tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
inject(tenant);

ArrayListTotal<Execution> executions = executionRepository.find(Pageable.from(1, 10), MAIN_TENANT, null);
ArrayListTotal<Execution> executions = executionRepository.find(Pageable.from(1, 10), tenant, null);
assertThat(executions.getTotal()).isEqualTo(28L);
assertThat(executions.size()).isEqualTo(10);

@@ -219,7 +224,7 @@ public abstract class AbstractExecutionRepositoryTest {
.operation(QueryFilter.Op.EQUALS)
.value( List.of(State.Type.RUNNING, State.Type.FAILED))
.build());
executions = executionRepository.find(Pageable.from(1, 10), MAIN_TENANT, filters);
executions = executionRepository.find(Pageable.from(1, 10), tenant, filters);
assertThat(executions.getTotal()).isEqualTo(8L);

filters = List.of(QueryFilter.builder()
@@ -227,7 +232,7 @@ public abstract class AbstractExecutionRepositoryTest {
.operation(QueryFilter.Op.EQUALS)
.value(Map.of("key", "value"))
.build());
executions = executionRepository.find(Pageable.from(1, 10), MAIN_TENANT, filters);
executions = executionRepository.find(Pageable.from(1, 10), tenant, filters);
assertThat(executions.getTotal()).isEqualTo(1L);

filters = List.of(QueryFilter.builder()
@@ -235,7 +240,7 @@ public abstract class AbstractExecutionRepositoryTest {
.operation(QueryFilter.Op.EQUALS)
.value(Map.of("key", "value2"))
.build());
executions = executionRepository.find(Pageable.from(1, 10), MAIN_TENANT, filters);
executions = executionRepository.find(Pageable.from(1, 10), tenant, filters);
assertThat(executions.getTotal()).isEqualTo(0L);

filters = List.of(QueryFilter.builder()
@@ -244,7 +249,7 @@ public abstract class AbstractExecutionRepositoryTest {
.value(Map.of("key", "value", "keyTest", "valueTest"))
.build()
);
executions = executionRepository.find(Pageable.from(1, 10), MAIN_TENANT, filters);
executions = executionRepository.find(Pageable.from(1, 10), tenant, filters);
assertThat(executions.getTotal()).isEqualTo(0L);

filters = List.of(QueryFilter.builder()
@@ -252,7 +257,7 @@ public abstract class AbstractExecutionRepositoryTest {
.operation(QueryFilter.Op.EQUALS)
.value("second")
.build());
executions = executionRepository.find(Pageable.from(1, 10), MAIN_TENANT, filters);
executions = executionRepository.find(Pageable.from(1, 10), tenant, filters);
assertThat(executions.getTotal()).isEqualTo(13L);

filters = List.of(QueryFilter.builder()
@@ -266,7 +271,7 @@ public abstract class AbstractExecutionRepositoryTest {
.value(NAMESPACE)
.build()
);
executions = executionRepository.find(Pageable.from(1, 10), MAIN_TENANT, filters);
executions = executionRepository.find(Pageable.from(1, 10), tenant, filters);
assertThat(executions.getTotal()).isEqualTo(13L);

filters = List.of(QueryFilter.builder()
@@ -274,7 +279,7 @@ public abstract class AbstractExecutionRepositoryTest {
.operation(QueryFilter.Op.STARTS_WITH)
.value("io.kestra")
.build());
executions = executionRepository.find(Pageable.from(1, 10), MAIN_TENANT, filters);
executions = executionRepository.find(Pageable.from(1, 10), tenant, filters);
assertThat(executions.getTotal()).isEqualTo(28L);
}

@@ -282,15 +287,16 @@ public abstract class AbstractExecutionRepositoryTest {
protected void findTriggerExecutionId() {
String executionTriggerId = IdUtils.create();

inject(executionTriggerId);
inject();
var tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
inject(tenant, executionTriggerId);
inject(tenant);

var filters = List.of(QueryFilter.builder()
.field(QueryFilter.Field.TRIGGER_EXECUTION_ID)
.operation(QueryFilter.Op.EQUALS)
.value(executionTriggerId)
.build());
ArrayListTotal<Execution> executions = executionRepository.find(Pageable.from(1, 10), MAIN_TENANT, filters);
ArrayListTotal<Execution> executions = executionRepository.find(Pageable.from(1, 10), tenant, filters);
assertThat(executions.getTotal()).isEqualTo(28L);
assertThat(executions.size()).isEqualTo(10);
assertThat(executions.getFirst().getTrigger().getVariables().get("executionId")).isEqualTo(executionTriggerId);
@@ -300,7 +306,7 @@ public abstract class AbstractExecutionRepositoryTest {
.value(ExecutionRepositoryInterface.ChildFilter.CHILD)
.build());

executions = executionRepository.find(Pageable.from(1, 10), MAIN_TENANT, filters);
executions = executionRepository.find(Pageable.from(1, 10), tenant, filters);
assertThat(executions.getTotal()).isEqualTo(28L);
assertThat(executions.size()).isEqualTo(10);
assertThat(executions.getFirst().getTrigger().getVariables().get("executionId")).isEqualTo(executionTriggerId);
@@ -311,20 +317,21 @@ public abstract class AbstractExecutionRepositoryTest {
.value(ExecutionRepositoryInterface.ChildFilter.MAIN)
.build());

executions = executionRepository.find(Pageable.from(1, 10), MAIN_TENANT, filters );
executions = executionRepository.find(Pageable.from(1, 10), tenant, filters );
assertThat(executions.getTotal()).isEqualTo(28L);
assertThat(executions.size()).isEqualTo(10);
assertThat(executions.getFirst().getTrigger()).isNull();

executions = executionRepository.find(Pageable.from(1, 10), MAIN_TENANT, null);
executions = executionRepository.find(Pageable.from(1, 10), tenant, null);
assertThat(executions.getTotal()).isEqualTo(56L);
}

@Test
protected void findWithSort() {
inject();
var tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
inject(tenant);

ArrayListTotal<Execution> executions = executionRepository.find(Pageable.from(1, 10, Sort.of(Sort.Order.desc("id"))), MAIN_TENANT, null);
ArrayListTotal<Execution> executions = executionRepository.find(Pageable.from(1, 10, Sort.of(Sort.Order.desc("id"))), tenant, null);
assertThat(executions.getTotal()).isEqualTo(28L);
assertThat(executions.size()).isEqualTo(10);

@@ -333,100 +340,92 @@ public abstract class AbstractExecutionRepositoryTest {
.operation(QueryFilter.Op.EQUALS)
.value(List.of(State.Type.RUNNING, State.Type.FAILED))
.build());
executions = executionRepository.find(Pageable.from(1, 10), MAIN_TENANT, filters);
executions = executionRepository.find(Pageable.from(1, 10), tenant, filters);
assertThat(executions.getTotal()).isEqualTo(8L);
}

@Test
protected void findTaskRun() {
inject();

ArrayListTotal<TaskRun> taskRuns = executionRepository.findTaskRun(Pageable.from(1, 10), MAIN_TENANT, null);
assertThat(taskRuns.getTotal()).isEqualTo(74L);
assertThat(taskRuns.size()).isEqualTo(10);

var filters = List.of(QueryFilter.builder()
.field(QueryFilter.Field.LABELS)
.operation(QueryFilter.Op.EQUALS)
.value(Map.of("key", "value"))
.build());

taskRuns = executionRepository.findTaskRun(Pageable.from(1, 10), MAIN_TENANT, filters);
assertThat(taskRuns.getTotal()).isEqualTo(1L);
assertThat(taskRuns.size()).isEqualTo(1);
}

@Test
protected void findById() {
executionRepository.save(ExecutionFixture.EXECUTION_1);
var tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
var execution1 = ExecutionFixture.EXECUTION_1(tenant);
executionRepository.save(execution1);

Optional<Execution> full = executionRepository.findById(MAIN_TENANT, ExecutionFixture.EXECUTION_1.getId());
Optional<Execution> full = executionRepository.findById(tenant, execution1.getId());
assertThat(full.isPresent()).isTrue();

full.ifPresent(current -> {
assertThat(full.get().getId()).isEqualTo(ExecutionFixture.EXECUTION_1.getId());
assertThat(full.get().getId()).isEqualTo(execution1.getId());
});
}

@Test
protected void shouldFindByIdTestExecution() {
executionRepository.save(ExecutionFixture.EXECUTION_TEST);
var tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
var executionTest = ExecutionFixture.EXECUTION_TEST(tenant);
executionRepository.save(executionTest);

Optional<Execution> full = executionRepository.findById(null, ExecutionFixture.EXECUTION_TEST.getId());
Optional<Execution> full = executionRepository.findById(tenant, executionTest.getId());
assertThat(full.isPresent()).isTrue();

full.ifPresent(current -> {
assertThat(full.get().getId()).isEqualTo(ExecutionFixture.EXECUTION_TEST.getId());
assertThat(full.get().getId()).isEqualTo(executionTest.getId());
});
}

@Test
protected void purge() {
executionRepository.save(ExecutionFixture.EXECUTION_1);
var tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
var execution1 = ExecutionFixture.EXECUTION_1(tenant);
executionRepository.save(execution1);

Optional<Execution> full = executionRepository.findById(MAIN_TENANT, ExecutionFixture.EXECUTION_1.getId());
Optional<Execution> full = executionRepository.findById(tenant, execution1.getId());
assertThat(full.isPresent()).isTrue();

executionRepository.purge(ExecutionFixture.EXECUTION_1);
executionRepository.purge(execution1);

full = executionRepository.findById(null, ExecutionFixture.EXECUTION_1.getId());
full = executionRepository.findById(tenant, execution1.getId());
assertThat(full.isPresent()).isFalse();
}

@Test
protected void delete() {
executionRepository.save(ExecutionFixture.EXECUTION_1);
var tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
var execution1 = ExecutionFixture.EXECUTION_1(tenant);
executionRepository.save(execution1);

Optional<Execution> full = executionRepository.findById(MAIN_TENANT, ExecutionFixture.EXECUTION_1.getId());
Optional<Execution> full = executionRepository.findById(tenant, execution1.getId());
assertThat(full.isPresent()).isTrue();

executionRepository.delete(ExecutionFixture.EXECUTION_1);
executionRepository.delete(execution1);

full = executionRepository.findById(MAIN_TENANT, ExecutionFixture.EXECUTION_1.getId());
full = executionRepository.findById(tenant, execution1.getId());
assertThat(full.isPresent()).isFalse();
}

@Test
protected void mappingConflict() {
executionRepository.save(ExecutionFixture.EXECUTION_2);
executionRepository.save(ExecutionFixture.EXECUTION_1);
var tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
executionRepository.save(ExecutionFixture.EXECUTION_2(tenant));
executionRepository.save(ExecutionFixture.EXECUTION_1(tenant));

ArrayListTotal<Execution> page1 = executionRepository.findByFlowId(MAIN_TENANT, NAMESPACE, FLOW, Pageable.from(1, 10));
ArrayListTotal<Execution> page1 = executionRepository.findByFlowId(tenant, NAMESPACE, FLOW, Pageable.from(1, 10));

assertThat(page1.size()).isEqualTo(2);
}

@Test
protected void dailyStatistics() throws InterruptedException {
var tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
for (int i = 0; i < 28; i++) {
executionRepository.save(builder(
tenant,
i < 5 ? State.Type.RUNNING : (i < 8 ? State.Type.FAILED : State.Type.SUCCESS),
i < 15 ? null : "second"
).build());
}

executionRepository.save(builder(
tenant,
State.Type.SUCCESS,
"second"
).namespace(NamespaceUtils.SYSTEM_FLOWS_DEFAULT_NAMESPACE).build());
@@ -436,15 +435,14 @@ public abstract class AbstractExecutionRepositoryTest {

List<DailyExecutionStatistics> result = executionRepository.dailyStatistics(
null,
MAIN_TENANT,
tenant,
null,
null,
null,
ZonedDateTime.now().minusDays(10),
ZonedDateTime.now(),
null,
null,
false);
null);

assertThat(result.size()).isEqualTo(11);
assertThat(result.get(10).getExecutionCounts().size()).isEqualTo(11);
@@ -456,131 +454,52 @@ public abstract class AbstractExecutionRepositoryTest {

result = executionRepository.dailyStatistics(
null,
MAIN_TENANT,
tenant,
List.of(FlowScope.USER, FlowScope.SYSTEM),
null,
null,
ZonedDateTime.now().minusDays(10),
ZonedDateTime.now(),
null,
null,
false);
null);

assertThat(result.size()).isEqualTo(11);
assertThat(result.get(10).getExecutionCounts().get(State.Type.SUCCESS)).isEqualTo(21L);

result = executionRepository.dailyStatistics(
null,
MAIN_TENANT,
tenant,
List.of(FlowScope.USER),
null,
null,
ZonedDateTime.now().minusDays(10),
ZonedDateTime.now(),
null,
null,
false);
null);
assertThat(result.size()).isEqualTo(11);
assertThat(result.get(10).getExecutionCounts().get(State.Type.SUCCESS)).isEqualTo(20L);

result = executionRepository.dailyStatistics(
null,
MAIN_TENANT,
tenant,
List.of(FlowScope.SYSTEM),
null,
null,
ZonedDateTime.now().minusDays(10),
ZonedDateTime.now(),
null,
null,
false);
null);
assertThat(result.size()).isEqualTo(11);
assertThat(result.get(10).getExecutionCounts().get(State.Type.SUCCESS)).isEqualTo(1L);
}

@Test
protected void taskRunsDailyStatistics() {
for (int i = 0; i < 28; i++) {
executionRepository.save(builder(
i < 5 ? State.Type.RUNNING : (i < 8 ? State.Type.FAILED : State.Type.SUCCESS),
i < 15 ? null : "second"
).build());
}

executionRepository.save(builder(
State.Type.SUCCESS,
"second"
).namespace(NamespaceUtils.SYSTEM_FLOWS_DEFAULT_NAMESPACE).build());

List<DailyExecutionStatistics> result = executionRepository.dailyStatistics(
null,
MAIN_TENANT,
null,
null,
null,
ZonedDateTime.now().minusDays(10),
ZonedDateTime.now(),
null,
null,
true);

assertThat(result.size()).isEqualTo(11);
assertThat(result.get(10).getExecutionCounts().size()).isEqualTo(11);
assertThat(result.get(10).getDuration().getAvg().toMillis()).isGreaterThan(0L);

assertThat(result.get(10).getExecutionCounts().get(State.Type.FAILED)).isEqualTo(3L * 2);
assertThat(result.get(10).getExecutionCounts().get(State.Type.RUNNING)).isEqualTo(5L * 2);
assertThat(result.get(10).getExecutionCounts().get(State.Type.SUCCESS)).isEqualTo(57L);

result = executionRepository.dailyStatistics(
null,
MAIN_TENANT,
List.of(FlowScope.USER, FlowScope.SYSTEM),
null,
null,
ZonedDateTime.now().minusDays(10),
ZonedDateTime.now(),
null,
null,
true);

assertThat(result.size()).isEqualTo(11);
assertThat(result.get(10).getExecutionCounts().get(State.Type.SUCCESS)).isEqualTo(57L);

result = executionRepository.dailyStatistics(
null,
MAIN_TENANT,
List.of(FlowScope.USER),
null,
null,
ZonedDateTime.now().minusDays(10),
ZonedDateTime.now(),
null,
null,
true);
assertThat(result.size()).isEqualTo(11);
assertThat(result.get(10).getExecutionCounts().get(State.Type.SUCCESS)).isEqualTo(55L);

result = executionRepository.dailyStatistics(
null,
MAIN_TENANT,
List.of(FlowScope.SYSTEM),
null,
null,
ZonedDateTime.now().minusDays(10),
ZonedDateTime.now(),
null,
null,
true);
assertThat(result.size()).isEqualTo(11);
assertThat(result.get(10).getExecutionCounts().get(State.Type.SUCCESS)).isEqualTo(2L);
}

@SuppressWarnings("OptionalGetWithoutIsPresent")
@Test
protected void executionsCount() throws InterruptedException {
var tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
for (int i = 0; i < 14; i++) {
executionRepository.save(builder(
tenant,
State.Type.SUCCESS,
i < 2 ? "first" : (i < 5 ? "second" : "third")
).build());
@@ -590,7 +509,7 @@ public abstract class AbstractExecutionRepositoryTest {
Thread.sleep(500);

List<ExecutionCount> result = executionRepository.executionCounts(
MAIN_TENANT,
tenant,
List.of(
new Flow(NAMESPACE, "first"),
new Flow(NAMESPACE, "second"),
@@ -609,7 +528,7 @@ public abstract class AbstractExecutionRepositoryTest {
assertThat(result.stream().filter(executionCount -> executionCount.getFlowId().equals("missing")).findFirst().get().getCount()).isEqualTo(0L);

result = executionRepository.executionCounts(
MAIN_TENANT,
tenant,
List.of(
new Flow(NAMESPACE, "first"),
new Flow(NAMESPACE, "second"),
@@ -626,7 +545,7 @@ public abstract class AbstractExecutionRepositoryTest {
assertThat(result.stream().filter(executionCount -> executionCount.getFlowId().equals("third")).findFirst().get().getCount()).isEqualTo(9L);

result = executionRepository.executionCounts(
MAIN_TENANT,
tenant,
null,
null,
null,
@@ -639,14 +558,15 @@ public abstract class AbstractExecutionRepositoryTest {

@Test
protected void update() {
Execution execution = ExecutionFixture.EXECUTION_1;
executionRepository.save(ExecutionFixture.EXECUTION_1);
var tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
Execution execution = ExecutionFixture.EXECUTION_1(tenant);
executionRepository.save(execution);

Label label = new Label("key", "value");
Execution updated = execution.toBuilder().labels(List.of(label)).build();
executionRepository.update(updated);

Optional<Execution> validation = executionRepository.findById(MAIN_TENANT, updated.getId());
Optional<Execution> validation = executionRepository.findById(tenant, updated.getId());
assertThat(validation.isPresent()).isTrue();
assertThat(validation.get().getLabels().size()).isEqualTo(1);
assertThat(validation.get().getLabels().getFirst()).isEqualTo(label);
@@ -654,13 +574,14 @@ public abstract class AbstractExecutionRepositoryTest {

@Test
void shouldFindLatestExecutionGivenState() {
Execution earliest = buildWithCreatedDate(Instant.now().minus(Duration.ofMinutes(10)));
Execution latest = buildWithCreatedDate(Instant.now().minus(Duration.ofMinutes(5)));
var tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
Execution earliest = buildWithCreatedDate(tenant, Instant.now().minus(Duration.ofMinutes(10)));
Execution latest = buildWithCreatedDate(tenant, Instant.now().minus(Duration.ofMinutes(5)));

executionRepository.save(earliest);
executionRepository.save(latest);

Optional<Execution> result = executionRepository.findLatestForStates(MAIN_TENANT, "io.kestra.unittest", "full", List.of(State.Type.CREATED));
Optional<Execution> result = executionRepository.findLatestForStates(tenant, "io.kestra.unittest", "full", List.of(State.Type.CREATED));
assertThat(result.isPresent()).isTrue();
assertThat(result.get().getId()).isEqualTo(latest.getId());
}
@@ -700,11 +621,11 @@ public abstract class AbstractExecutionRepositoryTest {
assertThat(data.get(0).get("date")).isEqualTo(DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSXXX").format(ZonedDateTime.ofInstant(startDate, ZoneId.systemDefault()).withSecond(0).withNano(0)));
}

private static Execution buildWithCreatedDate(Instant instant) {
private static Execution buildWithCreatedDate(String tenant, Instant instant) {
return Execution.builder()
.id(IdUtils.create())
.namespace("io.kestra.unittest")
.tenantId(MAIN_TENANT)
.tenantId(tenant)
.flowId("full")
.flowRevision(1)
.state(new State(State.Type.CREATED, List.of(new State.History(State.Type.CREATED, instant))))
@@ -715,22 +636,24 @@ public abstract class AbstractExecutionRepositoryTest {

@Test
protected void findAllAsync() {
inject();
var tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
inject(tenant);

List<Execution> executions = executionRepository.findAllAsync(MAIN_TENANT).collectList().block();
List<Execution> executions = executionRepository.findAllAsync(tenant).collectList().block();
assertThat(executions).hasSize(29); // used by the backup so it contains TEST executions
}

@Test
protected void shouldFindByLabel() {
inject();
var tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
inject(tenant);

List<QueryFilter> filters = List.of(QueryFilter.builder()
.field(QueryFilter.Field.LABELS)
.operation(QueryFilter.Op.EQUALS)
.value(Map.of("key", "value"))
.build());
List<Execution> executions = executionRepository.find(Pageable.from(1, 10), MAIN_TENANT, filters);
List<Execution> executions = executionRepository.find(Pageable.from(1, 10), tenant, filters);
assertThat(executions.size()).isEqualTo(1L);

// Filtering by two pairs of labels, since now its a and behavior, it should not return anything
@@ -739,15 +662,16 @@ public abstract class AbstractExecutionRepositoryTest {
.operation(QueryFilter.Op.EQUALS)
.value(Map.of("key", "value", "keyother", "valueother"))
.build());
executions = executionRepository.find(Pageable.from(1, 10), MAIN_TENANT, filters);
executions = executionRepository.find(Pageable.from(1, 10), tenant, filters);
assertThat(executions.size()).isEqualTo(0L);
}

@Test
protected void shouldReturnLastExecutionsWhenInputsAreNull() {
inject();
var tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
inject(tenant);

List<Execution> lastExecutions = executionRepository.lastExecutions(MAIN_TENANT, null);
List<Execution> lastExecutions = executionRepository.lastExecutions(tenant, null);

assertThat(lastExecutions).isNotEmpty();
Set<String> flowIds = lastExecutions.stream().map(Execution::getFlowId).collect(Collectors.toSet());

@@ -1,7 +1,6 @@
package io.kestra.core.repositories;

import com.google.common.collect.ImmutableMap;
import io.kestra.core.Helpers;
import io.kestra.core.events.CrudEvent;
import io.kestra.core.events.CrudEventType;
import io.kestra.core.exceptions.InvalidQueryFiltersException;
@@ -10,7 +9,6 @@ import io.kestra.core.models.Label;
import io.kestra.core.models.QueryFilter;
import io.kestra.core.models.QueryFilter.Field;
import io.kestra.core.models.QueryFilter.Op;
import io.kestra.core.models.SearchResult;
import io.kestra.core.models.conditions.ConditionContext;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.ExecutionTrigger;
@@ -20,7 +18,6 @@ import io.kestra.core.models.property.Property;
import io.kestra.core.models.triggers.AbstractTrigger;
import io.kestra.core.models.triggers.PollingTriggerInterface;
import io.kestra.core.models.triggers.TriggerContext;
import io.kestra.core.queues.QueueException;
import io.kestra.core.repositories.ExecutionRepositoryInterface.ChildFilter;
import io.kestra.core.services.FlowService;
import io.kestra.core.utils.Await;
@@ -29,22 +26,19 @@ import io.kestra.core.utils.TestsUtils;
import io.kestra.plugin.core.debug.Return;
import io.micronaut.context.event.ApplicationEventListener;
import io.micronaut.data.model.Pageable;
import io.micronaut.data.model.Sort;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import jakarta.validation.ConstraintViolationException;
import java.util.concurrent.CopyOnWriteArrayList;
import lombok.*;
import lombok.experimental.SuperBuilder;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
import org.slf4j.event.Level;

import java.io.IOException;
import java.net.URISyntaxException;
import java.time.Duration;
import java.time.ZonedDateTime;
import java.util.*;
@@ -52,16 +46,12 @@ import java.util.concurrent.TimeoutException;
import java.util.stream.Stream;

import static io.kestra.core.models.flows.FlowScope.SYSTEM;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import static io.kestra.core.utils.NamespaceUtils.SYSTEM_FLOWS_DEFAULT_NAMESPACE;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertThrows;

// If some counts are wrong in this test it means that one of the tests is not properly deleting what it created
@KestraTest
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
public abstract class AbstractFlowRepositoryTest {
public static final String TEST_TENANT_ID = "tenant";
public static final String TEST_NAMESPACE = "io.kestra.unittest";
public static final String TEST_FLOW_ID = "test";
@Inject
@@ -70,21 +60,18 @@ public abstract class AbstractFlowRepositoryTest {
@Inject
protected ExecutionRepositoryInterface executionRepository;

@Inject
private LocalFlowRepositoryLoader repositoryLoader;

@BeforeEach
protected void init() throws IOException, URISyntaxException {
TestsUtils.loads(MAIN_TENANT, repositoryLoader);
@BeforeAll
protected static void init() {
FlowListener.reset();
}

private static FlowWithSource.FlowWithSourceBuilder<?, ?> builder() {
return builder(IdUtils.create(), TEST_FLOW_ID);
private static FlowWithSource.FlowWithSourceBuilder<?, ?> builder(String tenantId) {
return builder(tenantId, IdUtils.create(), TEST_FLOW_ID);
}

private static FlowWithSource.FlowWithSourceBuilder<?, ?> builder(String flowId, String taskId) {
private static FlowWithSource.FlowWithSourceBuilder<?, ?> builder(String tenantId, String flowId, String taskId) {
return FlowWithSource.builder()
.tenantId(tenantId)
.id(flowId)
.namespace(TEST_NAMESPACE)
.tasks(Collections.singletonList(Return.builder().id(taskId).type(Return.class.getName()).format(Property.ofValue(TEST_FLOW_ID)).build()));
@@ -93,16 +80,16 @@ public abstract class AbstractFlowRepositoryTest {
@ParameterizedTest
@MethodSource("filterCombinations")
void should_find_all(QueryFilter filter){

String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
FlowWithSource flow = FlowWithSource.builder()
.id("filterFlowId")
.namespace(SYSTEM_FLOWS_DEFAULT_NAMESPACE)
.tenantId(MAIN_TENANT)
.tenantId(tenant)
.labels(Label.from(Map.of("key", "value")))
.build();
flow = flowRepository.create(GenericFlow.of(flow));
try {
ArrayListTotal<Flow> entries = flowRepository.find(Pageable.UNPAGED, MAIN_TENANT, List.of(filter));
ArrayListTotal<Flow> entries = flowRepository.find(Pageable.UNPAGED, tenant, List.of(filter));

assertThat(entries).hasSize(1);
} finally {
@@ -113,16 +100,16 @@ public abstract class AbstractFlowRepositoryTest {
@ParameterizedTest
@MethodSource("filterCombinations")
void should_find_all_with_source(QueryFilter filter){

String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
FlowWithSource flow = FlowWithSource.builder()
.id("filterFlowId")
.namespace(SYSTEM_FLOWS_DEFAULT_NAMESPACE)
.tenantId(MAIN_TENANT)
.tenantId(tenant)
.labels(Label.from(Map.of("key", "value")))
.build();
flow = flowRepository.create(GenericFlow.of(flow));
try {
ArrayListTotal<FlowWithSource> entries = flowRepository.findWithSource(Pageable.UNPAGED, MAIN_TENANT, List.of(filter));
ArrayListTotal<FlowWithSource> entries = flowRepository.findWithSource(Pageable.UNPAGED, tenant, List.of(filter));

assertThat(entries).hasSize(1);
} finally {
@@ -144,7 +131,7 @@ public abstract class AbstractFlowRepositoryTest {
void should_fail_to_find_all(QueryFilter filter){
assertThrows(
InvalidQueryFiltersException.class,
() -> flowRepository.find(Pageable.UNPAGED, MAIN_TENANT, List.of(filter)));
() -> flowRepository.find(Pageable.UNPAGED, TestsUtils.randomTenant(this.getClass().getSimpleName()), List.of(filter)));

}

@@ -153,7 +140,7 @@ public abstract class AbstractFlowRepositoryTest {
void should_fail_to_find_all_with_source(QueryFilter filter){
assertThrows(
InvalidQueryFiltersException.class,
() -> flowRepository.findWithSource(Pageable.UNPAGED, MAIN_TENANT, List.of(filter)));
() -> flowRepository.findWithSource(Pageable.UNPAGED, TestsUtils.randomTenant(this.getClass().getSimpleName()), List.of(filter)));

}

@@ -176,17 +163,17 @@ public abstract class AbstractFlowRepositoryTest {

@Test
void findById() {
FlowWithSource flow = builder()
.tenantId(MAIN_TENANT)
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
FlowWithSource flow = builder(tenant)
.revision(3)
.build();
flow = flowRepository.create(GenericFlow.of(flow));
try {
Optional<Flow> full = flowRepository.findById(MAIN_TENANT, flow.getNamespace(), flow.getId());
Optional<Flow> full = flowRepository.findById(tenant, flow.getNamespace(), flow.getId());
assertThat(full.isPresent()).isTrue();
assertThat(full.get().getRevision()).isEqualTo(1);

full = flowRepository.findById(MAIN_TENANT, flow.getNamespace(), flow.getId(), Optional.empty());
full = flowRepository.findById(tenant, flow.getNamespace(), flow.getId(), Optional.empty());
assertThat(full.isPresent()).isTrue();
} finally {
deleteFlow(flow);
@@ -195,17 +182,18 @@ public abstract class AbstractFlowRepositoryTest {

@Test
void findByIdWithoutAcl() {
FlowWithSource flow = builder()
.tenantId(MAIN_TENANT)
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
FlowWithSource flow = builder(tenant)
.tenantId(tenant)
.revision(3)
.build();
flow = flowRepository.create(GenericFlow.of(flow));
try {
Optional<Flow> full = flowRepository.findByIdWithoutAcl(MAIN_TENANT, flow.getNamespace(), flow.getId(), Optional.empty());
Optional<Flow> full = flowRepository.findByIdWithoutAcl(tenant, flow.getNamespace(), flow.getId(), Optional.empty());
assertThat(full.isPresent()).isTrue();
assertThat(full.get().getRevision()).isEqualTo(1);

full = flowRepository.findByIdWithoutAcl(MAIN_TENANT, flow.getNamespace(), flow.getId(), Optional.empty());
full = flowRepository.findByIdWithoutAcl(tenant, flow.getNamespace(), flow.getId(), Optional.empty());
assertThat(full.isPresent()).isTrue();
} finally {
deleteFlow(flow);
@@ -214,15 +202,16 @@ public abstract class AbstractFlowRepositoryTest {

@Test
void findByIdWithSource() {
FlowWithSource flow = builder()
.tenantId(MAIN_TENANT)
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
FlowWithSource flow = builder(tenant)
.tenantId(tenant)
.revision(3)
.build();
String source = "# comment\n" + flow.sourceOrGenerateIfNull();
flow = flowRepository.create(GenericFlow.fromYaml(MAIN_TENANT, source));
flow = flowRepository.create(GenericFlow.fromYaml(tenant, source));

try {
Optional<FlowWithSource> full = flowRepository.findByIdWithSource(MAIN_TENANT, flow.getNamespace(), flow.getId());
Optional<FlowWithSource> full = flowRepository.findByIdWithSource(tenant, flow.getNamespace(), flow.getId());
assertThat(full.isPresent()).isTrue();

full.ifPresent(current -> {
@@ -237,7 +226,8 @@ public abstract class AbstractFlowRepositoryTest {

@Test
void save() {
FlowWithSource flow = builder().revision(12).build();
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
FlowWithSource flow = builder(tenant).revision(12).build();
FlowWithSource save = flowRepository.create(GenericFlow.of(flow));

try {
@@ -249,7 +239,8 @@ public abstract class AbstractFlowRepositoryTest {

@Test
void saveNoRevision() {
FlowWithSource flow = builder().build();
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
FlowWithSource flow = builder(tenant).build();
FlowWithSource save = flowRepository.create(GenericFlow.of(flow));

try {
@@ -260,68 +251,17 @@ public abstract class AbstractFlowRepositoryTest {

}

@Test
void findAll() {
List<Flow> save = flowRepository.findAll(MAIN_TENANT);

assertThat((long) save.size()).isEqualTo(Helpers.FLOWS_COUNT);
}

@Test
void findAllWithSource() {
List<FlowWithSource> save = flowRepository.findAllWithSource(MAIN_TENANT);

assertThat((long) save.size()).isEqualTo(Helpers.FLOWS_COUNT);
}

@Test
void findAllForAllTenants() {
List<Flow> save = flowRepository.findAllForAllTenants();

assertThat((long) save.size()).isEqualTo(Helpers.FLOWS_COUNT);
}

@Test
void findAllWithSourceForAllTenants() {
List<FlowWithSource> save = flowRepository.findAllWithSourceForAllTenants();

assertThat((long) save.size()).isEqualTo(Helpers.FLOWS_COUNT);
}

@Test
void findByNamespace() {
List<Flow> save = flowRepository.findByNamespace(MAIN_TENANT, "io.kestra.tests");
assertThat((long) save.size()).isEqualTo(Helpers.FLOWS_COUNT - 24);

save = flowRepository.findByNamespace(MAIN_TENANT, "io.kestra.tests2");
assertThat((long) save.size()).isEqualTo(1L);

save = flowRepository.findByNamespace(MAIN_TENANT, "io.kestra.tests.minimal.bis");
assertThat((long) save.size()).isEqualTo(1L);
}

@Test
void findByNamespacePrefix() {
List<Flow> save = flowRepository.findByNamespacePrefix(MAIN_TENANT, "io.kestra.tests");
assertThat((long) save.size()).isEqualTo(Helpers.FLOWS_COUNT - 1);

save = flowRepository.findByNamespace(MAIN_TENANT, "io.kestra.tests2");
assertThat((long) save.size()).isEqualTo(1L);

save = flowRepository.findByNamespace(MAIN_TENANT, "io.kestra.tests.minimal.bis");
assertThat((long) save.size()).isEqualTo(1L);
}

@Test
void findByNamespaceWithSource() {
Flow flow = builder()
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
Flow flow = builder(tenant)
.revision(3)
.build();
String flowSource = "# comment\n" + flow.sourceOrGenerateIfNull();
flow = flowRepository.create(GenericFlow.fromYaml(MAIN_TENANT, flowSource));
flow = flowRepository.create(GenericFlow.fromYaml(tenant, flowSource));

try {
List<FlowWithSource> save = flowRepository.findByNamespaceWithSource(MAIN_TENANT, flow.getNamespace());
List<FlowWithSource> save = flowRepository.findByNamespaceWithSource(tenant, flow.getNamespace());
assertThat((long) save.size()).isEqualTo(1L);

assertThat(save.getFirst().getSource()).isEqualTo(FlowService.cleanupSource(flowSource));
@@ -330,175 +270,15 @@ public abstract class AbstractFlowRepositoryTest {
}
}

@Test
void findByNamespacePrefixWithSource() {
List<FlowWithSource> save = flowRepository.findByNamespacePrefixWithSource(MAIN_TENANT, "io.kestra.tests");
assertThat((long) save.size()).isEqualTo(Helpers.FLOWS_COUNT - 1);
}

@Test
void find_paginationPartial() {
assertThat(flowRepository.find(Pageable.from(1, (int) Helpers.FLOWS_COUNT - 1, Sort.UNSORTED), MAIN_TENANT, null)
.size())
.describedAs("When paginating at MAX-1, it should return MAX-1")
.isEqualTo(Helpers.FLOWS_COUNT - 1);

assertThat(flowRepository.findWithSource(Pageable.from(1, (int) Helpers.FLOWS_COUNT - 1, Sort.UNSORTED), MAIN_TENANT, null)
.size())
.describedAs("When paginating at MAX-1, it should return MAX-1")
.isEqualTo(Helpers.FLOWS_COUNT - 1);
}

@Test
void find_paginationGreaterThanExisting() {
assertThat(flowRepository.find(Pageable.from(1, (int) Helpers.FLOWS_COUNT + 1, Sort.UNSORTED), MAIN_TENANT, null)
.size())
.describedAs("When paginating requesting a larger amount than existing, it should return existing MAX")
.isEqualTo(Helpers.FLOWS_COUNT);
assertThat(flowRepository.findWithSource(Pageable.from(1, (int) Helpers.FLOWS_COUNT + 1, Sort.UNSORTED), MAIN_TENANT, null)
.size())
.describedAs("When paginating requesting a larger amount than existing, it should return existing MAX")
.isEqualTo(Helpers.FLOWS_COUNT);
}

@Test
void find_prefixMatchingAllNamespaces() {
assertThat(flowRepository.find(
Pageable.UNPAGED,
MAIN_TENANT,
List.of(
QueryFilter.builder().field(QueryFilter.Field.NAMESPACE).operation(QueryFilter.Op.STARTS_WITH).value("io.kestra.tests").build()
)
).size())
.describedAs("When filtering on NAMESPACE START_WITH a pattern that match all, it should return all")
.isEqualTo(Helpers.FLOWS_COUNT);

assertThat(flowRepository.findWithSource(
Pageable.UNPAGED,
MAIN_TENANT,
List.of(
QueryFilter.builder().field(QueryFilter.Field.NAMESPACE).operation(QueryFilter.Op.STARTS_WITH).value("io.kestra.tests").build()
)
).size())
.describedAs("When filtering on NAMESPACE START_WITH a pattern that match all, it should return all")
.isEqualTo(Helpers.FLOWS_COUNT);
}

@Test
void find_aSpecifiedNamespace() {
assertThat(flowRepository.find(
Pageable.UNPAGED,
MAIN_TENANT,
List.of(
QueryFilter.builder().field(QueryFilter.Field.NAMESPACE).operation(QueryFilter.Op.EQUALS).value("io.kestra.tests2").build()
)
).size()).isEqualTo(1L);

assertThat(flowRepository.findWithSource(
Pageable.UNPAGED,
MAIN_TENANT,
List.of(
QueryFilter.builder().field(QueryFilter.Field.NAMESPACE).operation(QueryFilter.Op.EQUALS).value("io.kestra.tests2").build()
)
).size()).isEqualTo(1L);
}

@Test
void find_aSpecificSubNamespace() {
assertThat(flowRepository.find(
Pageable.UNPAGED,
MAIN_TENANT,
List.of(
QueryFilter.builder().field(QueryFilter.Field.NAMESPACE).operation(QueryFilter.Op.EQUALS).value("io.kestra.tests.minimal.bis").build()
)
).size())
.isEqualTo(1L);

assertThat(flowRepository.findWithSource(
Pageable.UNPAGED,
MAIN_TENANT,
List.of(
QueryFilter.builder().field(QueryFilter.Field.NAMESPACE).operation(QueryFilter.Op.EQUALS).value("io.kestra.tests.minimal.bis").build()
)
).size())
.isEqualTo(1L);
}

@Test
void find_aSpecificLabel() {
assertThat(
flowRepository.find(Pageable.UNPAGED, MAIN_TENANT,
List.of(
QueryFilter.builder().field(QueryFilter.Field.LABELS).operation(QueryFilter.Op.EQUALS).value(Map.of("country", "FR")).build()
)
).size())
.isEqualTo(1);

assertThat(
flowRepository.findWithSource(Pageable.UNPAGED, MAIN_TENANT,
List.of(
QueryFilter.builder().field(QueryFilter.Field.LABELS).operation(QueryFilter.Op.EQUALS).value(Map.of("country", "FR")).build()
)
).size())
.isEqualTo(1);
}

@Test
void find_aSpecificFlowByNamespaceAndLabel() {
assertThat(
flowRepository.find(Pageable.UNPAGED, MAIN_TENANT,
List.of(
QueryFilter.builder().field(QueryFilter.Field.NAMESPACE).operation(QueryFilter.Op.EQUALS).value("io.kestra.tests").build(),
QueryFilter.builder().field(QueryFilter.Field.LABELS).operation(QueryFilter.Op.EQUALS).value(Map.of("key2", "value2")).build()
)
).size())
.isEqualTo(1);

assertThat(
flowRepository.findWithSource(Pageable.UNPAGED, MAIN_TENANT,
List.of(
QueryFilter.builder().field(QueryFilter.Field.NAMESPACE).operation(QueryFilter.Op.EQUALS).value("io.kestra.tests").build(),
QueryFilter.builder().field(QueryFilter.Field.LABELS).operation(QueryFilter.Op.EQUALS).value(Map.of("key2", "value2")).build()
)
).size())
.isEqualTo(1);
}

@Test
void find_noResult_forAnUnknownNamespace() {
assertThat(
flowRepository.find(Pageable.UNPAGED, MAIN_TENANT,
List.of(
QueryFilter.builder().field(QueryFilter.Field.NAMESPACE).operation(QueryFilter.Op.EQUALS).value("io.kestra.tests").build(),
QueryFilter.builder().field(QueryFilter.Field.LABELS).operation(QueryFilter.Op.EQUALS).value(Map.of("key1", "value2")).build()
)
).size())
.isEqualTo(0);

assertThat(
flowRepository.findWithSource(Pageable.UNPAGED, MAIN_TENANT,
List.of(
QueryFilter.builder().field(QueryFilter.Field.NAMESPACE).operation(QueryFilter.Op.EQUALS).value("io.kestra.tests").build(),
QueryFilter.builder().field(QueryFilter.Field.LABELS).operation(QueryFilter.Op.EQUALS).value(Map.of("key1", "value2")).build()
)
).size())
.isEqualTo(0);
}

@Test
protected void findSpecialChars() {
ArrayListTotal<SearchResult<Flow>> save = flowRepository.findSourceCode(Pageable.unpaged(), "https://api.chucknorris.io", MAIN_TENANT, null);
assertThat((long) save.size()).isEqualTo(2L);
}

@Test
void delete() {
Flow flow = builder().tenantId(MAIN_TENANT).build();
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
Flow flow = builder(tenant).tenantId(tenant).build();

FlowWithSource save = flowRepository.create(GenericFlow.of(flow));

try {
assertThat(flowRepository.findById(MAIN_TENANT, save.getNamespace(), save.getId()).isPresent()).isTrue();
assertThat(flowRepository.findById(tenant, save.getNamespace(), save.getId()).isPresent()).isTrue();
} catch (Throwable e) {
deleteFlow(save);
throw e;
@@ -506,21 +286,22 @@ public abstract class AbstractFlowRepositoryTest {

Flow delete = flowRepository.delete(save);

assertThat(flowRepository.findById(MAIN_TENANT, flow.getNamespace(), flow.getId()).isPresent()).isFalse();
assertThat(flowRepository.findById(MAIN_TENANT, flow.getNamespace(), flow.getId(), Optional.of(save.getRevision())).isPresent()).isTrue();
assertThat(flowRepository.findById(tenant, flow.getNamespace(), flow.getId()).isPresent()).isFalse();
assertThat(flowRepository.findById(tenant, flow.getNamespace(), flow.getId(), Optional.of(save.getRevision())).isPresent()).isTrue();

List<FlowWithSource> revisions = flowRepository.findRevisions(MAIN_TENANT, flow.getNamespace(), flow.getId());
List<FlowWithSource> revisions = flowRepository.findRevisions(tenant, flow.getNamespace(), flow.getId());
assertThat(revisions.getLast().getRevision()).isEqualTo(delete.getRevision());
}

@Test
void updateConflict() {
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
String flowId = IdUtils.create();

Flow flow = Flow.builder()
.id(flowId)
.namespace(TEST_NAMESPACE)
.tenantId(MAIN_TENANT)
.tenantId(tenant)
.inputs(List.of(StringInput.builder().type(Type.STRING).id("a").build()))
.tasks(Collections.singletonList(Return.builder().id(TEST_FLOW_ID).type(Return.class.getName()).format(Property.ofValue(TEST_FLOW_ID)).build()))
.build();
@@ -528,12 +309,12 @@ public abstract class AbstractFlowRepositoryTest {
Flow save = flowRepository.create(GenericFlow.of(flow));

try {
assertThat(flowRepository.findById(MAIN_TENANT, flow.getNamespace(), flow.getId()).isPresent()).isTrue();
assertThat(flowRepository.findById(tenant, flow.getNamespace(), flow.getId()).isPresent()).isTrue();

Flow update = Flow.builder()
.id(IdUtils.create())
.namespace("io.kestra.unittest2")
.tenantId(MAIN_TENANT)
.tenantId(tenant)
.inputs(List.of(StringInput.builder().type(Type.STRING).id("b").build()))
.tasks(Collections.singletonList(Return.builder().id(TEST_FLOW_ID).type(Return.class.getName()).format(Property.ofValue(TEST_FLOW_ID)).build()))
.build();
@@ -551,13 +332,14 @@ public abstract class AbstractFlowRepositoryTest {
}

@Test
void removeTrigger() throws TimeoutException, QueueException {
public void removeTrigger() throws TimeoutException {
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
String flowId = IdUtils.create();

Flow flow = Flow.builder()
.id(flowId)
.namespace(TEST_NAMESPACE)
.tenantId(MAIN_TENANT)
.tenantId(tenant)
.triggers(Collections.singletonList(UnitTest.builder()
.id("sleep")
.type(UnitTest.class.getName())
@@ -567,12 +349,12 @@ public abstract class AbstractFlowRepositoryTest {

flow = flowRepository.create(GenericFlow.of(flow));
try {
assertThat(flowRepository.findById(MAIN_TENANT, flow.getNamespace(), flow.getId()).isPresent()).isTrue();
assertThat(flowRepository.findById(tenant, flow.getNamespace(), flow.getId()).isPresent()).isTrue();

Flow update = Flow.builder()
.id(flowId)
.namespace(TEST_NAMESPACE)
.tenantId(MAIN_TENANT)
.tenantId(tenant)
.tasks(Collections.singletonList(Return.builder().id(TEST_FLOW_ID).type(Return.class.getName()).format(Property.ofValue(TEST_FLOW_ID)).build()))
.build();
;
@@ -583,21 +365,25 @@ public abstract class AbstractFlowRepositoryTest {
deleteFlow(flow);
}

Await.until(() -> FlowListener.getEmits().size() == 3, Duration.ofMillis(100), Duration.ofSeconds(5));
assertThat(FlowListener.getEmits().stream().filter(r -> r.getType() == CrudEventType.CREATE).count()).isEqualTo(1L);
assertThat(FlowListener.getEmits().stream().filter(r -> r.getType() == CrudEventType.UPDATE).count()).isEqualTo(1L);
assertThat(FlowListener.getEmits().stream().filter(r -> r.getType() == CrudEventType.DELETE).count()).isEqualTo(1L);
Await.until(() -> FlowListener.filterByTenant(tenant)
.size() == 3, Duration.ofMillis(100), Duration.ofSeconds(5));
assertThat(FlowListener.filterByTenant(tenant).stream()
.filter(r -> r.getType() == CrudEventType.CREATE).count()).isEqualTo(1L);
assertThat(FlowListener.filterByTenant(tenant).stream()
.filter(r -> r.getType() == CrudEventType.UPDATE).count()).isEqualTo(1L);
assertThat(FlowListener.filterByTenant(tenant).stream()
.filter(r -> r.getType() == CrudEventType.DELETE).count()).isEqualTo(1L);
}

@Test
void removeTriggerDelete() throws TimeoutException {
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
String flowId = IdUtils.create();

Flow flow = Flow.builder()
.id(flowId)
.namespace(TEST_NAMESPACE)
.tenantId(MAIN_TENANT)
.tenantId(tenant)
.triggers(Collections.singletonList(UnitTest.builder()
.id("sleep")
.type(UnitTest.class.getName())
@@ -607,40 +393,39 @@ public abstract class AbstractFlowRepositoryTest {

Flow save = flowRepository.create(GenericFlow.of(flow));
try {
assertThat(flowRepository.findById(MAIN_TENANT, flow.getNamespace(), flow.getId()).isPresent()).isTrue();
assertThat(flowRepository.findById(tenant, flow.getNamespace(), flow.getId()).isPresent()).isTrue();
} finally {
deleteFlow(save);
}

Await.until(() -> FlowListener.getEmits().size() == 2, Duration.ofMillis(100), Duration.ofSeconds(5));
assertThat(FlowListener.getEmits().stream().filter(r -> r.getType() == CrudEventType.CREATE).count()).isEqualTo(1L);
assertThat(FlowListener.getEmits().stream().filter(r -> r.getType() == CrudEventType.DELETE).count()).isEqualTo(1L);
Await.until(() -> FlowListener.filterByTenant(tenant)
.size() == 2, Duration.ofMillis(100), Duration.ofSeconds(5));
assertThat(FlowListener.filterByTenant(tenant).stream()
.filter(r -> r.getType() == CrudEventType.CREATE).count()).isEqualTo(1L);
assertThat(FlowListener.filterByTenant(tenant).stream()
.filter(r -> r.getType() == CrudEventType.DELETE).count()).isEqualTo(1L);
}

@Test
void findDistinctNamespace() {
List<String> distinctNamespace = flowRepository.findDistinctNamespace(MAIN_TENANT);
assertThat((long) distinctNamespace.size()).isEqualTo(9L);
}

@Test
protected void shouldReturnNullRevisionForNonExistingFlow() {
assertThat(flowRepository.lastRevision(TEST_TENANT_ID, TEST_NAMESPACE, IdUtils.create())).isNull();
assertThat(flowRepository.lastRevision(TestsUtils.randomTenant(this.getClass().getSimpleName()), TEST_NAMESPACE, IdUtils.create())).isNull();
}

@Test
protected void shouldReturnLastRevisionOnCreate() {
// Given
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
final List<Flow> toDelete = new ArrayList<>();
final String flowId = IdUtils.create();
try {
// When
toDelete.add(flowRepository.create(createTestingLogFlow(flowId, "???")));
Integer result = flowRepository.lastRevision(TEST_TENANT_ID, TEST_NAMESPACE, flowId);
toDelete.add(flowRepository.create(createTestingLogFlow(tenant, flowId, "???")));
Integer result = flowRepository.lastRevision(tenant, TEST_NAMESPACE, flowId);

// Then
assertThat(result).isEqualTo(1);
assertThat(flowRepository.lastRevision(TEST_TENANT_ID, TEST_NAMESPACE, flowId)).isEqualTo(1);
assertThat(flowRepository.lastRevision(tenant, TEST_NAMESPACE, flowId)).isEqualTo(1);
} finally {
toDelete.forEach(this::deleteFlow);
}
@@ -649,34 +434,36 @@ public abstract class AbstractFlowRepositoryTest {
@Test
protected void shouldIncrementRevisionOnDelete() {
// Given
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
final String flowId = IdUtils.create();
FlowWithSource created = flowRepository.create(createTestingLogFlow(flowId, "first"));
assertThat(flowRepository.findRevisions(TEST_TENANT_ID, TEST_NAMESPACE, flowId).size()).isEqualTo(1);
FlowWithSource created = flowRepository.create(createTestingLogFlow(tenant, flowId, "first"));
assertThat(flowRepository.findRevisions(tenant, TEST_NAMESPACE, flowId).size()).isEqualTo(1);

// When
flowRepository.delete(created);

// Then
assertThat(flowRepository.findRevisions(TEST_TENANT_ID, TEST_NAMESPACE, flowId).size()).isEqualTo(2);
|
||||
assertThat(flowRepository.findRevisions(tenant, TEST_NAMESPACE, flowId).size()).isEqualTo(2);
|
||||
}
|
||||
|
||||
@Test
|
||||
protected void shouldIncrementRevisionOnCreateAfterDelete() {
|
||||
// Given
|
||||
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
final List<Flow> toDelete = new ArrayList<>();
|
||||
final String flowId = IdUtils.create();
|
||||
try {
|
||||
// Given
|
||||
flowRepository.delete(
|
||||
flowRepository.create(createTestingLogFlow(flowId, "first"))
|
||||
flowRepository.create(createTestingLogFlow(tenant, flowId, "first"))
|
||||
);
|
||||
|
||||
// When
|
||||
toDelete.add(flowRepository.create(createTestingLogFlow(flowId, "second")));
|
||||
toDelete.add(flowRepository.create(createTestingLogFlow(tenant, flowId, "second")));
|
||||
|
||||
// Then
|
||||
assertThat(flowRepository.findRevisions(TEST_TENANT_ID, TEST_NAMESPACE, flowId).size()).isEqualTo(3);
|
||||
assertThat(flowRepository.lastRevision(TEST_TENANT_ID, TEST_NAMESPACE, flowId)).isEqualTo(3);
|
||||
assertThat(flowRepository.findRevisions(tenant, TEST_NAMESPACE, flowId).size()).isEqualTo(3);
|
||||
assertThat(flowRepository.lastRevision(tenant, TEST_NAMESPACE, flowId)).isEqualTo(3);
|
||||
} finally {
|
||||
toDelete.forEach(this::deleteFlow);
|
||||
}
|
||||
@@ -685,22 +472,23 @@ public abstract class AbstractFlowRepositoryTest {
|
||||
@Test
|
||||
protected void shouldReturnNullForLastRevisionAfterDelete() {
|
||||
// Given
|
||||
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
final List<Flow> toDelete = new ArrayList<>();
|
||||
final String flowId = IdUtils.create();
|
||||
try {
|
||||
// Given
|
||||
FlowWithSource created = flowRepository.create(createTestingLogFlow(flowId, "first"));
|
||||
FlowWithSource created = flowRepository.create(createTestingLogFlow(tenant, flowId, "first"));
|
||||
toDelete.add(created);
|
||||
|
||||
FlowWithSource updated = flowRepository.update(createTestingLogFlow(flowId, "second"), created);
|
||||
FlowWithSource updated = flowRepository.update(createTestingLogFlow(tenant, flowId, "second"), created);
|
||||
toDelete.add(updated);
|
||||
|
||||
// When
|
||||
flowRepository.delete(updated);
|
||||
|
||||
// Then
|
||||
assertThat(flowRepository.findById(TEST_TENANT_ID, TEST_NAMESPACE, flowId, Optional.empty())).isEqualTo(Optional.empty());
|
||||
assertThat(flowRepository.lastRevision(TEST_TENANT_ID, TEST_NAMESPACE, flowId)).isNull();
|
||||
assertThat(flowRepository.findById(tenant, TEST_NAMESPACE, flowId, Optional.empty())).isEqualTo(Optional.empty());
|
||||
assertThat(flowRepository.lastRevision(tenant, TEST_NAMESPACE, flowId)).isNull();
|
||||
} finally {
|
||||
toDelete.forEach(this::deleteFlow);
|
||||
}
|
||||
@@ -709,22 +497,23 @@ public abstract class AbstractFlowRepositoryTest {
|
||||
@Test
|
||||
protected void shouldFindAllRevisionsAfterDelete() {
|
||||
// Given
|
||||
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
final List<Flow> toDelete = new ArrayList<>();
|
||||
final String flowId = IdUtils.create();
|
||||
try {
|
||||
// Given
|
||||
FlowWithSource created = flowRepository.create(createTestingLogFlow(flowId, "first"));
|
||||
FlowWithSource created = flowRepository.create(createTestingLogFlow(tenant, flowId, "first"));
|
||||
toDelete.add(created);
|
||||
|
||||
FlowWithSource updated = flowRepository.update(createTestingLogFlow(flowId, "second"), created);
|
||||
FlowWithSource updated = flowRepository.update(createTestingLogFlow(tenant, flowId, "second"), created);
|
||||
toDelete.add(updated);
|
||||
|
||||
// When
|
||||
flowRepository.delete(updated);
|
||||
|
||||
// Then
|
||||
assertThat(flowRepository.findById(TEST_TENANT_ID, TEST_NAMESPACE, flowId, Optional.empty())).isEqualTo(Optional.empty());
|
||||
assertThat(flowRepository.findRevisions(TEST_TENANT_ID, TEST_NAMESPACE, flowId).size()).isEqualTo(3);
|
||||
assertThat(flowRepository.findById(tenant, TEST_NAMESPACE, flowId, Optional.empty())).isEqualTo(Optional.empty());
|
||||
assertThat(flowRepository.findRevisions(tenant, TEST_NAMESPACE, flowId).size()).isEqualTo(3);
|
||||
} finally {
|
||||
toDelete.forEach(this::deleteFlow);
|
||||
}
|
||||
@@ -732,21 +521,22 @@ public abstract class AbstractFlowRepositoryTest {
|
||||
|
||||
@Test
|
||||
protected void shouldIncrementRevisionOnUpdateGivenNotEqualSource() {
|
||||
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
final List<Flow> toDelete = new ArrayList<>();
|
||||
final String flowId = IdUtils.create();
|
||||
try {
|
||||
|
||||
// Given
|
||||
FlowWithSource created = flowRepository.create(createTestingLogFlow(flowId, "first"));
|
||||
FlowWithSource created = flowRepository.create(createTestingLogFlow(tenant, flowId, "first"));
|
||||
toDelete.add(created);
|
||||
|
||||
// When
|
||||
FlowWithSource updated = flowRepository.update(createTestingLogFlow(flowId, "second"), created);
|
||||
FlowWithSource updated = flowRepository.update(createTestingLogFlow(tenant, flowId, "second"), created);
|
||||
toDelete.add(updated);
|
||||
|
||||
// Then
|
||||
assertThat(updated.getRevision()).isEqualTo(2);
|
||||
assertThat(flowRepository.lastRevision(TEST_TENANT_ID, TEST_NAMESPACE, flowId)).isEqualTo(2);
|
||||
assertThat(flowRepository.lastRevision(tenant, TEST_NAMESPACE, flowId)).isEqualTo(2);
|
||||
|
||||
} finally {
|
||||
toDelete.forEach(this::deleteFlow);
|
||||
@@ -755,48 +545,39 @@ public abstract class AbstractFlowRepositoryTest {
|
||||
|
||||
@Test
|
||||
protected void shouldNotIncrementRevisionOnUpdateGivenEqualSource() {
|
||||
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
final List<Flow> toDelete = new ArrayList<>();
|
||||
final String flowId = IdUtils.create();
|
||||
try {
|
||||
|
||||
// Given
|
||||
FlowWithSource created = flowRepository.create(createTestingLogFlow(flowId, "first"));
|
||||
FlowWithSource created = flowRepository.create(createTestingLogFlow(tenant, flowId, "first"));
|
||||
toDelete.add(created);
|
||||
|
||||
// When
|
||||
FlowWithSource updated = flowRepository.update(createTestingLogFlow(flowId, "first"), created);
|
||||
FlowWithSource updated = flowRepository.update(createTestingLogFlow(tenant, flowId, "first"), created);
|
||||
toDelete.add(updated);
|
||||
|
||||
// Then
|
||||
assertThat(updated.getRevision()).isEqualTo(1);
|
||||
assertThat(flowRepository.lastRevision(TEST_TENANT_ID, TEST_NAMESPACE, flowId)).isEqualTo(1);
|
||||
assertThat(flowRepository.lastRevision(tenant, TEST_NAMESPACE, flowId)).isEqualTo(1);
|
||||
|
||||
} finally {
|
||||
toDelete.forEach(this::deleteFlow);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldReturnForGivenQueryWildCardFilters() {
|
||||
List<QueryFilter> filters = List.of(
|
||||
QueryFilter.builder().field(QueryFilter.Field.QUERY).operation(QueryFilter.Op.EQUALS).value("*").build()
|
||||
);
|
||||
ArrayListTotal<Flow> flows = flowRepository.find(Pageable.from(1, 10), MAIN_TENANT, filters);
|
||||
assertThat(flows.size()).isEqualTo(10);
|
||||
assertThat(flows.getTotal()).isEqualTo(Helpers.FLOWS_COUNT);
|
||||
}
|
||||
|
||||
@Test
|
||||
void findByExecution() {
|
||||
Flow flow = builder()
|
||||
.tenantId(MAIN_TENANT)
|
||||
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
Flow flow = builder(tenant)
|
||||
.revision(1)
|
||||
.build();
|
||||
flowRepository.create(GenericFlow.of(flow));
|
||||
Execution execution = Execution.builder()
|
||||
.id(IdUtils.create())
|
||||
.namespace(flow.getNamespace())
|
||||
.tenantId(MAIN_TENANT)
|
||||
.tenantId(tenant)
|
||||
.flowId(flow.getId())
|
||||
.flowRevision(flow.getRevision())
|
||||
.state(new State())
|
||||
@@ -821,11 +602,13 @@ public abstract class AbstractFlowRepositoryTest {
|
||||
|
||||
@Test
|
||||
void findByExecutionNoRevision() {
|
||||
Flow flow = builder()
|
||||
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
Flow flow = builder(tenant)
|
||||
.revision(3)
|
||||
.build();
|
||||
flowRepository.create(GenericFlow.of(flow));
|
||||
Execution execution = Execution.builder()
|
||||
.tenantId(tenant)
|
||||
.id(IdUtils.create())
|
||||
.namespace(flow.getNamespace())
|
||||
.flowId(flow.getId())
|
||||
@@ -851,13 +634,14 @@ public abstract class AbstractFlowRepositoryTest {
|
||||
|
||||
@Test
|
||||
void shouldCountForNullTenant() {
|
||||
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
FlowWithSource toDelete = null;
|
||||
try {
|
||||
// Given
|
||||
Flow flow = createTestFlowForNamespace(TEST_NAMESPACE);
|
||||
Flow flow = createTestFlowForNamespace(tenant, TEST_NAMESPACE);
|
||||
toDelete = flowRepository.create(GenericFlow.of(flow));
|
||||
// When
|
||||
int count = flowRepository.count(MAIN_TENANT);
|
||||
int count = flowRepository.count(tenant);
|
||||
|
||||
// Then
|
||||
Assertions.assertTrue(count > 0);
|
||||
@@ -868,11 +652,11 @@ public abstract class AbstractFlowRepositoryTest {
|
||||
}
|
||||
}
|
||||
|
||||
private static Flow createTestFlowForNamespace(String namespace) {
|
||||
private static Flow createTestFlowForNamespace(String tenantId, String namespace) {
|
||||
return Flow.builder()
|
||||
.id(IdUtils.create())
|
||||
.namespace(namespace)
|
||||
.tenantId(MAIN_TENANT)
|
||||
.tenantId(tenantId)
|
||||
.tasks(List.of(Return.builder()
|
||||
.id(IdUtils.create())
|
||||
.type(Return.class.getName())
|
||||
@@ -891,21 +675,31 @@ public abstract class AbstractFlowRepositoryTest {
|
||||
}
|
||||
|
||||
@Singleton
|
||||
public static class FlowListener implements ApplicationEventListener<CrudEvent<Flow>> {
|
||||
@Getter
|
||||
private static List<CrudEvent<Flow>> emits = new ArrayList<>();
|
||||
public static class FlowListener implements ApplicationEventListener<CrudEvent<AbstractFlow>> {
|
||||
private static List<CrudEvent<AbstractFlow>> emits = new CopyOnWriteArrayList<>();
|
||||
|
||||
@Override
|
||||
public void onApplicationEvent(CrudEvent<Flow> event) {
|
||||
emits.add(event);
|
||||
public void onApplicationEvent(CrudEvent<AbstractFlow> event) {
|
||||
//This has to be done because Micronaut may send CrudEvent<Setting> for example, and we don't want them.
|
||||
if ((event.getModel() != null && event.getModel() instanceof AbstractFlow)||
|
||||
(event.getPreviousModel() != null && event.getPreviousModel() instanceof AbstractFlow)) {
|
||||
emits.add(event);
|
||||
}
|
||||
}
|
||||
|
||||
public static void reset() {
|
||||
emits = new ArrayList<>();
|
||||
emits = new CopyOnWriteArrayList<>();
|
||||
}
|
||||
|
||||
public static List<CrudEvent<AbstractFlow>> filterByTenant(String tenantId){
|
||||
return emits.stream()
|
||||
.filter(e -> (e.getPreviousModel() != null && e.getPreviousModel().getTenantId().equals(tenantId)) ||
|
||||
(e.getModel() != null && e.getModel().getTenantId().equals(tenantId)))
|
||||
.toList();
|
||||
}
|
||||
}
|
||||
|
||||
private static GenericFlow createTestingLogFlow(String id, String logMessage) {
|
||||
private static GenericFlow createTestingLogFlow(String tenantId, String id, String logMessage) {
|
||||
String source = """
|
||||
id: %s
|
||||
namespace: %s
|
||||
@@ -914,7 +708,7 @@ public abstract class AbstractFlowRepositoryTest {
|
||||
type: io.kestra.plugin.core.log.Log
|
||||
message: %s
|
||||
""".formatted(id, TEST_NAMESPACE, logMessage);
|
||||
return GenericFlow.fromYaml(TEST_TENANT_ID, source);
|
||||
return GenericFlow.fromYaml(tenantId, source);
|
||||
}
|
||||
|
||||
protected static int COUNTER = 0;
|
||||
|
||||
@@ -4,7 +4,7 @@ import io.kestra.core.models.topologies.FlowNode;
import io.kestra.core.models.topologies.FlowRelation;
import io.kestra.core.models.topologies.FlowTopology;
import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.tenant.TenantService;
import io.kestra.core.utils.TestsUtils;
import jakarta.inject.Inject;
import org.junit.jupiter.api.Test;

@@ -17,21 +17,21 @@ public abstract class AbstractFlowTopologyRepositoryTest {
@Inject
private FlowTopologyRepositoryInterface flowTopologyRepository;

protected FlowTopology createSimpleFlowTopology(String flowA, String flowB, String namespace) {
protected FlowTopology createSimpleFlowTopology(String tenantId, String flowA, String flowB, String namespace) {
return FlowTopology.builder()
.relation(FlowRelation.FLOW_TASK)
.source(FlowNode.builder()
.id(flowA)
.namespace(namespace)
.tenantId(TenantService.MAIN_TENANT)
.uid(flowA)
.tenantId(tenantId)
.uid(tenantId + flowA)
.build()
)
.destination(FlowNode.builder()
.id(flowB)
.namespace(namespace)
.tenantId(TenantService.MAIN_TENANT)
.uid(flowB)
.tenantId(tenantId)
.uid(tenantId + flowB)
.build()
)
.build();
@@ -39,42 +39,45 @@ public abstract class AbstractFlowTopologyRepositoryTest {

@Test
void findByFlow() {
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
flowTopologyRepository.save(
createSimpleFlowTopology("flow-a", "flow-b", "io.kestra.tests")
createSimpleFlowTopology(tenant, "flow-a", "flow-b", "io.kestra.tests")
);

List<FlowTopology> list = flowTopologyRepository.findByFlow(TenantService.MAIN_TENANT, "io.kestra.tests", "flow-a", false);
List<FlowTopology> list = flowTopologyRepository.findByFlow(tenant, "io.kestra.tests", "flow-a", false);

assertThat(list.size()).isEqualTo(1);
}

@Test
void findByNamespace() {
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
flowTopologyRepository.save(
createSimpleFlowTopology("flow-a", "flow-b", "io.kestra.tests")
createSimpleFlowTopology(tenant, "flow-a", "flow-b", "io.kestra.tests")
);
flowTopologyRepository.save(
createSimpleFlowTopology("flow-c", "flow-d", "io.kestra.tests")
createSimpleFlowTopology(tenant, "flow-c", "flow-d", "io.kestra.tests")
);

List<FlowTopology> list = flowTopologyRepository.findByNamespace(TenantService.MAIN_TENANT, "io.kestra.tests");
List<FlowTopology> list = flowTopologyRepository.findByNamespace(tenant, "io.kestra.tests");

assertThat(list.size()).isEqualTo(2);
}

@Test
void findAll() {
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
flowTopologyRepository.save(
createSimpleFlowTopology("flow-a", "flow-b", "io.kestra.tests")
createSimpleFlowTopology(tenant, "flow-a", "flow-b", "io.kestra.tests")
);
flowTopologyRepository.save(
createSimpleFlowTopology("flow-c", "flow-d", "io.kestra.tests")
createSimpleFlowTopology(tenant, "flow-c", "flow-d", "io.kestra.tests")
);
flowTopologyRepository.save(
createSimpleFlowTopology("flow-e", "flow-f", "io.kestra.tests.2")
createSimpleFlowTopology(tenant, "flow-e", "flow-f", "io.kestra.tests.2")
);

List<FlowTopology> list = flowTopologyRepository.findAll(TenantService.MAIN_TENANT);
List<FlowTopology> list = flowTopologyRepository.findAll(tenant);

assertThat(list.size()).isEqualTo(3);
}
@@ -13,6 +13,7 @@ import io.kestra.core.models.executions.LogEntry;
|
||||
import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.repositories.ExecutionRepositoryInterface.ChildFilter;
|
||||
import io.kestra.core.utils.IdUtils;
|
||||
import io.kestra.core.utils.TestsUtils;
|
||||
import io.kestra.plugin.core.dashboard.data.Logs;
|
||||
import io.micronaut.data.model.Pageable;
|
||||
import jakarta.inject.Inject;
|
||||
@@ -32,9 +33,7 @@ import java.util.stream.Stream;
|
||||
|
||||
import static io.kestra.core.models.flows.FlowScope.SYSTEM;
|
||||
import static io.kestra.core.models.flows.FlowScope.USER;
|
||||
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.assertj.core.api.Assertions.assertThatReflectiveOperationException;
|
||||
import static org.junit.jupiter.api.Assertions.assertThrows;
|
||||
|
||||
@KestraTest
|
||||
@@ -42,11 +41,11 @@ public abstract class AbstractLogRepositoryTest {
|
||||
@Inject
|
||||
protected LogRepositoryInterface logRepository;
|
||||
|
||||
protected static LogEntry.LogEntryBuilder logEntry(Level level) {
|
||||
return logEntry(level, IdUtils.create());
|
||||
protected static LogEntry.LogEntryBuilder logEntry(String tenantId, Level level) {
|
||||
return logEntry(tenantId, level, IdUtils.create());
|
||||
}
|
||||
|
||||
protected static LogEntry.LogEntryBuilder logEntry(Level level, String executionId) {
|
||||
protected static LogEntry.LogEntryBuilder logEntry(String tenantId, Level level, String executionId) {
|
||||
return LogEntry.builder()
|
||||
.flowId("flowId")
|
||||
.namespace("io.kestra.unittest")
|
||||
@@ -57,7 +56,7 @@ public abstract class AbstractLogRepositoryTest {
|
||||
.timestamp(Instant.now())
|
||||
.level(level)
|
||||
.thread("")
|
||||
.tenantId(MAIN_TENANT)
|
||||
.tenantId(tenantId)
|
||||
.triggerId("triggerId")
|
||||
.message("john doe");
|
||||
}
|
||||
@@ -65,9 +64,10 @@ public abstract class AbstractLogRepositoryTest {
|
||||
@ParameterizedTest
|
||||
@MethodSource("filterCombinations")
|
||||
void should_find_all(QueryFilter filter){
|
||||
logRepository.save(logEntry(Level.INFO, "executionId").build());
|
||||
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
logRepository.save(logEntry(tenant, Level.INFO, "executionId").build());
|
||||
|
||||
ArrayListTotal<LogEntry> entries = logRepository.find(Pageable.UNPAGED, MAIN_TENANT, List.of(filter));
|
||||
ArrayListTotal<LogEntry> entries = logRepository.find(Pageable.UNPAGED, tenant, List.of(filter));
|
||||
|
||||
assertThat(entries).hasSize(1);
|
||||
}
|
||||
@@ -75,9 +75,10 @@ public abstract class AbstractLogRepositoryTest {
|
||||
@ParameterizedTest
|
||||
@MethodSource("filterCombinations")
|
||||
void should_find_async(QueryFilter filter){
|
||||
logRepository.save(logEntry(Level.INFO, "executionId").build());
|
||||
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
logRepository.save(logEntry(tenant, Level.INFO, "executionId").build());
|
||||
|
||||
Flux<LogEntry> find = logRepository.findAsync(MAIN_TENANT, List.of(filter));
|
||||
Flux<LogEntry> find = logRepository.findAsync(tenant, List.of(filter));
|
||||
|
||||
List<LogEntry> logEntries = find.collectList().block();
|
||||
assertThat(logEntries).hasSize(1);
|
||||
@@ -86,11 +87,12 @@ public abstract class AbstractLogRepositoryTest {
|
||||
@ParameterizedTest
|
||||
@MethodSource("filterCombinations")
|
||||
void should_delete_with_filter(QueryFilter filter){
|
||||
logRepository.save(logEntry(Level.INFO, "executionId").build());
|
||||
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
logRepository.save(logEntry(tenant, Level.INFO, "executionId").build());
|
||||
|
||||
logRepository.deleteByFilters(MAIN_TENANT, List.of(filter));
|
||||
logRepository.deleteByFilters(tenant, List.of(filter));
|
||||
|
||||
assertThat(logRepository.findAllAsync(MAIN_TENANT).collectList().block()).isEmpty();
|
||||
assertThat(logRepository.findAllAsync(tenant).collectList().block()).isEmpty();
|
||||
}
|
||||
|
||||
|
||||
@@ -150,7 +152,10 @@ public abstract class AbstractLogRepositoryTest {
|
||||
void should_fail_to_find_all(QueryFilter filter){
|
||||
assertThrows(
|
||||
InvalidQueryFiltersException.class,
|
||||
() -> logRepository.find(Pageable.UNPAGED, MAIN_TENANT, List.of(filter)));
|
||||
() -> logRepository.find(
|
||||
Pageable.UNPAGED,
|
||||
TestsUtils.randomTenant(this.getClass().getSimpleName()),
|
||||
List.of(filter)));
|
||||
|
||||
}
|
||||
|
||||
@@ -168,16 +173,17 @@ public abstract class AbstractLogRepositoryTest {
|
||||
|
||||
@Test
|
||||
void all() {
|
||||
LogEntry.LogEntryBuilder builder = logEntry(Level.INFO);
|
||||
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
LogEntry.LogEntryBuilder builder = logEntry(tenant, Level.INFO);
|
||||
|
||||
ArrayListTotal<LogEntry> find = logRepository.find(Pageable.UNPAGED, MAIN_TENANT, null);
|
||||
ArrayListTotal<LogEntry> find = logRepository.find(Pageable.UNPAGED, tenant, null);
|
||||
assertThat(find.size()).isZero();
|
||||
|
||||
|
||||
LogEntry save = logRepository.save(builder.build());
|
||||
logRepository.save(builder.executionKind(ExecutionKind.TEST).build()); // should only be loaded by execution id
|
||||
|
||||
find = logRepository.find(Pageable.UNPAGED, MAIN_TENANT, null);
|
||||
find = logRepository.find(Pageable.UNPAGED, tenant, null);
|
||||
assertThat(find.size()).isEqualTo(1);
|
||||
assertThat(find.getFirst().getExecutionId()).isEqualTo(save.getExecutionId());
|
||||
var filters = List.of(QueryFilter.builder()
|
||||
@@ -193,7 +199,7 @@ public abstract class AbstractLogRepositoryTest {
|
||||
find = logRepository.find(Pageable.UNPAGED, "doe", filters);
|
||||
assertThat(find.size()).isZero();
|
||||
|
||||
find = logRepository.find(Pageable.UNPAGED, MAIN_TENANT, null);
|
||||
find = logRepository.find(Pageable.UNPAGED, tenant, null);
|
||||
assertThat(find.size()).isEqualTo(1);
|
||||
assertThat(find.getFirst().getExecutionId()).isEqualTo(save.getExecutionId());
|
||||
|
||||
@@ -201,141 +207,146 @@ public abstract class AbstractLogRepositoryTest {
|
||||
assertThat(find.size()).isEqualTo(1);
|
||||
assertThat(find.getFirst().getExecutionId()).isEqualTo(save.getExecutionId());
|
||||
|
||||
List<LogEntry> list = logRepository.findByExecutionId(MAIN_TENANT, save.getExecutionId(), null);
|
||||
List<LogEntry> list = logRepository.findByExecutionId(tenant, save.getExecutionId(), null);
|
||||
assertThat(list.size()).isEqualTo(2);
|
||||
assertThat(list.getFirst().getExecutionId()).isEqualTo(save.getExecutionId());
|
||||
|
||||
list = logRepository.findByExecutionId(MAIN_TENANT, "io.kestra.unittest", "flowId", save.getExecutionId(), null);
|
||||
list = logRepository.findByExecutionId(tenant, "io.kestra.unittest", "flowId", save.getExecutionId(), null);
|
||||
assertThat(list.size()).isEqualTo(2);
|
||||
assertThat(list.getFirst().getExecutionId()).isEqualTo(save.getExecutionId());
|
||||
|
||||
list = logRepository.findByExecutionIdAndTaskId(MAIN_TENANT, save.getExecutionId(), save.getTaskId(), null);
|
||||
list = logRepository.findByExecutionIdAndTaskId(tenant, save.getExecutionId(), save.getTaskId(), null);
|
||||
assertThat(list.size()).isEqualTo(2);
|
||||
assertThat(list.getFirst().getExecutionId()).isEqualTo(save.getExecutionId());
|
||||
|
||||
list = logRepository.findByExecutionIdAndTaskId(MAIN_TENANT, "io.kestra.unittest", "flowId", save.getExecutionId(), save.getTaskId(), null);
|
||||
list = logRepository.findByExecutionIdAndTaskId(tenant, "io.kestra.unittest", "flowId", save.getExecutionId(), save.getTaskId(), null);
|
||||
assertThat(list.size()).isEqualTo(2);
|
||||
assertThat(list.getFirst().getExecutionId()).isEqualTo(save.getExecutionId());
|
||||
|
||||
list = logRepository.findByExecutionIdAndTaskRunId(MAIN_TENANT, save.getExecutionId(), save.getTaskRunId(), null);
|
||||
list = logRepository.findByExecutionIdAndTaskRunId(tenant, save.getExecutionId(), save.getTaskRunId(), null);
|
||||
assertThat(list.size()).isEqualTo(2);
|
||||
assertThat(list.getFirst().getExecutionId()).isEqualTo(save.getExecutionId());
|
||||
|
||||
list = logRepository.findByExecutionIdAndTaskRunIdAndAttempt(MAIN_TENANT, save.getExecutionId(), save.getTaskRunId(), null, 0);
|
||||
list = logRepository.findByExecutionIdAndTaskRunIdAndAttempt(tenant, save.getExecutionId(), save.getTaskRunId(), null, 0);
|
||||
assertThat(list.size()).isEqualTo(2);
|
||||
assertThat(list.getFirst().getExecutionId()).isEqualTo(save.getExecutionId());
|
||||
|
||||
Integer countDeleted = logRepository.purge(Execution.builder().id(save.getExecutionId()).build());
|
||||
assertThat(countDeleted).isEqualTo(2);
|
||||
|
||||
list = logRepository.findByExecutionIdAndTaskId(MAIN_TENANT, save.getExecutionId(), save.getTaskId(), null);
|
||||
list = logRepository.findByExecutionIdAndTaskId(tenant, save.getExecutionId(), save.getTaskId(), null);
|
||||
assertThat(list.size()).isZero();
|
||||
}
|
||||
|
||||
@Test
|
||||
void pageable() {
|
||||
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
String executionId = "123";
|
||||
LogEntry.LogEntryBuilder builder = logEntry(Level.INFO);
|
||||
LogEntry.LogEntryBuilder builder = logEntry(tenant, Level.INFO);
|
||||
builder.executionId(executionId);
|
||||
|
||||
for (int i = 0; i < 80; i++) {
|
||||
logRepository.save(builder.build());
|
||||
}
|
||||
|
||||
builder = logEntry(Level.INFO).executionId(executionId).taskId("taskId2").taskRunId("taskRunId2");
|
||||
builder = logEntry(tenant, Level.INFO).executionId(executionId).taskId("taskId2").taskRunId("taskRunId2");
|
||||
LogEntry logEntry2 = logRepository.save(builder.build());
|
||||
for (int i = 0; i < 20; i++) {
|
||||
logRepository.save(builder.build());
|
||||
}
|
||||
|
||||
ArrayListTotal<LogEntry> find = logRepository.findByExecutionId(MAIN_TENANT, executionId, null, Pageable.from(1, 50));
|
||||
ArrayListTotal<LogEntry> find = logRepository.findByExecutionId(tenant, executionId, null, Pageable.from(1, 50));
|
||||
|
||||
assertThat(find.size()).isEqualTo(50);
|
||||
assertThat(find.getTotal()).isEqualTo(101L);
|
||||
|
||||
find = logRepository.findByExecutionId(MAIN_TENANT, executionId, null, Pageable.from(3, 50));
|
||||
find = logRepository.findByExecutionId(tenant, executionId, null, Pageable.from(3, 50));
|
||||
|
||||
assertThat(find.size()).isEqualTo(1);
|
||||
assertThat(find.getTotal()).isEqualTo(101L);
|
||||
|
||||
find = logRepository.findByExecutionIdAndTaskId(MAIN_TENANT, executionId, logEntry2.getTaskId(), null, Pageable.from(1, 50));
|
||||
find = logRepository.findByExecutionIdAndTaskId(tenant, executionId, logEntry2.getTaskId(), null, Pageable.from(1, 50));
|
||||
|
||||
assertThat(find.size()).isEqualTo(21);
|
||||
assertThat(find.getTotal()).isEqualTo(21L);
|
||||
|
||||
find = logRepository.findByExecutionIdAndTaskRunId(MAIN_TENANT, executionId, logEntry2.getTaskRunId(), null, Pageable.from(1, 10));
|
||||
find = logRepository.findByExecutionIdAndTaskRunId(tenant, executionId, logEntry2.getTaskRunId(), null, Pageable.from(1, 10));
|
||||
|
||||
assertThat(find.size()).isEqualTo(10);
|
||||
assertThat(find.getTotal()).isEqualTo(21L);
|
||||
|
||||
find = logRepository.findByExecutionIdAndTaskRunIdAndAttempt(MAIN_TENANT, executionId, logEntry2.getTaskRunId(), null, 0, Pageable.from(1, 10));
|
||||
find = logRepository.findByExecutionIdAndTaskRunIdAndAttempt(tenant, executionId, logEntry2.getTaskRunId(), null, 0, Pageable.from(1, 10));
|
||||
|
||||
assertThat(find.size()).isEqualTo(10);
|
||||
assertThat(find.getTotal()).isEqualTo(21L);
|
||||
|
||||
find = logRepository.findByExecutionIdAndTaskRunId(MAIN_TENANT, executionId, logEntry2.getTaskRunId(), null, Pageable.from(10, 10));
|
||||
find = logRepository.findByExecutionIdAndTaskRunId(tenant, executionId, logEntry2.getTaskRunId(), null, Pageable.from(10, 10));
|
||||
|
||||
assertThat(find.size()).isZero();
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldFindByExecutionIdTestLogs() {
|
||||
var builder = logEntry(Level.INFO).executionId("123").executionKind(ExecutionKind.TEST).build();
|
||||
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
var builder = logEntry(tenant, Level.INFO).executionId("123").executionKind(ExecutionKind.TEST).build();
|
||||
logRepository.save(builder);
|
||||
|
||||
List<LogEntry> logs = logRepository.findByExecutionId(MAIN_TENANT, builder.getExecutionId(), null);
|
||||
List<LogEntry> logs = logRepository.findByExecutionId(tenant, builder.getExecutionId(), null);
|
||||
assertThat(logs).hasSize(1);
|
||||
}
|
||||
|
||||
@Test
|
||||
void deleteByQuery() {
|
||||
LogEntry log1 = logEntry(Level.INFO).build();
|
||||
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
LogEntry log1 = logEntry(tenant, Level.INFO).build();
|
||||
logRepository.save(log1);
|
||||
|
||||
logRepository.deleteByQuery(MAIN_TENANT, log1.getExecutionId(), null, null, null, null);
|
||||
logRepository.deleteByQuery(tenant, log1.getExecutionId(), null, null, null, null);
|
||||
|
||||
ArrayListTotal<LogEntry> find = logRepository.findByExecutionId(MAIN_TENANT, log1.getExecutionId(), null, Pageable.from(1, 50));
|
||||
ArrayListTotal<LogEntry> find = logRepository.findByExecutionId(tenant, log1.getExecutionId(), null, Pageable.from(1, 50));
|
||||
assertThat(find.size()).isZero();
|
||||
|
||||
logRepository.save(log1);
|
||||
|
||||
logRepository.deleteByQuery(MAIN_TENANT, "io.kestra.unittest", "flowId", null, List.of(Level.TRACE, Level.DEBUG, Level.INFO), null, ZonedDateTime.now().plusMinutes(1));
|
||||
logRepository.deleteByQuery(tenant, "io.kestra.unittest", "flowId", null, List.of(Level.TRACE, Level.DEBUG, Level.INFO), null, ZonedDateTime.now().plusMinutes(1));
|
||||
|
||||
find = logRepository.findByExecutionId(MAIN_TENANT, log1.getExecutionId(), null, Pageable.from(1, 50));
|
||||
find = logRepository.findByExecutionId(tenant, log1.getExecutionId(), null, Pageable.from(1, 50));
|
||||
assertThat(find.size()).isZero();
|
||||
|
||||
logRepository.save(log1);
|
||||
|
||||
logRepository.deleteByQuery(MAIN_TENANT, "io.kestra.unittest", "flowId", null);
|
||||
logRepository.deleteByQuery(tenant, "io.kestra.unittest", "flowId", null);
|
||||
|
||||
find = logRepository.findByExecutionId(MAIN_TENANT, log1.getExecutionId(), null, Pageable.from(1, 50));
|
||||
find = logRepository.findByExecutionId(tenant, log1.getExecutionId(), null, Pageable.from(1, 50));
|
||||
assertThat(find.size()).isZero();
|
||||
|
||||
logRepository.save(log1);
|
||||
|
||||
logRepository.deleteByQuery(MAIN_TENANT, null, null, log1.getExecutionId(), List.of(Level.TRACE, Level.DEBUG, Level.INFO), null, ZonedDateTime.now().plusMinutes(1));
|
||||
logRepository.deleteByQuery(tenant, null, null, log1.getExecutionId(), List.of(Level.TRACE, Level.DEBUG, Level.INFO), null, ZonedDateTime.now().plusMinutes(1));
|
||||
|
||||
find = logRepository.findByExecutionId(MAIN_TENANT, log1.getExecutionId(), null, Pageable.from(1, 50));
|
||||
find = logRepository.findByExecutionId(tenant, log1.getExecutionId(), null, Pageable.from(1, 50));
|
||||
assertThat(find.size()).isZero();
|
||||
}
|
||||
|
||||
@Test
|
||||
void findAllAsync() {
|
||||
logRepository.save(logEntry(Level.INFO).build());
|
||||
logRepository.save(logEntry(Level.INFO).executionKind(ExecutionKind.TEST).build()); // should be present as it's used for backup
|
||||
logRepository.save(logEntry(Level.ERROR).build());
|
||||
logRepository.save(logEntry(Level.WARN).build());
|
||||
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
logRepository.save(logEntry(tenant, Level.INFO).build());
|
||||
logRepository.save(logEntry(tenant, Level.INFO).executionKind(ExecutionKind.TEST).build()); // should be present as it's used for backup
|
||||
logRepository.save(logEntry(tenant, Level.ERROR).build());
|
||||
logRepository.save(logEntry(tenant, Level.WARN).build());
|
||||
|
||||
Flux<LogEntry> find = logRepository.findAllAsync(MAIN_TENANT);
|
||||
Flux<LogEntry> find = logRepository.findAllAsync(tenant);
|
||||
List<LogEntry> logEntries = find.collectList().block();
|
||||
assertThat(logEntries).hasSize(4);
|
||||
}
|
||||
|
||||
@Test
|
||||
void fetchData() throws IOException {
|
||||
logRepository.save(logEntry(Level.INFO).build());
|
||||
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
logRepository.save(logEntry(tenant, Level.INFO).build());
|
||||
|
||||
var results = logRepository.fetchData(MAIN_TENANT,
|
||||
var results = logRepository.fetchData(tenant,
|
||||
Logs.builder()
|
||||
.type(Logs.class.getName())
|
||||
.columns(Map.of(
|
||||
|
||||
@@ -7,6 +7,7 @@ import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.executions.metrics.Counter;
import io.kestra.core.models.executions.metrics.MetricAggregations;
import io.kestra.core.models.executions.metrics.Timer;
import io.kestra.core.utils.TestsUtils;
import io.micronaut.data.model.Pageable;
import io.kestra.core.junit.annotations.KestraTest;
import jakarta.inject.Inject;
@@ -25,27 +26,28 @@ public abstract class AbstractMetricRepositoryTest {

@Test
void all() {
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
String executionId = FriendlyId.createFriendlyId();
TaskRun taskRun1 = taskRun(executionId, "task");
TaskRun taskRun1 = taskRun(tenant, executionId, "task");
MetricEntry counter = MetricEntry.of(taskRun1, counter("counter"), null);
MetricEntry testCounter = MetricEntry.of(taskRun1, counter("test"), ExecutionKind.TEST);
TaskRun taskRun2 = taskRun(executionId, "task");
TaskRun taskRun2 = taskRun(tenant, executionId, "task");
MetricEntry timer = MetricEntry.of(taskRun2, timer(), null);
metricRepository.save(counter);
metricRepository.save(testCounter); // should only be retrieved by execution id
metricRepository.save(timer);

List<MetricEntry> results = metricRepository.findByExecutionId(null, executionId, Pageable.from(1, 10));
List<MetricEntry> results = metricRepository.findByExecutionId(tenant, executionId, Pageable.from(1, 10));
assertThat(results.size()).isEqualTo(3);

results = metricRepository.findByExecutionIdAndTaskId(null, executionId, taskRun1.getTaskId(), Pageable.from(1, 10));
results = metricRepository.findByExecutionIdAndTaskId(tenant, executionId, taskRun1.getTaskId(), Pageable.from(1, 10));
assertThat(results.size()).isEqualTo(3);

results = metricRepository.findByExecutionIdAndTaskRunId(null, executionId, taskRun1.getId(), Pageable.from(1, 10));
results = metricRepository.findByExecutionIdAndTaskRunId(tenant, executionId, taskRun1.getId(), Pageable.from(1, 10));
assertThat(results.size()).isEqualTo(2);

MetricAggregations aggregationResults = metricRepository.aggregateByFlowId(
null,
tenant,
"namespace",
"flow",
null,
@@ -59,7 +61,7 @@ public abstract class AbstractMetricRepositoryTest {
assertThat(aggregationResults.getGroupBy()).isEqualTo("day");

aggregationResults = metricRepository.aggregateByFlowId(
null,
tenant,
"namespace",
"flow",
null,
@@ -76,11 +78,12 @@ public abstract class AbstractMetricRepositoryTest {

@Test
void names() {
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
String executionId = FriendlyId.createFriendlyId();
TaskRun taskRun1 = taskRun(executionId, "task");
TaskRun taskRun1 = taskRun(tenant, executionId, "task");
MetricEntry counter = MetricEntry.of(taskRun1, counter("counter"), null);

TaskRun taskRun2 = taskRun(executionId, "task2");
TaskRun taskRun2 = taskRun(tenant, executionId, "task2");
MetricEntry counter2 = MetricEntry.of(taskRun2, counter("counter2"), null);

MetricEntry test = MetricEntry.of(taskRun2, counter("test"), ExecutionKind.TEST);
@@ -90,9 +93,9 @@ public abstract class AbstractMetricRepositoryTest {
metricRepository.save(test); // should only be retrieved by execution id

List<String> flowMetricsNames = metricRepository.flowMetrics(null, "namespace", "flow");
List<String> taskMetricsNames = metricRepository.taskMetrics(null, "namespace", "flow", "task");
List<String> tasksWithMetrics = metricRepository.tasksWithMetrics(null, "namespace", "flow");
List<String> flowMetricsNames = metricRepository.flowMetrics(tenant, "namespace", "flow");
List<String> taskMetricsNames = metricRepository.taskMetrics(tenant, "namespace", "flow", "task");
List<String> tasksWithMetrics = metricRepository.tasksWithMetrics(tenant, "namespace", "flow");

assertThat(flowMetricsNames.size()).isEqualTo(2);
assertThat(taskMetricsNames.size()).isEqualTo(1);
@@ -101,17 +104,18 @@ public abstract class AbstractMetricRepositoryTest {

@Test
void findAllAsync() {
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
String executionId = FriendlyId.createFriendlyId();
TaskRun taskRun1 = taskRun(executionId, "task");
TaskRun taskRun1 = taskRun(tenant, executionId, "task");
MetricEntry counter = MetricEntry.of(taskRun1, counter("counter"), null);
TaskRun taskRun2 = taskRun(executionId, "task");
TaskRun taskRun2 = taskRun(tenant, executionId, "task");
MetricEntry timer = MetricEntry.of(taskRun2, timer(), null);
MetricEntry test = MetricEntry.of(taskRun2, counter("test"), ExecutionKind.TEST);
metricRepository.save(counter);
metricRepository.save(timer);
metricRepository.save(test); // should be retrieved as findAllAsync is used for backup

List<MetricEntry> results = metricRepository.findAllAsync(null).collectList().block();
List<MetricEntry> results = metricRepository.findAllAsync(tenant).collectList().block();
assertThat(results).hasSize(3);
}

@@ -123,8 +127,9 @@ public abstract class AbstractMetricRepositoryTest {
return Timer.of("counter", Duration.ofSeconds(5));
}

private TaskRun taskRun(String executionId, String taskId) {
private TaskRun taskRun(String tenantId, String executionId, String taskId) {
return TaskRun.builder()
.tenantId(tenantId)
.flowId("flow")
.namespace("namespace")
.executionId(executionId)
@@ -4,6 +4,8 @@ import io.kestra.core.events.CrudEvent;
|
||||
import io.kestra.core.events.CrudEventType;
|
||||
import io.kestra.core.models.property.Property;
|
||||
import io.kestra.core.models.templates.Template;
|
||||
import io.kestra.core.utils.Await;
|
||||
import io.kestra.core.utils.TestsUtils;
|
||||
import io.kestra.plugin.core.debug.Return;
|
||||
import io.kestra.core.utils.IdUtils;
|
||||
import io.micronaut.context.event.ApplicationEventListener;
|
||||
@@ -11,7 +13,10 @@ import io.micronaut.data.model.Pageable;
|
||||
import io.kestra.core.junit.annotations.KestraTest;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.inject.Singleton;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import java.time.Duration;
|
||||
import java.util.concurrent.CopyOnWriteArrayList;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
import org.junit.jupiter.api.BeforeAll;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.io.IOException;
|
||||
@@ -20,6 +25,8 @@ import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
@@ -28,55 +35,60 @@ public abstract class AbstractTemplateRepositoryTest {
|
||||
@Inject
|
||||
protected TemplateRepositoryInterface templateRepository;
|
||||
|
||||
@BeforeEach
|
||||
protected void init() throws IOException, URISyntaxException {
|
||||
@BeforeAll
|
||||
protected static void init() throws IOException, URISyntaxException {
|
||||
TemplateListener.reset();
|
||||
}
|
||||
|
||||
protected static Template.TemplateBuilder<?, ?> builder() {
|
||||
return builder(null);
|
||||
protected static Template.TemplateBuilder<?, ?> builder(String tenantId) {
|
||||
return builder(tenantId, null);
|
||||
}
|
||||
|
||||
protected static Template.TemplateBuilder<?, ?> builder(String namespace) {
|
||||
protected static Template.TemplateBuilder<?, ?> builder(String tenantId, String namespace) {
|
||||
return Template.builder()
|
||||
.id(IdUtils.create())
|
||||
.namespace(namespace == null ? "kestra.test" : namespace)
|
||||
.tenantId(tenantId)
|
||||
.tasks(Collections.singletonList(Return.builder().id("test").type(Return.class.getName()).format(Property.ofValue("test")).build()));
|
||||
}
|
||||
|
||||
@Test
|
||||
void findById() {
|
||||
Template template = builder().build();
|
||||
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
Template template = builder(tenant).build();
|
||||
templateRepository.create(template);
|
||||
|
||||
Optional<Template> full = templateRepository.findById(null, template.getNamespace(), template.getId());
|
||||
Optional<Template> full = templateRepository.findById(tenant, template.getNamespace(), template.getId());
|
||||
assertThat(full.isPresent()).isTrue();
|
||||
assertThat(full.get().getId()).isEqualTo(template.getId());
|
||||
|
||||
full = templateRepository.findById(null, template.getNamespace(), template.getId());
|
||||
full = templateRepository.findById(tenant, template.getNamespace(), template.getId());
|
||||
assertThat(full.isPresent()).isTrue();
|
||||
assertThat(full.get().getId()).isEqualTo(template.getId());
|
||||
}
|
||||
|
||||
@Test
|
||||
void findByNamespace() {
|
||||
Template template1 = builder().build();
|
||||
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
Template template1 = builder(tenant).build();
|
||||
Template template2 = Template.builder()
|
||||
.id(IdUtils.create())
|
||||
.tenantId(tenant)
|
||||
.namespace("kestra.test.template").build();
|
||||
|
||||
templateRepository.create(template1);
|
||||
templateRepository.create(template2);
|
||||
|
||||
List<Template> templates = templateRepository.findByNamespace(null, template1.getNamespace());
|
||||
List<Template> templates = templateRepository.findByNamespace(tenant, template1.getNamespace());
|
||||
assertThat(templates.size()).isGreaterThanOrEqualTo(1);
|
||||
templates = templateRepository.findByNamespace(null, template2.getNamespace());
|
||||
templates = templateRepository.findByNamespace(tenant, template2.getNamespace());
|
||||
assertThat(templates.size()).isEqualTo(1);
|
||||
}
|
||||
|
||||
@Test
|
||||
void save() {
|
||||
Template template = builder().build();
|
||||
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
Template template = builder(tenant).build();
|
||||
Template save = templateRepository.create(template);
|
||||
|
||||
assertThat(save.getId()).isEqualTo(template.getId());
|
||||
@@ -84,41 +96,42 @@ public abstract class AbstractTemplateRepositoryTest {
|
||||
|
||||
@Test
|
||||
void findAll() {
|
||||
long saveCount = templateRepository.findAll(null).size();
|
||||
Template template = builder().build();
|
||||
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
long saveCount = templateRepository.findAll(tenant).size();
|
||||
Template template = builder(tenant).build();
|
||||
templateRepository.create(template);
|
||||
long size = templateRepository.findAll(null).size();
|
||||
long size = templateRepository.findAll(tenant).size();
|
||||
assertThat(size).isGreaterThan(saveCount);
|
||||
templateRepository.delete(template);
|
||||
assertThat((long) templateRepository.findAll(null).size()).isEqualTo(saveCount);
|
||||
assertThat((long) templateRepository.findAll(tenant).size()).isEqualTo(saveCount);
|
||||
}
|
||||
|
||||
@Test
|
||||
void findAllForAllTenants() {
|
||||
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
long saveCount = templateRepository.findAllForAllTenants().size();
|
||||
Template template = builder().build();
|
||||
Template template = builder(tenant).build();
|
||||
templateRepository.create(template);
|
||||
long size = templateRepository.findAllForAllTenants().size();
|
||||
assertThat(size).isGreaterThan(saveCount);
|
||||
templateRepository.delete(template);
|
||||
assertThat((long) templateRepository.findAllForAllTenants().size()).isEqualTo(saveCount);
|
||||
}
|
||||
|
||||
@Test
|
||||
void find() {
|
||||
Template template1 = builder().build();
|
||||
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
Template template1 = builder(tenant).build();
|
||||
templateRepository.create(template1);
|
||||
Template template2 = builder().build();
|
||||
Template template2 = builder(tenant).build();
|
||||
templateRepository.create(template2);
|
||||
Template template3 = builder().build();
|
||||
Template template3 = builder(tenant).build();
|
||||
templateRepository.create(template3);
|
||||
|
||||
// with pageable
|
||||
List<Template> save = templateRepository.find(Pageable.from(1, 10),null, null, "kestra.test");
|
||||
List<Template> save = templateRepository.find(Pageable.from(1, 10),null, tenant, "kestra.test");
|
||||
assertThat((long) save.size()).isGreaterThanOrEqualTo(3L);
|
||||
|
||||
// without pageable
|
||||
save = templateRepository.find(null, null, "kestra.test");
|
||||
save = templateRepository.find(null, tenant, "kestra.test");
|
||||
assertThat((long) save.size()).isGreaterThanOrEqualTo(3L);
|
||||
|
||||
templateRepository.delete(template1);
|
||||
@@ -126,31 +139,45 @@ public abstract class AbstractTemplateRepositoryTest {
|
||||
templateRepository.delete(template3);
|
||||
}
|
||||
|
||||
private static final Logger LOG = LoggerFactory.getLogger(AbstractTemplateRepositoryTest.class);
|
||||
|
||||
@Test
|
||||
void delete() {
|
||||
Template template = builder().build();
|
||||
protected void delete() throws TimeoutException {
|
||||
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
Template template = builder(tenant).build();
|
||||
|
||||
Template save = templateRepository.create(template);
|
||||
templateRepository.delete(save);
|
||||
|
||||
assertThat(templateRepository.findById(null, template.getNamespace(), template.getId()).isPresent()).isFalse();
|
||||
assertThat(templateRepository.findById(tenant, template.getNamespace(), template.getId()).isPresent()).isFalse();
|
||||
|
||||
assertThat(TemplateListener.getEmits().size()).isEqualTo(2);
|
||||
assertThat(TemplateListener.getEmits().stream().filter(r -> r.getType() == CrudEventType.CREATE).count()).isEqualTo(1L);
|
||||
assertThat(TemplateListener.getEmits().stream().filter(r -> r.getType() == CrudEventType.DELETE).count()).isEqualTo(1L);
|
||||
Await.until(() -> {
|
||||
LOG.info("-------------> number of event: {}", TemplateListener.getEmits(tenant).size());
|
||||
return TemplateListener.getEmits(tenant).size() == 2;
|
||||
|
||||
}, Duration.ofMillis(100), Duration.ofSeconds(5));
|
||||
assertThat(TemplateListener.getEmits(tenant).stream().filter(r -> r.getType() == CrudEventType.CREATE).count()).isEqualTo(1L);
|
||||
assertThat(TemplateListener.getEmits(tenant).stream().filter(r -> r.getType() == CrudEventType.DELETE).count()).isEqualTo(1L);
|
||||
}
|
||||
|
||||
@Singleton
|
||||
public static class TemplateListener implements ApplicationEventListener<CrudEvent<Template>> {
|
||||
private static List<CrudEvent<Template>> emits = new ArrayList<>();
|
||||
private static List<CrudEvent<Template>> emits = new CopyOnWriteArrayList<>();
|
||||
|
||||
@Override
|
||||
public void onApplicationEvent(CrudEvent<Template> event) {
|
||||
emits.add(event);
|
||||
//The instanceOf is required because Micronaut may send non Template event via this method
|
||||
if ((event.getModel() != null && event.getModel() instanceof Template) ||
|
||||
(event.getPreviousModel() != null && event.getPreviousModel() instanceof Template)) {
|
||||
emits.add(event);
|
||||
}
|
||||
}
|
||||
|
||||
public static List<CrudEvent<Template>> getEmits() {
|
||||
return emits;
|
||||
public static List<CrudEvent<Template>> getEmits(String tenantId){
|
||||
return emits.stream()
|
||||
.filter(e -> (e.getModel() != null && e.getModel().getTenantId().equals(tenantId)) ||
|
||||
(e.getPreviousModel() != null && e.getPreviousModel().getTenantId().equals(tenantId)))
|
||||
.toList();
|
||||
}
|
||||
|
||||
public static void reset() {
|
||||
|
||||
@@ -9,6 +9,7 @@ import io.kestra.core.models.flows.State;
import io.kestra.core.models.triggers.Trigger;
import io.kestra.core.repositories.ExecutionRepositoryInterface.ChildFilter;
import io.kestra.core.utils.IdUtils;
import io.kestra.core.utils.TestsUtils;
import io.micronaut.data.model.Pageable;
import io.micronaut.data.model.Sort;
import jakarta.inject.Inject;
@@ -24,7 +25,6 @@ import java.util.Optional;
import java.util.stream.Stream;

import static io.kestra.core.models.flows.FlowScope.USER;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertThrows;

@@ -35,8 +35,9 @@ public abstract class AbstractTriggerRepositoryTest {
@Inject
protected TriggerRepositoryInterface triggerRepository;

private static Trigger.TriggerBuilder<?, ?> trigger() {
private static Trigger.TriggerBuilder<?, ?> trigger(String tenantId) {
return Trigger.builder()
.tenantId(tenantId)
.flowId(IdUtils.create())
.namespace(TEST_NAMESPACE)
.triggerId(IdUtils.create())
@@ -44,9 +45,9 @@ public abstract class AbstractTriggerRepositoryTest {
.date(ZonedDateTime.now());
}

protected static Trigger generateDefaultTrigger(){
protected static Trigger generateDefaultTrigger(String tenantId){
Trigger trigger = Trigger.builder()
.tenantId(MAIN_TENANT)
.tenantId(tenantId)
.triggerId("triggerId")
.namespace("trigger.namespace")
.flowId("flowId")
@@ -59,9 +60,10 @@ public abstract class AbstractTriggerRepositoryTest {
@ParameterizedTest
@MethodSource("filterCombinations")
void should_find_all(QueryFilter filter){
triggerRepository.save(generateDefaultTrigger());
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
triggerRepository.save(generateDefaultTrigger(tenant));

ArrayListTotal<Trigger> entries = triggerRepository.find(Pageable.UNPAGED, MAIN_TENANT, List.of(filter));
ArrayListTotal<Trigger> entries = triggerRepository.find(Pageable.UNPAGED, tenant, List.of(filter));

assertThat(entries).hasSize(1);
}
@@ -69,9 +71,10 @@ public abstract class AbstractTriggerRepositoryTest {
@ParameterizedTest
@MethodSource("filterCombinations")
void should_find_all_async(QueryFilter filter){
triggerRepository.save(generateDefaultTrigger());
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
triggerRepository.save(generateDefaultTrigger(tenant));

List<Trigger> entries = triggerRepository.find(MAIN_TENANT, List.of(filter)).collectList().block();
List<Trigger> entries = triggerRepository.find(tenant, List.of(filter)).collectList().block();

assertThat(entries).hasSize(1);
}
@@ -92,7 +95,7 @@ public abstract class AbstractTriggerRepositoryTest {
@ParameterizedTest
@MethodSource("errorFilterCombinations")
void should_fail_to_find_all(QueryFilter filter){
assertThrows(InvalidQueryFiltersException.class, () -> triggerRepository.find(Pageable.UNPAGED, MAIN_TENANT, List.of(filter)));
assertThrows(InvalidQueryFiltersException.class, () -> triggerRepository.find(Pageable.UNPAGED, TestsUtils.randomTenant(this.getClass().getSimpleName()), List.of(filter)));
}
static Stream<QueryFilter> errorFilterCombinations() {
|
||||
@@ -110,7 +113,8 @@ public abstract class AbstractTriggerRepositoryTest {
|
||||
|
||||
@Test
|
||||
void all() {
|
||||
Trigger.TriggerBuilder<?, ?> builder = trigger();
|
||||
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
Trigger.TriggerBuilder<?, ?> builder = trigger(tenant);
|
||||
|
||||
Optional<Trigger> findLast = triggerRepository.findLast(builder.build());
|
||||
assertThat(findLast.isPresent()).isFalse();
|
||||
@@ -130,47 +134,47 @@ public abstract class AbstractTriggerRepositoryTest {
|
||||
assertThat(findLast.get().getExecutionId()).isEqualTo(save.getExecutionId());
|
||||
|
||||
|
||||
triggerRepository.save(trigger().build());
|
||||
triggerRepository.save(trigger().build());
|
||||
Trigger searchedTrigger = trigger().build();
|
||||
triggerRepository.save(trigger(tenant).build());
|
||||
triggerRepository.save(trigger(tenant).build());
|
||||
Trigger searchedTrigger = trigger(tenant).build();
|
||||
triggerRepository.save(searchedTrigger);
|
||||
|
||||
List<Trigger> all = triggerRepository.findAllForAllTenants();
|
||||
|
||||
assertThat(all.size()).isEqualTo(4);
|
||||
assertThat(all.size()).isGreaterThanOrEqualTo(4);
|
||||
|
||||
all = triggerRepository.findAll(null);
|
||||
all = triggerRepository.findAll(tenant);
|
||||
|
||||
assertThat(all.size()).isEqualTo(4);
|
||||
|
||||
String namespacePrefix = "io.kestra.another";
|
||||
String namespace = namespacePrefix + ".ns";
|
||||
Trigger trigger = trigger().namespace(namespace).build();
|
||||
Trigger trigger = trigger(tenant).namespace(namespace).build();
|
||||
triggerRepository.save(trigger);
|
||||
|
||||
List<Trigger> find = triggerRepository.find(Pageable.from(1, 4, Sort.of(Sort.Order.asc("namespace"))), null, null, null, null, null);
|
||||
List<Trigger> find = triggerRepository.find(Pageable.from(1, 4, Sort.of(Sort.Order.asc("namespace"))), null, tenant, null, null, null);
|
||||
assertThat(find.size()).isEqualTo(4);
|
||||
assertThat(find.getFirst().getNamespace()).isEqualTo(namespace);
|
||||
|
||||
find = triggerRepository.find(Pageable.from(1, 4, Sort.of(Sort.Order.asc("namespace"))), null, null, null, searchedTrigger.getFlowId(), null);
|
||||
find = triggerRepository.find(Pageable.from(1, 4, Sort.of(Sort.Order.asc("namespace"))), null, tenant, null, searchedTrigger.getFlowId(), null);
|
||||
assertThat(find.size()).isEqualTo(1);
|
||||
assertThat(find.getFirst().getFlowId()).isEqualTo(searchedTrigger.getFlowId());
|
||||
|
||||
find = triggerRepository.find(Pageable.from(1, 100, Sort.of(Sort.Order.asc(triggerRepository.sortMapping().apply("triggerId")))), null, null, namespacePrefix, null, null);
|
||||
find = triggerRepository.find(Pageable.from(1, 100, Sort.of(Sort.Order.asc(triggerRepository.sortMapping().apply("triggerId")))), null, tenant, namespacePrefix, null, null);
|
||||
assertThat(find.size()).isEqualTo(1);
|
||||
assertThat(find.getFirst().getTriggerId()).isEqualTo(trigger.getTriggerId());
|
||||
|
||||
// Full text search is on namespace, flowId, triggerId, executionId
|
||||
find = triggerRepository.find(Pageable.from(1, 100, Sort.UNSORTED), trigger.getNamespace(), null, null, null, null);
|
||||
find = triggerRepository.find(Pageable.from(1, 100, Sort.UNSORTED), trigger.getNamespace(), tenant, null, null, null);
|
||||
assertThat(find.size()).isEqualTo(1);
|
||||
assertThat(find.getFirst().getTriggerId()).isEqualTo(trigger.getTriggerId());
|
||||
find = triggerRepository.find(Pageable.from(1, 100, Sort.UNSORTED), searchedTrigger.getFlowId(), null, null, null, null);
|
||||
find = triggerRepository.find(Pageable.from(1, 100, Sort.UNSORTED), searchedTrigger.getFlowId(), tenant, null, null, null);
|
||||
assertThat(find.size()).isEqualTo(1);
|
||||
assertThat(find.getFirst().getTriggerId()).isEqualTo(searchedTrigger.getTriggerId());
|
||||
find = triggerRepository.find(Pageable.from(1, 100, Sort.UNSORTED), searchedTrigger.getTriggerId(), null, null, null, null);
|
||||
find = triggerRepository.find(Pageable.from(1, 100, Sort.UNSORTED), searchedTrigger.getTriggerId(), tenant, null, null, null);
|
||||
assertThat(find.size()).isEqualTo(1);
|
||||
assertThat(find.getFirst().getTriggerId()).isEqualTo(searchedTrigger.getTriggerId());
|
||||
find = triggerRepository.find(Pageable.from(1, 100, Sort.UNSORTED), searchedTrigger.getExecutionId(), null, null, null, null);
|
||||
find = triggerRepository.find(Pageable.from(1, 100, Sort.UNSORTED), searchedTrigger.getExecutionId(), tenant, null, null, null);
|
||||
assertThat(find.size()).isEqualTo(1);
|
||||
assertThat(find.getFirst().getTriggerId()).isEqualTo(searchedTrigger.getTriggerId());
|
||||
}
|
||||
@@ -178,15 +182,17 @@ public abstract class AbstractTriggerRepositoryTest {
|
||||
@Test
|
||||
void shouldCountForNullTenant() {
|
||||
// Given
|
||||
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
triggerRepository.save(Trigger
|
||||
.builder()
|
||||
.tenantId(tenant)
|
||||
.triggerId(IdUtils.create())
|
||||
.flowId(IdUtils.create())
|
||||
.namespace("io.kestra.unittest")
|
||||
.build()
|
||||
);
|
||||
// When
|
||||
int count = triggerRepository.count(null);
|
||||
int count = triggerRepository.count(tenant);
|
||||
// Then
|
||||
assertThat(count).isEqualTo(1);
|
||||
}
|
||||
|
||||
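The hunks above move the trigger fixtures from a hard-coded MAIN_TENANT to a caller-supplied tenant. A minimal sketch of how a repository test is expected to consume them, using only the methods visible in this diff (the test name is illustrative, not part of the changeset):

    @Test
    void should_only_see_its_own_tenant() {
        // each test isolates its data behind a random tenant derived from the test class name
        String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
        triggerRepository.save(generateDefaultTrigger(tenant));

        // queries scoped to that tenant only return the data created above,
        // even when other tests run against the same shared repository
        assertThat(triggerRepository.count(tenant)).isEqualTo(1);
        assertThat(triggerRepository.findAll(tenant)).hasSize(1);
    }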
@@ -1,88 +1,92 @@
|
||||
package io.kestra.core.repositories;
|
||||
|
||||
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.models.executions.*;
|
||||
import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.utils.IdUtils;
|
||||
|
||||
import java.time.Duration;
|
||||
import java.util.Collections;
|
||||
|
||||
class ExecutionFixture {
|
||||
public static final Execution EXECUTION_1 = Execution.builder()
|
||||
.id(IdUtils.create())
|
||||
.namespace("io.kestra.unittest")
|
||||
.tenantId(MAIN_TENANT)
|
||||
.flowId("full")
|
||||
.flowRevision(1)
|
||||
.state(new State())
|
||||
.inputs(ImmutableMap.of("test", "value"))
|
||||
.taskRunList(Collections.singletonList(
|
||||
TaskRun.builder()
|
||||
.id(IdUtils.create())
|
||||
.namespace("io.kestra.unittest")
|
||||
.flowId("full")
|
||||
.state(new State())
|
||||
.attempts(Collections.singletonList(
|
||||
TaskRunAttempt.builder()
|
||||
.build()
|
||||
))
|
||||
.outputs(Variables.inMemory(ImmutableMap.of(
|
||||
"out", "value"
|
||||
)))
|
||||
.build()
|
||||
))
|
||||
.build();
|
||||
public static Execution EXECUTION_1(String tenant) {
|
||||
return Execution.builder()
|
||||
.id(IdUtils.create())
|
||||
.namespace("io.kestra.unittest")
|
||||
.tenantId(tenant)
|
||||
.flowId("full")
|
||||
.flowRevision(1)
|
||||
.state(new State())
|
||||
.inputs(ImmutableMap.of("test", "value"))
|
||||
.taskRunList(Collections.singletonList(
|
||||
TaskRun.builder()
|
||||
.id(IdUtils.create())
|
||||
.namespace("io.kestra.unittest")
|
||||
.flowId("full")
|
||||
.state(new State())
|
||||
.attempts(Collections.singletonList(
|
||||
TaskRunAttempt.builder()
|
||||
.build()
|
||||
))
|
||||
.outputs(Variables.inMemory(ImmutableMap.of(
|
||||
"out", "value"
|
||||
)))
|
||||
.build()
|
||||
))
|
||||
.build();
|
||||
}
|
||||
|
||||
public static final Execution EXECUTION_2 = Execution.builder()
|
||||
.id(IdUtils.create())
|
||||
.namespace("io.kestra.unittest")
|
||||
.tenantId(MAIN_TENANT)
|
||||
.flowId("full")
|
||||
.flowRevision(1)
|
||||
.state(new State())
|
||||
.inputs(ImmutableMap.of("test", 1))
|
||||
.taskRunList(Collections.singletonList(
|
||||
TaskRun.builder()
|
||||
.id(IdUtils.create())
|
||||
.namespace("io.kestra.unittest")
|
||||
.flowId("full")
|
||||
.state(new State())
|
||||
.attempts(Collections.singletonList(
|
||||
TaskRunAttempt.builder()
|
||||
.build()
|
||||
))
|
||||
.outputs(Variables.inMemory(ImmutableMap.of(
|
||||
"out", 1
|
||||
)))
|
||||
.build()
|
||||
))
|
||||
.build();
|
||||
public static Execution EXECUTION_2(String tenant) {
|
||||
return Execution.builder()
|
||||
.id(IdUtils.create())
|
||||
.namespace("io.kestra.unittest")
|
||||
.tenantId(tenant)
|
||||
.flowId("full")
|
||||
.flowRevision(1)
|
||||
.state(new State())
|
||||
.inputs(ImmutableMap.of("test", 1))
|
||||
.taskRunList(Collections.singletonList(
|
||||
TaskRun.builder()
|
||||
.id(IdUtils.create())
|
||||
.namespace("io.kestra.unittest")
|
||||
.flowId("full")
|
||||
.state(new State())
|
||||
.attempts(Collections.singletonList(
|
||||
TaskRunAttempt.builder()
|
||||
.build()
|
||||
))
|
||||
.outputs(Variables.inMemory(ImmutableMap.of(
|
||||
"out", 1
|
||||
)))
|
||||
.build()
|
||||
))
|
||||
.build();
|
||||
}
|
||||
|
||||
public static final Execution EXECUTION_TEST = Execution.builder()
|
||||
.id(IdUtils.create())
|
||||
.namespace("io.kestra.unittest")
|
||||
.flowId("full")
|
||||
.flowRevision(1)
|
||||
.state(new State())
|
||||
.inputs(ImmutableMap.of("test", 1))
|
||||
.kind(ExecutionKind.TEST)
|
||||
.taskRunList(Collections.singletonList(
|
||||
TaskRun.builder()
|
||||
.id(IdUtils.create())
|
||||
.namespace("io.kestra.unittest")
|
||||
.flowId("full")
|
||||
.state(new State())
|
||||
.attempts(Collections.singletonList(
|
||||
TaskRunAttempt.builder()
|
||||
.build()
|
||||
))
|
||||
.outputs(Variables.inMemory(ImmutableMap.of(
|
||||
"out", 1
|
||||
)))
|
||||
.build()
|
||||
))
|
||||
.build();
|
||||
}
|
||||
public static Execution EXECUTION_TEST(String tenant) {
|
||||
return Execution.builder()
|
||||
.id(IdUtils.create())
|
||||
.namespace("io.kestra.unittest")
|
||||
.tenantId(tenant)
|
||||
.flowId("full")
|
||||
.flowRevision(1)
|
||||
.state(new State())
|
||||
.inputs(ImmutableMap.of("test", 1))
|
||||
.kind(ExecutionKind.TEST)
|
||||
.taskRunList(Collections.singletonList(
|
||||
TaskRun.builder()
|
||||
.id(IdUtils.create())
|
||||
.namespace("io.kestra.unittest")
|
||||
.flowId("full")
|
||||
.state(new State())
|
||||
.attempts(Collections.singletonList(
|
||||
TaskRunAttempt.builder()
|
||||
.build()
|
||||
))
|
||||
.outputs(Variables.inMemory(ImmutableMap.of(
|
||||
"out", 1
|
||||
)))
|
||||
.build()
|
||||
))
|
||||
.build();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,15 @@
package io.kestra.core.runners;

import io.kestra.core.junit.annotations.KestraTest;
import jakarta.inject.Inject;
import org.junit.jupiter.api.TestInstance;

@KestraTest(startRunner = true)
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
public abstract class AbstractExecutorTest {

@Inject
protected TestRunnerUtils runnerUtils;


}

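The new AbstractExecutorTest above starts the runner once per test class and injects TestRunnerUtils. A sketch of what a concrete subclass might look like; the class name, flow id, and tenant are placeholders and not taken from this changeset:

    class MyExecutorTest extends AbstractExecutorTest {

        @Test
        @LoadFlows(value = {"flows/valids/minimal.yaml"}, tenantId = "executor-tenant")
        void shouldRunMinimalFlow() throws Exception {
            // runnerUtils is inherited from AbstractExecutorTest
            Execution execution = runnerUtils.runOne("executor-tenant", "io.kestra.tests", "minimal");
            assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
        }
    }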
@@ -1,7 +1,6 @@
|
||||
package io.kestra.core.runners;
|
||||
|
||||
import io.kestra.core.junit.annotations.ExecuteFlow;
|
||||
import io.kestra.core.junit.annotations.FlakyTest;
|
||||
import io.kestra.core.junit.annotations.KestraTest;
|
||||
import io.kestra.core.junit.annotations.LoadFlows;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
@@ -29,15 +28,17 @@ import static org.assertj.core.api.Assertions.assertThat;
|
||||
// must be per-class to allow calling once init() which took a lot of time
|
||||
public abstract class AbstractRunnerTest {
|
||||
|
||||
public static final String TENANT_1 = "tenant1";
|
||||
public static final String TENANT_2 = "tenant2";
|
||||
@Inject
|
||||
protected RunnerUtils runnerUtils;
|
||||
protected TestRunnerUtils runnerUtils;
|
||||
|
||||
@Inject
|
||||
@Named(QueueFactoryInterface.WORKERTASKLOG_NAMED)
|
||||
protected QueueInterface<LogEntry> logsQueue;
|
||||
|
||||
@Inject
|
||||
private RestartCaseTest restartCaseTest;
|
||||
protected RestartCaseTest restartCaseTest;
|
||||
|
||||
@Inject
|
||||
protected FlowTriggerCaseTest flowTriggerCaseTest;
|
||||
@@ -55,7 +56,7 @@ public abstract class AbstractRunnerTest {
|
||||
private WorkingDirectoryTest.Suite workingDirectoryTest;
|
||||
|
||||
@Inject
|
||||
private PauseTest.Suite pauseTest;
|
||||
protected PauseTest.Suite pauseTest;
|
||||
|
||||
@Inject
|
||||
private SkipExecutionCaseTest skipExecutionCaseTest;
|
||||
@@ -67,10 +68,10 @@ public abstract class AbstractRunnerTest {
|
||||
protected LoopUntilCaseTest loopUntilTestCaseTest;
|
||||
|
||||
@Inject
|
||||
private FlowConcurrencyCaseTest flowConcurrencyCaseTest;
|
||||
protected FlowConcurrencyCaseTest flowConcurrencyCaseTest;
|
||||
|
||||
@Inject
|
||||
private ScheduleDateCaseTest scheduleDateCaseTest;
|
||||
protected ScheduleDateCaseTest scheduleDateCaseTest;
|
||||
|
||||
@Inject
|
||||
protected FlowInputOutput flowIO;
|
||||
@@ -79,7 +80,7 @@ public abstract class AbstractRunnerTest {
|
||||
private SLATestCase slaTestCase;
|
||||
|
||||
@Inject
|
||||
private ChangeStateTestCase changeStateTestCase;
|
||||
protected ChangeStateTestCase changeStateTestCase;
|
||||
|
||||
@Inject
|
||||
private AfterExecutionTestCase afterExecutionTestCase;
|
||||
@@ -195,12 +196,12 @@ public abstract class AbstractRunnerTest {
|
||||
}
|
||||
|
||||
@Test
|
||||
@LoadFlows({"flows/valids/trigger-flow-listener-no-inputs.yaml",
|
||||
@LoadFlows(value = {"flows/valids/trigger-flow-listener-no-inputs.yaml",
|
||||
"flows/valids/trigger-flow-listener.yaml",
|
||||
"flows/valids/trigger-flow-listener-namespace-condition.yaml",
|
||||
"flows/valids/trigger-flow.yaml"})
|
||||
"flows/valids/trigger-flow.yaml"}, tenantId = "listener-tenant")
|
||||
void flowTrigger() throws Exception {
|
||||
flowTriggerCaseTest.trigger();
|
||||
flowTriggerCaseTest.trigger("listener-tenant");
|
||||
}
|
||||
|
||||
@Test // flaky on CI but never fail locally
|
||||
@@ -210,13 +211,11 @@ public abstract class AbstractRunnerTest {
|
||||
flowTriggerCaseTest.triggerWithPause();
|
||||
}
|
||||
|
||||
@FlakyTest
|
||||
@Disabled
|
||||
@Test
|
||||
@LoadFlows({"flows/valids/trigger-flow-listener-with-concurrency-limit.yaml",
|
||||
"flows/valids/trigger-flow-with-concurrency-limit.yaml"})
|
||||
@LoadFlows(value = {"flows/valids/trigger-flow-listener-with-concurrency-limit.yaml",
|
||||
"flows/valids/trigger-flow-with-concurrency-limit.yaml"}, tenantId = "trigger-tenant")
|
||||
void flowTriggerWithConcurrencyLimit() throws Exception {
|
||||
flowTriggerCaseTest.triggerWithConcurrencyLimit();
|
||||
flowTriggerCaseTest.triggerWithConcurrencyLimit("trigger-tenant");
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -228,11 +227,11 @@ public abstract class AbstractRunnerTest {
|
||||
}
|
||||
|
||||
@Test // Flaky on CI but never locally even with 100 repetitions
|
||||
@LoadFlows({"flows/valids/trigger-flow-listener-namespace-condition.yaml",
|
||||
@LoadFlows(value = {"flows/valids/trigger-flow-listener-namespace-condition.yaml",
|
||||
"flows/valids/trigger-multiplecondition-flow-c.yaml",
|
||||
"flows/valids/trigger-multiplecondition-flow-d.yaml"})
|
||||
"flows/valids/trigger-multiplecondition-flow-d.yaml"}, tenantId = "condition-tenant")
|
||||
void multipleConditionTriggerFailed() throws Exception {
|
||||
multipleConditionTriggerCaseTest.failed();
|
||||
multipleConditionTriggerCaseTest.failed("condition-tenant");
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -245,11 +244,11 @@ public abstract class AbstractRunnerTest {
|
||||
|
||||
@Disabled
|
||||
@Test
|
||||
@LoadFlows({"flows/valids/flow-trigger-preconditions-flow-listen.yaml",
|
||||
@LoadFlows(value = {"flows/valids/flow-trigger-preconditions-flow-listen.yaml",
|
||||
"flows/valids/flow-trigger-preconditions-flow-a.yaml",
|
||||
"flows/valids/flow-trigger-preconditions-flow-b.yaml"})
|
||||
"flows/valids/flow-trigger-preconditions-flow-b.yaml"}, tenantId = TENANT_1)
|
||||
void flowTriggerPreconditionsMergeOutputs() throws Exception {
|
||||
multipleConditionTriggerCaseTest.flowTriggerPreconditionsMergeOutputs();
|
||||
multipleConditionTriggerCaseTest.flowTriggerPreconditionsMergeOutputs(TENANT_1);
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -279,19 +278,19 @@ public abstract class AbstractRunnerTest {
|
||||
}
|
||||
|
||||
@Test
|
||||
@LoadFlows({"flows/valids/switch.yaml",
|
||||
@LoadFlows(value = {"flows/valids/switch.yaml",
|
||||
"flows/valids/task-flow.yaml",
|
||||
"flows/valids/task-flow-inherited-labels.yaml"})
|
||||
"flows/valids/task-flow-inherited-labels.yaml"}, tenantId = TENANT_1)
|
||||
void flowWaitFailed() throws Exception {
|
||||
flowCaseTest.waitFailed();
|
||||
flowCaseTest.waitFailed(TENANT_1);
|
||||
}
|
||||
|
||||
@Test
|
||||
@LoadFlows({"flows/valids/switch.yaml",
|
||||
@LoadFlows(value = {"flows/valids/switch.yaml",
|
||||
"flows/valids/task-flow.yaml",
|
||||
"flows/valids/task-flow-inherited-labels.yaml"})
|
||||
"flows/valids/task-flow-inherited-labels.yaml"}, tenantId = TENANT_2)
|
||||
public void invalidOutputs() throws Exception {
|
||||
flowCaseTest.invalidOutputs();
|
||||
flowCaseTest.invalidOutputs(TENANT_2);
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -301,9 +300,9 @@ public abstract class AbstractRunnerTest {
|
||||
}
|
||||
|
||||
@Test
|
||||
@LoadFlows(value = {"flows/valids/working-directory.yaml"}, tenantId = "tenant1")
|
||||
@LoadFlows(value = {"flows/valids/working-directory.yaml"}, tenantId = TENANT_1)
|
||||
public void workerFailed() throws Exception {
|
||||
workingDirectoryTest.failed("tenant1", runnerUtils);
|
||||
workingDirectoryTest.failed(TENANT_1, runnerUtils);
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -354,7 +353,6 @@ public abstract class AbstractRunnerTest {
|
||||
skipExecutionCaseTest.skipExecution();
|
||||
}
|
||||
|
||||
@Disabled
|
||||
@Test
|
||||
@LoadFlows({"flows/valids/for-each-item-subflow.yaml",
|
||||
"flows/valids/for-each-item.yaml"})
|
||||
@@ -363,12 +361,11 @@ public abstract class AbstractRunnerTest {
|
||||
}
|
||||
|
||||
@Test
|
||||
@LoadFlows({"flows/valids/for-each-item.yaml"})
|
||||
@LoadFlows(value = {"flows/valids/for-each-item.yaml"}, tenantId = TENANT_1)
|
||||
protected void forEachItemEmptyItems() throws Exception {
|
||||
forEachItemCaseTest.forEachItemEmptyItems();
|
||||
forEachItemCaseTest.forEachItemEmptyItems(TENANT_1);
|
||||
}
|
||||
|
||||
@Disabled
|
||||
@Test
|
||||
@LoadFlows({"flows/valids/for-each-item-subflow-failed.yaml",
|
||||
"flows/valids/for-each-item-failed.yaml"})
|
||||
@@ -384,16 +381,16 @@ public abstract class AbstractRunnerTest {
|
||||
}
|
||||
|
||||
@Test // flaky on CI but always pass locally even with 100 iterations
|
||||
@LoadFlows({"flows/valids/restart-for-each-item.yaml", "flows/valids/restart-child.yaml"})
|
||||
@LoadFlows(value = {"flows/valids/restart-for-each-item.yaml", "flows/valids/restart-child.yaml"}, tenantId = TENANT_1)
|
||||
void restartForEachItem() throws Exception {
|
||||
forEachItemCaseTest.restartForEachItem();
|
||||
forEachItemCaseTest.restartForEachItem(TENANT_1);
|
||||
}
|
||||
|
||||
@Test
|
||||
@LoadFlows({"flows/valids/for-each-item-subflow.yaml",
|
||||
"flows/valids/for-each-item-in-if.yaml"})
|
||||
@LoadFlows(value = {"flows/valids/for-each-item-subflow.yaml",
|
||||
"flows/valids/for-each-item-in-if.yaml"}, tenantId = TENANT_1)
|
||||
protected void forEachItemInIf() throws Exception {
|
||||
forEachItemCaseTest.forEachItemInIf();
|
||||
forEachItemCaseTest.forEachItemInIf(TENANT_1);
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -434,12 +431,11 @@ public abstract class AbstractRunnerTest {
|
||||
}
|
||||
|
||||
@Test
|
||||
@LoadFlows({"flows/valids/flow-concurrency-for-each-item.yaml", "flows/valids/flow-concurrency-queue.yml"})
|
||||
@LoadFlows(value = {"flows/valids/flow-concurrency-for-each-item.yaml", "flows/valids/flow-concurrency-queue.yml"}, tenantId = TENANT_1)
|
||||
protected void flowConcurrencyWithForEachItem() throws Exception {
|
||||
flowConcurrencyCaseTest.flowConcurrencyWithForEachItem();
|
||||
flowConcurrencyCaseTest.flowConcurrencyWithForEachItem(TENANT_1);
|
||||
}
|
||||
|
||||
@Disabled
|
||||
@Test
|
||||
@LoadFlows({"flows/valids/flow-concurrency-queue-fail.yml"})
|
||||
protected void concurrencyQueueRestarted() throws Exception {
|
||||
@@ -452,6 +448,12 @@ public abstract class AbstractRunnerTest {
|
||||
flowConcurrencyCaseTest.flowConcurrencyQueueAfterExecution();
|
||||
}
|
||||
|
||||
@Test
|
||||
@LoadFlows(value = {"flows/valids/flow-concurrency-subflow.yml", "flows/valids/flow-concurrency-cancel.yml"}, tenantId = TENANT_1)
|
||||
void flowConcurrencySubflow() throws Exception {
|
||||
flowConcurrencyCaseTest.flowConcurrencySubflow(TENANT_1);
|
||||
}
|
||||
|
||||
@Test
|
||||
@ExecuteFlow("flows/valids/executable-fail.yml")
|
||||
void badExecutable(Execution execution) {
|
||||
@@ -504,9 +506,9 @@ public abstract class AbstractRunnerTest {
|
||||
}
|
||||
|
||||
@Test
|
||||
@LoadFlows({"flows/valids/minimal.yaml"})
|
||||
@LoadFlows(value = {"flows/valids/minimal.yaml"}, tenantId = TENANT_1)
|
||||
void shouldScheduleOnDate() throws Exception {
|
||||
scheduleDateCaseTest.shouldScheduleOnDate();
|
||||
scheduleDateCaseTest.shouldScheduleOnDate(TENANT_1);
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -528,15 +530,15 @@ public abstract class AbstractRunnerTest {
|
||||
}
|
||||
|
||||
@Test
|
||||
@LoadFlows({"flows/valids/sla-execution-condition.yaml"})
|
||||
@LoadFlows(value = {"flows/valids/sla-execution-condition.yaml"}, tenantId = TENANT_1)
|
||||
void executionConditionSLAShouldCancel() throws Exception {
|
||||
slaTestCase.executionConditionSLAShouldCancel();
|
||||
slaTestCase.executionConditionSLAShouldCancel(TENANT_1);
|
||||
}
|
||||
|
||||
@Test
|
||||
@LoadFlows({"flows/valids/sla-execution-condition.yaml"})
|
||||
@LoadFlows(value = {"flows/valids/sla-execution-condition.yaml"}, tenantId = TENANT_2)
|
||||
void executionConditionSLAShouldLabel() throws Exception {
|
||||
slaTestCase.executionConditionSLAShouldLabel();
|
||||
slaTestCase.executionConditionSLAShouldLabel(TENANT_2);
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -556,15 +558,15 @@ public abstract class AbstractRunnerTest {
|
||||
}
|
||||
|
||||
@Test
|
||||
@ExecuteFlow("flows/valids/failed-first.yaml")
|
||||
@ExecuteFlow(value = "flows/valids/failed-first.yaml", tenantId = TENANT_1)
|
||||
public void changeStateShouldEndsInSuccess(Execution execution) throws Exception {
|
||||
changeStateTestCase.changeStateShouldEndsInSuccess(execution);
|
||||
}
|
||||
|
||||
@Test
|
||||
@LoadFlows({"flows/valids/failed-first.yaml", "flows/valids/subflow-parent-of-failed.yaml"})
|
||||
@LoadFlows(value = {"flows/valids/failed-first.yaml", "flows/valids/subflow-parent-of-failed.yaml"}, tenantId = TENANT_2)
|
||||
public void changeStateInSubflowShouldEndsParentFlowInSuccess() throws Exception {
|
||||
changeStateTestCase.changeStateInSubflowShouldEndsParentFlowInSuccess();
|
||||
changeStateTestCase.changeStateInSubflowShouldEndsParentFlowInSuccess(TENANT_2);
|
||||
}
|
||||
|
||||
@Test
|
||||
|
||||
@@ -3,25 +3,18 @@ package io.kestra.core.runners;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.State;
import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface;
import io.kestra.core.models.flows.State.Type;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.services.ExecutionService;
import io.kestra.core.utils.TestsUtils;
import jakarta.inject.Inject;
import jakarta.inject.Named;
import jakarta.inject.Singleton;
import reactor.core.publisher.Flux;

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;

import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import static org.assertj.core.api.Assertions.assertThat;

@Singleton
public class ChangeStateTestCase {

public static final String NAMESPACE = "io.kestra.tests";
@Inject
private FlowRepositoryInterface flowRepository;

@@ -29,11 +22,7 @@ public class ChangeStateTestCase {
private ExecutionService executionService;

@Inject
@Named(QueueFactoryInterface.EXECUTION_NAMED)
private QueueInterface<Execution> executionQueue;

@Inject
private RunnerUtils runnerUtils;
private TestRunnerUtils runnerUtils;

public void changeStateShouldEndsInSuccess(Execution execution) throws Exception {
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.FAILED);
@@ -41,73 +30,40 @@ public class ChangeStateTestCase {
assertThat(execution.getTaskRunList().getFirst().getState().getCurrent()).isEqualTo(State.Type.FAILED);

// await for the last execution
CountDownLatch latch = new CountDownLatch(1);
AtomicReference<Execution> lastExecution = new AtomicReference<>();
Flux<Execution> receivedExecutions = TestsUtils.receive(executionQueue, either -> {
Execution exec = either.getLeft();
if (execution.getId().equals(exec.getId()) && exec.getState().getCurrent() == State.Type.SUCCESS) {
lastExecution.set(exec);
latch.countDown();
}
});

Flow flow = flowRepository.findByExecution(execution);
Execution markedAs = executionService.markAs(execution, flow, execution.getTaskRunList().getFirst().getId(), State.Type.SUCCESS);
executionQueue.emit(markedAs);
Execution lastExecution = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), markedAs);

assertThat(latch.await(10, TimeUnit.SECONDS)).isTrue();
receivedExecutions.blockLast();
assertThat(lastExecution.get().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(lastExecution.get().getTaskRunList()).hasSize(2);
assertThat(lastExecution.get().getTaskRunList().getFirst().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(lastExecution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(lastExecution.getTaskRunList()).hasSize(2);
assertThat(lastExecution.getTaskRunList().getFirst().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
}

public void changeStateInSubflowShouldEndsParentFlowInSuccess() throws Exception {
// await for the subflow execution
CountDownLatch latch = new CountDownLatch(1);
AtomicReference<Execution> lastExecution = new AtomicReference<>();
Flux<Execution> receivedExecutions = TestsUtils.receive(executionQueue, either -> {
Execution exec = either.getLeft();
if ("failed-first".equals(exec.getFlowId()) && exec.getState().getCurrent() == State.Type.FAILED) {
lastExecution.set(exec);
latch.countDown();
}
});

public void changeStateInSubflowShouldEndsParentFlowInSuccess(String tenantId) throws Exception {
// run the parent flow
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "subflow-parent-of-failed");
Execution execution = runnerUtils.runOne(tenantId, NAMESPACE, "subflow-parent-of-failed");
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.FAILED);
assertThat(execution.getTaskRunList()).hasSize(1);
assertThat(execution.getTaskRunList().getFirst().getState().getCurrent()).isEqualTo(State.Type.FAILED);

// assert on the subflow
assertThat(latch.await(10, TimeUnit.SECONDS)).isTrue();
receivedExecutions.blockLast();
assertThat(lastExecution.get().getState().getCurrent()).isEqualTo(State.Type.FAILED);
assertThat(lastExecution.get().getTaskRunList()).hasSize(1);
assertThat(lastExecution.get().getTaskRunList().getFirst().getState().getCurrent()).isEqualTo(State.Type.FAILED);

// await for the parent execution
CountDownLatch parentLatch = new CountDownLatch(1);
AtomicReference<Execution> lastParentExecution = new AtomicReference<>();
receivedExecutions = TestsUtils.receive(executionQueue, either -> {
Execution exec = either.getLeft();
if (execution.getId().equals(exec.getId()) && exec.getState().isTerminated()) {
lastParentExecution.set(exec);
parentLatch.countDown();
}
});
Execution lastExecution = runnerUtils.awaitFlowExecution(e -> e.getState().getCurrent().equals(Type.FAILED), tenantId, NAMESPACE, "failed-first");
assertThat(lastExecution.getState().getCurrent()).isEqualTo(State.Type.FAILED);
assertThat(lastExecution.getTaskRunList()).hasSize(1);
assertThat(lastExecution.getTaskRunList().getFirst().getState().getCurrent()).isEqualTo(State.Type.FAILED);

// restart the subflow
Flow flow = flowRepository.findByExecution(lastExecution.get());
Execution markedAs = executionService.markAs(lastExecution.get(), flow, lastExecution.get().getTaskRunList().getFirst().getId(), State.Type.SUCCESS);
executionQueue.emit(markedAs);
Flow flow = flowRepository.findByExecution(lastExecution);
Execution markedAs = executionService.markAs(lastExecution, flow, lastExecution.getTaskRunList().getFirst().getId(), State.Type.SUCCESS);
runnerUtils.emitAndAwaitExecution(e -> e.getState().isTerminated(), markedAs);

//We wait for the subflow execution to pass from failed to success
Execution lastParentExecution = runnerUtils.awaitFlowExecution(e ->
e.getTaskRunList().getFirst().getState().getCurrent().equals(Type.SUCCESS), tenantId, NAMESPACE, "subflow-parent-of-failed");

// assert for the parent flow
assertThat(parentLatch.await(10, TimeUnit.SECONDS)).isTrue();
receivedExecutions.blockLast();
assertThat(lastParentExecution.get().getState().getCurrent()).isEqualTo(State.Type.FAILED); // FIXME should be success but it's FAILED on unit tests
assertThat(lastParentExecution.get().getTaskRunList()).hasSize(1);
assertThat(lastParentExecution.get().getTaskRunList().getFirst().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(lastParentExecution.getState().getCurrent()).isEqualTo(State.Type.FAILED); // FIXME should be success but it's FAILED on unit tests
assertThat(lastParentExecution.getTaskRunList()).hasSize(1);
assertThat(lastParentExecution.getTaskRunList().getFirst().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
}
}

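The ChangeStateTestCase rewrite above drops the CountDownLatch / TestsUtils.receive plumbing in favour of blocking helpers on TestRunnerUtils. A condensed sketch of the new pattern, built only from calls visible in this diff (flow and tenant names are placeholders):

    // run a flow, mark its failed task run as SUCCESS, then block until the new terminal state is reached
    Execution execution = runnerUtils.runOne(tenantId, NAMESPACE, "failed-first");
    Flow flow = flowRepository.findByExecution(execution);
    Execution markedAs = executionService.markAs(execution, flow, execution.getTaskRunList().getFirst().getId(), State.Type.SUCCESS);
    Execution last = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), markedAs);

    // or block until any execution of a given flow matches a predicate
    Execution child = runnerUtils.awaitFlowExecution(e -> e.getState().isTerminated(), tenantId, NAMESPACE, "failed-first");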
@@ -18,7 +18,7 @@ import static org.assertj.core.api.Assertions.assertThat;
public class EmptyVariablesTest {

@Inject
private RunnerUtils runnerUtils;
private TestRunnerUtils runnerUtils;
@Inject
private FlowInputOutput flowIO;


@@ -58,7 +58,7 @@ class ExecutionServiceTest {
LogRepositoryInterface logRepository;

@Inject
RunnerUtils runnerUtils;
TestRunnerUtils runnerUtils;

@Test
@LoadFlows({"flows/valids/restart_last_failed.yaml"})

@@ -3,19 +3,15 @@ package io.kestra.core.runners;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.flows.Flow;
|
||||
import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.models.flows.State.History;
|
||||
import io.kestra.core.models.flows.State.Type;
|
||||
import io.kestra.core.queues.QueueException;
|
||||
import io.kestra.core.queues.QueueFactoryInterface;
|
||||
import io.kestra.core.queues.QueueInterface;
|
||||
import io.kestra.core.repositories.FlowRepositoryInterface;
|
||||
import io.kestra.core.services.ExecutionService;
|
||||
import io.kestra.core.storages.StorageInterface;
|
||||
import io.kestra.core.utils.TestsUtils;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.inject.Named;
|
||||
import jakarta.inject.Singleton;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import reactor.core.publisher.Flux;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
@@ -25,24 +21,21 @@ import java.net.URISyntaxException;
|
||||
import java.nio.file.Files;
|
||||
import java.time.Duration;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
import java.util.stream.IntStream;
|
||||
|
||||
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
@Singleton
|
||||
public class FlowConcurrencyCaseTest {
|
||||
|
||||
public static final String NAMESPACE = "io.kestra.tests";
|
||||
@Inject
|
||||
private StorageInterface storageInterface;
|
||||
|
||||
@Inject
|
||||
protected RunnerUtils runnerUtils;
|
||||
protected TestRunnerUtils runnerUtils;
|
||||
|
||||
@Inject
|
||||
private FlowInputOutput flowIO;
|
||||
@@ -50,354 +43,168 @@ public class FlowConcurrencyCaseTest {
|
||||
@Inject
|
||||
private FlowRepositoryInterface flowRepository;
|
||||
|
||||
@Inject
|
||||
@Named(QueueFactoryInterface.EXECUTION_NAMED)
|
||||
protected QueueInterface<Execution> executionQueue;
|
||||
|
||||
@Inject
|
||||
private ExecutionService executionService;
|
||||
|
||||
public void flowConcurrencyCancel() throws TimeoutException, QueueException, InterruptedException {
|
||||
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, "io.kestra.tests", "flow-concurrency-cancel", null, null, Duration.ofSeconds(30));
|
||||
Execution execution2 = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "flow-concurrency-cancel");
|
||||
public void flowConcurrencyCancel() throws TimeoutException, QueueException {
|
||||
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, NAMESPACE, "flow-concurrency-cancel", null, null, Duration.ofSeconds(30));
|
||||
Execution execution2 = runnerUtils.runOne(MAIN_TENANT, NAMESPACE, "flow-concurrency-cancel");
|
||||
|
||||
assertThat(execution1.getState().isRunning()).isTrue();
|
||||
assertThat(execution2.getState().getCurrent()).isEqualTo(State.Type.CANCELLED);
|
||||
|
||||
CountDownLatch latch1 = new CountDownLatch(1);
|
||||
|
||||
Flux<Execution> receive = TestsUtils.receive(executionQueue, e -> {
|
||||
if (e.getLeft().getId().equals(execution1.getId())) {
|
||||
if (e.getLeft().getState().getCurrent() == State.Type.SUCCESS) {
|
||||
latch1.countDown();
|
||||
}
|
||||
}
|
||||
|
||||
// FIXME we should fail if we receive the cancel execution again but on Kafka it happens
|
||||
});
|
||||
|
||||
assertTrue(latch1.await(1, TimeUnit.MINUTES));
|
||||
receive.blockLast();
|
||||
runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), execution1);
|
||||
}
|
||||
|
||||
public void flowConcurrencyFail() throws TimeoutException, QueueException, InterruptedException {
|
||||
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, "io.kestra.tests", "flow-concurrency-fail", null, null, Duration.ofSeconds(30));
|
||||
Execution execution2 = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "flow-concurrency-fail");
|
||||
public void flowConcurrencyFail() throws TimeoutException, QueueException {
|
||||
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, NAMESPACE, "flow-concurrency-fail", null, null, Duration.ofSeconds(30));
|
||||
Execution execution2 = runnerUtils.runOne(MAIN_TENANT, NAMESPACE, "flow-concurrency-fail");
|
||||
|
||||
assertThat(execution1.getState().isRunning()).isTrue();
|
||||
assertThat(execution2.getState().getCurrent()).isEqualTo(State.Type.FAILED);
|
||||
|
||||
CountDownLatch latch1 = new CountDownLatch(1);
|
||||
|
||||
Flux<Execution> receive = TestsUtils.receive(executionQueue, e -> {
|
||||
if (e.getLeft().getId().equals(execution1.getId())) {
|
||||
if (e.getLeft().getState().getCurrent() == State.Type.SUCCESS) {
|
||||
latch1.countDown();
|
||||
}
|
||||
}
|
||||
|
||||
// FIXME we should fail if we receive the cancel execution again but on Kafka it happens
|
||||
});
|
||||
|
||||
assertTrue(latch1.await(1, TimeUnit.MINUTES));
|
||||
receive.blockLast();
|
||||
runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), execution1);
|
||||
}
|
||||
|
||||
public void flowConcurrencyQueue() throws TimeoutException, QueueException, InterruptedException {
|
||||
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, "io.kestra.tests", "flow-concurrency-queue", null, null, Duration.ofSeconds(30));
|
||||
public void flowConcurrencyQueue() throws QueueException {
|
||||
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue", null, null, Duration.ofSeconds(30));
|
||||
Flow flow = flowRepository
|
||||
.findById(MAIN_TENANT, "io.kestra.tests", "flow-concurrency-queue", Optional.empty())
|
||||
.findById(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue", Optional.empty())
|
||||
.orElseThrow();
|
||||
Execution execution2 = Execution.newExecution(flow, null, null, Optional.empty());
|
||||
executionQueue.emit(execution2);
|
||||
Execution executionResult2 = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), execution2);
|
||||
Execution executionResult1 = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), execution1);
|
||||
|
||||
assertThat(execution1.getState().isRunning()).isTrue();
|
||||
assertThat(execution2.getState().getCurrent()).isEqualTo(State.Type.CREATED);
|
||||
|
||||
var executionResult1 = new AtomicReference<Execution>();
|
||||
var executionResult2 = new AtomicReference<Execution>();
|
||||
|
||||
CountDownLatch latch1 = new CountDownLatch(1);
|
||||
CountDownLatch latch2 = new CountDownLatch(1);
|
||||
CountDownLatch latch3 = new CountDownLatch(1);
|
||||
|
||||
Flux<Execution> receive = TestsUtils.receive(executionQueue, e -> {
|
||||
if (e.getLeft().getId().equals(execution1.getId())) {
|
||||
executionResult1.set(e.getLeft());
|
||||
if (e.getLeft().getState().getCurrent() == State.Type.SUCCESS) {
|
||||
latch1.countDown();
|
||||
}
|
||||
}
|
||||
|
||||
if (e.getLeft().getId().equals(execution2.getId())) {
|
||||
executionResult2.set(e.getLeft());
|
||||
if (e.getLeft().getState().getCurrent() == State.Type.RUNNING) {
|
||||
latch2.countDown();
|
||||
}
|
||||
if (e.getLeft().getState().getCurrent() == State.Type.SUCCESS) {
|
||||
latch3.countDown();
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
assertTrue(latch1.await(1, TimeUnit.MINUTES));
|
||||
assertTrue(latch2.await(1, TimeUnit.MINUTES));
|
||||
assertTrue(latch3.await(1, TimeUnit.MINUTES));
|
||||
receive.blockLast();
|
||||
|
||||
assertThat(executionResult1.get().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
assertThat(executionResult2.get().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
assertThat(executionResult2.get().getState().getHistories().getFirst().getState()).isEqualTo(State.Type.CREATED);
|
||||
assertThat(executionResult2.get().getState().getHistories().get(1).getState()).isEqualTo(State.Type.QUEUED);
|
||||
assertThat(executionResult2.get().getState().getHistories().get(2).getState()).isEqualTo(State.Type.RUNNING);
|
||||
assertThat(executionResult1.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
assertThat(executionResult2.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
assertThat(executionResult2.getState().getHistories().getFirst().getState()).isEqualTo(State.Type.CREATED);
|
||||
assertThat(executionResult2.getState().getHistories().get(1).getState()).isEqualTo(State.Type.QUEUED);
|
||||
assertThat(executionResult2.getState().getHistories().get(2).getState()).isEqualTo(State.Type.RUNNING);
|
||||
}
|
||||
|
||||
public void flowConcurrencyQueuePause() throws TimeoutException, QueueException, InterruptedException {
|
||||
AtomicReference<String> firstExecutionId = new AtomicReference<>();
|
||||
var firstExecutionResult = new AtomicReference<Execution>();
|
||||
var secondExecutionResult = new AtomicReference<Execution>();
|
||||
|
||||
CountDownLatch firstExecutionLatch = new CountDownLatch(1);
|
||||
CountDownLatch secondExecutionLatch = new CountDownLatch(1);
|
||||
|
||||
Flux<Execution> receive = TestsUtils.receive(executionQueue, e -> {
|
||||
if (!"flow-concurrency-queue-pause".equals(e.getLeft().getFlowId())){
|
||||
return;
|
||||
}
|
||||
String currentId = e.getLeft().getId();
|
||||
Type currentState = e.getLeft().getState().getCurrent();
|
||||
if (firstExecutionId.get() == null) {
|
||||
firstExecutionId.set(currentId);
|
||||
}
|
||||
|
||||
if (currentId.equals(firstExecutionId.get())) {
|
||||
if (currentState == State.Type.SUCCESS) {
|
||||
firstExecutionResult.set(e.getLeft());
|
||||
firstExecutionLatch.countDown();
|
||||
}
|
||||
} else {
|
||||
if (currentState == State.Type.SUCCESS) {
|
||||
secondExecutionResult.set(e.getLeft());
|
||||
secondExecutionLatch.countDown();
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
Execution execution1 = runnerUtils.runOneUntilPaused(MAIN_TENANT, "io.kestra.tests", "flow-concurrency-queue-pause");
|
||||
public void flowConcurrencyQueuePause() throws QueueException {
|
||||
Execution execution1 = runnerUtils.runOneUntilPaused(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-pause");
|
||||
Flow flow = flowRepository
|
||||
.findById(MAIN_TENANT, "io.kestra.tests", "flow-concurrency-queue-pause", Optional.empty())
|
||||
.findById(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-pause", Optional.empty())
|
||||
.orElseThrow();
|
||||
Execution execution2 = Execution.newExecution(flow, null, null, Optional.empty());
|
||||
executionQueue.emit(execution2);
|
||||
Execution secondExecutionResult = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), execution2);
|
||||
Execution firstExecutionResult = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), execution1);
|
||||
|
||||
assertThat(execution1.getState().isPaused()).isTrue();
|
||||
assertThat(execution2.getState().getCurrent()).isEqualTo(State.Type.CREATED);
|
||||
|
||||
assertTrue(firstExecutionLatch.await(10, TimeUnit.SECONDS));
|
||||
assertTrue(secondExecutionLatch.await(10, TimeUnit.SECONDS));
|
||||
receive.blockLast();
|
||||
|
||||
assertThat(firstExecutionResult.get().getId()).isEqualTo(execution1.getId());
|
||||
assertThat(firstExecutionResult.get().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
assertThat(secondExecutionResult.get().getId()).isEqualTo(execution2.getId());
|
||||
assertThat(secondExecutionResult.get().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
assertThat(secondExecutionResult.get().getState().getHistories().getFirst().getState()).isEqualTo(State.Type.CREATED);
|
||||
assertThat(secondExecutionResult.get().getState().getHistories().get(1).getState()).isEqualTo(State.Type.QUEUED);
|
||||
assertThat(secondExecutionResult.get().getState().getHistories().get(2).getState()).isEqualTo(State.Type.RUNNING);
|
||||
assertThat(firstExecutionResult.getId()).isEqualTo(execution1.getId());
|
||||
assertThat(firstExecutionResult.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
assertThat(secondExecutionResult.getId()).isEqualTo(execution2.getId());
|
||||
assertThat(secondExecutionResult.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
assertThat(secondExecutionResult.getState().getHistories().getFirst().getState()).isEqualTo(State.Type.CREATED);
|
||||
assertThat(secondExecutionResult.getState().getHistories().get(1).getState()).isEqualTo(State.Type.QUEUED);
|
||||
assertThat(secondExecutionResult.getState().getHistories().get(2).getState()).isEqualTo(State.Type.RUNNING);
|
||||
}
|
||||
|
||||
public void flowConcurrencyCancelPause() throws TimeoutException, QueueException, InterruptedException {
|
||||
AtomicReference<String> firstExecutionId = new AtomicReference<>();
|
||||
var firstExecutionResult = new AtomicReference<Execution>();
|
||||
var secondExecutionResult = new AtomicReference<Execution>();
|
||||
CountDownLatch firstExecLatch = new CountDownLatch(1);
|
||||
CountDownLatch secondExecLatch = new CountDownLatch(1);
|
||||
|
||||
Flux<Execution> receive = TestsUtils.receive(executionQueue, e -> {
|
||||
if (!"flow-concurrency-cancel-pause".equals(e.getLeft().getFlowId())){
|
||||
return;
|
||||
}
|
||||
String currentId = e.getLeft().getId();
|
||||
Type currentState = e.getLeft().getState().getCurrent();
|
||||
if (firstExecutionId.get() == null) {
|
||||
firstExecutionId.set(currentId);
|
||||
}
|
||||
if (currentId.equals(firstExecutionId.get())) {
|
||||
if (currentState == State.Type.SUCCESS) {
|
||||
firstExecutionResult.set(e.getLeft());
|
||||
firstExecLatch.countDown();
|
||||
}
|
||||
} else {
|
||||
if (currentState == State.Type.CANCELLED) {
|
||||
secondExecutionResult.set(e.getLeft());
|
||||
secondExecLatch.countDown();
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
Execution execution1 = runnerUtils.runOneUntilPaused(MAIN_TENANT, "io.kestra.tests", "flow-concurrency-cancel-pause");
|
||||
public void flowConcurrencyCancelPause() throws QueueException {
|
||||
Execution execution1 = runnerUtils.runOneUntilPaused(MAIN_TENANT, NAMESPACE, "flow-concurrency-cancel-pause");
|
||||
Flow flow = flowRepository
|
||||
.findById(MAIN_TENANT, "io.kestra.tests", "flow-concurrency-cancel-pause", Optional.empty())
|
||||
.findById(MAIN_TENANT, NAMESPACE, "flow-concurrency-cancel-pause", Optional.empty())
|
||||
.orElseThrow();
|
||||
Execution execution2 = Execution.newExecution(flow, null, null, Optional.empty());
|
||||
executionQueue.emit(execution2);
|
||||
Execution secondExecutionResult = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.CANCELLED), execution2);
|
||||
Execution firstExecutionResult = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), execution1);
|
||||
|
||||
assertThat(execution1.getState().isPaused()).isTrue();
|
||||
assertThat(execution2.getState().getCurrent()).isEqualTo(State.Type.CREATED);
|
||||
|
||||
assertTrue(firstExecLatch.await(10, TimeUnit.SECONDS));
|
||||
assertTrue(secondExecLatch.await(10, TimeUnit.SECONDS));
|
||||
receive.blockLast();
|
||||
|
||||
assertThat(firstExecutionResult.get().getId()).isEqualTo(execution1.getId());
|
||||
assertThat(firstExecutionResult.get().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
assertThat(secondExecutionResult.get().getId()).isEqualTo(execution2.getId());
|
||||
assertThat(secondExecutionResult.get().getState().getCurrent()).isEqualTo(State.Type.CANCELLED);
|
||||
assertThat(secondExecutionResult.get().getState().getHistories().getFirst().getState()).isEqualTo(State.Type.CREATED);
|
||||
assertThat(secondExecutionResult.get().getState().getHistories().get(1).getState()).isEqualTo(State.Type.CANCELLED);
|
||||
assertThat(firstExecutionResult.getId()).isEqualTo(execution1.getId());
|
||||
assertThat(firstExecutionResult.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
assertThat(secondExecutionResult.getId()).isEqualTo(execution2.getId());
|
||||
assertThat(secondExecutionResult.getState().getCurrent()).isEqualTo(State.Type.CANCELLED);
|
||||
assertThat(secondExecutionResult.getState().getHistories().getFirst().getState()).isEqualTo(State.Type.CREATED);
|
||||
assertThat(secondExecutionResult.getState().getHistories().get(1).getState()).isEqualTo(State.Type.CANCELLED);
|
||||
}
|
||||
|
||||
public void flowConcurrencyWithForEachItem() throws TimeoutException, QueueException, InterruptedException, URISyntaxException, IOException {
|
||||
URI file = storageUpload();
|
||||
public void flowConcurrencyWithForEachItem(String tenantId) throws QueueException, URISyntaxException, IOException {
|
||||
URI file = storageUpload(tenantId);
|
||||
Map<String, Object> inputs = Map.of("file", file.toString(), "batch", 4);
|
||||
Execution forEachItem = runnerUtils.runOneUntilRunning(MAIN_TENANT, "io.kestra.tests", "flow-concurrency-for-each-item", null,
|
||||
Execution forEachItem = runnerUtils.runOneUntilRunning(tenantId, NAMESPACE, "flow-concurrency-for-each-item", null,
|
||||
(flow, execution1) -> flowIO.readExecutionInputs(flow, execution1, inputs), Duration.ofSeconds(5));
|
||||
assertThat(forEachItem.getState().getCurrent()).isEqualTo(Type.RUNNING);
|
||||
|
||||
Set<String> executionIds = new HashSet<>();
|
||||
Flux<Execution> receive = TestsUtils.receive(executionQueue, e -> {
|
||||
if ("flow-concurrency-queue".equals(e.getLeft().getFlowId()) && e.getLeft().getState().isRunning()) {
|
||||
executionIds.add(e.getLeft().getId());
|
||||
}
|
||||
});
|
||||
|
||||
// wait a little to be sure there are not too many executions started
|
||||
Thread.sleep(500);
|
||||
|
||||
assertThat(executionIds).hasSize(1);
|
||||
receive.blockLast();
|
||||
|
||||
Execution terminated = runnerUtils.awaitExecution(e -> e.getId().equals(forEachItem.getId()) && e.getState().isTerminated(), () -> {}, Duration.ofSeconds(10));
|
||||
Execution terminated = runnerUtils.awaitExecution(e -> e.getState().isTerminated(),forEachItem);
|
||||
assertThat(terminated.getState().getCurrent()).isEqualTo(Type.SUCCESS);
|
||||
|
||||
List<Execution> executions = runnerUtils.awaitFlowExecutionNumber(2, tenantId, NAMESPACE, "flow-concurrency-queue");
|
||||
|
||||
assertThat(executions).extracting(e -> e.getState().getCurrent()).containsOnly(Type.SUCCESS);
|
||||
assertThat(executions.stream()
|
||||
.map(e -> e.getState().getHistories())
|
||||
.flatMap(List::stream)
|
||||
.map(History::getState)
|
||||
.toList()).contains(Type.QUEUED);
|
||||
}
|
||||
|
||||
public void flowConcurrencyQueueRestarted() throws Exception {
|
||||
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, "io.kestra.tests", "flow-concurrency-queue-fail", null, null, Duration.ofSeconds(30));
|
||||
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, NAMESPACE,
|
||||
"flow-concurrency-queue-fail", null, null, Duration.ofSeconds(30));
|
||||
Flow flow = flowRepository
|
||||
.findById(MAIN_TENANT, "io.kestra.tests", "flow-concurrency-queue-fail", Optional.empty())
|
||||
.findById(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-fail", Optional.empty())
|
||||
.orElseThrow();
|
||||
Execution execution2 = Execution.newExecution(flow, null, null, Optional.empty());
|
||||
executionQueue.emit(execution2);
|
||||
runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.RUNNING), execution2);
|
||||
|
||||
assertThat(execution1.getState().isRunning()).isTrue();
|
||||
assertThat(execution2.getState().getCurrent()).isEqualTo(State.Type.CREATED);
|
||||
|
||||
var executionResult1 = new AtomicReference<Execution>();
|
||||
var executionResult2 = new AtomicReference<Execution>();
|
||||
|
||||
CountDownLatch latch1 = new CountDownLatch(2);
|
||||
AtomicReference<Execution> failedExecution = new AtomicReference<>();
|
||||
CountDownLatch latch2 = new CountDownLatch(1);
|
||||
CountDownLatch latch3 = new CountDownLatch(1);
|
||||
|
||||
Flux<Execution> receive = TestsUtils.receive(executionQueue, e -> {
|
||||
if (e.getLeft().getId().equals(execution1.getId())) {
|
||||
executionResult1.set(e.getLeft());
|
||||
if (e.getLeft().getState().getCurrent() == Type.FAILED) {
|
||||
failedExecution.set(e.getLeft());
|
||||
latch1.countDown();
|
||||
}
|
||||
}
|
||||
|
||||
if (e.getLeft().getId().equals(execution2.getId())) {
|
||||
executionResult2.set(e.getLeft());
|
||||
if (e.getLeft().getState().getCurrent() == State.Type.RUNNING) {
|
||||
latch2.countDown();
|
||||
}
|
||||
if (e.getLeft().getState().getCurrent() == Type.FAILED) {
|
||||
latch3.countDown();
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
assertTrue(latch2.await(1, TimeUnit.MINUTES));
|
||||
assertThat(failedExecution.get()).isNotNull();
|
||||
// here the first fail and the second is now running.
|
||||
// we restart the first one, it should be queued then fail again.
|
||||
Execution restarted = executionService.restart(failedExecution.get(), null);
|
||||
executionQueue.emit(restarted);
|
||||
Execution failedExecution = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.FAILED), execution1);
|
||||
Execution restarted = executionService.restart(failedExecution, null);
|
||||
Execution executionResult1 = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.FAILED), restarted);
|
||||
Execution executionResult2 = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.FAILED), execution2);
|
||||
|
||||
assertTrue(latch3.await(1, TimeUnit.MINUTES));
|
||||
assertTrue(latch1.await(1, TimeUnit.MINUTES));
|
||||
receive.blockLast();
|
||||
|
||||
assertThat(executionResult1.get().getState().getCurrent()).isEqualTo(Type.FAILED);
|
||||
assertThat(executionResult1.getState().getCurrent()).isEqualTo(Type.FAILED);
|
||||
// it should have been queued after restarted
|
||||
assertThat(executionResult1.get().getState().getHistories().stream().anyMatch(history -> history.getState() == Type.RESTARTED)).isTrue();
|
||||
assertThat(executionResult1.get().getState().getHistories().stream().anyMatch(history -> history.getState() == Type.QUEUED)).isTrue();
|
||||
assertThat(executionResult2.get().getState().getCurrent()).isEqualTo(Type.FAILED);
|
||||
assertThat(executionResult2.get().getState().getHistories().getFirst().getState()).isEqualTo(State.Type.CREATED);
|
||||
assertThat(executionResult2.get().getState().getHistories().get(1).getState()).isEqualTo(State.Type.QUEUED);
|
||||
assertThat(executionResult2.get().getState().getHistories().get(2).getState()).isEqualTo(State.Type.RUNNING);
|
||||
assertThat(executionResult1.getState().getHistories().stream().anyMatch(history -> history.getState() == Type.RESTARTED)).isTrue();
|
||||
assertThat(executionResult1.getState().getHistories().stream().anyMatch(history -> history.getState() == Type.QUEUED)).isTrue();
|
||||
assertThat(executionResult2.getState().getCurrent()).isEqualTo(Type.FAILED);
|
||||
assertThat(executionResult2.getState().getHistories().getFirst().getState()).isEqualTo(State.Type.CREATED);
|
||||
assertThat(executionResult2.getState().getHistories().get(1).getState()).isEqualTo(State.Type.QUEUED);
assertThat(executionResult2.getState().getHistories().get(2).getState()).isEqualTo(State.Type.RUNNING);
}

public void flowConcurrencyQueueAfterExecution() throws TimeoutException, QueueException, InterruptedException {
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, "io.kestra.tests", "flow-concurrency-queue-after-execution", null, null, Duration.ofSeconds(30));
public void flowConcurrencyQueueAfterExecution() throws QueueException {
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-after-execution", null, null, Duration.ofSeconds(30));
Flow flow = flowRepository
.findById(MAIN_TENANT, "io.kestra.tests", "flow-concurrency-queue-after-execution", Optional.empty())
.findById(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-after-execution", Optional.empty())
.orElseThrow();
Execution execution2 = Execution.newExecution(flow, null, null, Optional.empty());
executionQueue.emit(execution2);
Execution executionResult2 = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), execution2);
Execution executionResult1 = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), execution1);

assertThat(execution1.getState().isRunning()).isTrue();
assertThat(execution2.getState().getCurrent()).isEqualTo(State.Type.CREATED);

var executionResult1 = new AtomicReference<Execution>();
var executionResult2 = new AtomicReference<Execution>();

CountDownLatch latch1 = new CountDownLatch(1);
CountDownLatch latch2 = new CountDownLatch(1);
CountDownLatch latch3 = new CountDownLatch(1);

Flux<Execution> receive = TestsUtils.receive(executionQueue, e -> {
if (e.getLeft().getId().equals(execution1.getId())) {
executionResult1.set(e.getLeft());
if (e.getLeft().getState().getCurrent() == State.Type.SUCCESS) {
latch1.countDown();
}
}

if (e.getLeft().getId().equals(execution2.getId())) {
executionResult2.set(e.getLeft());
if (e.getLeft().getState().getCurrent() == State.Type.RUNNING) {
latch2.countDown();
}
if (e.getLeft().getState().getCurrent() == State.Type.SUCCESS) {
latch3.countDown();
}
}
});

assertTrue(latch1.await(1, TimeUnit.MINUTES));
assertTrue(latch2.await(1, TimeUnit.MINUTES));
assertTrue(latch3.await(1, TimeUnit.MINUTES));
receive.blockLast();

assertThat(executionResult1.get().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(executionResult2.get().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(executionResult2.get().getState().getHistories().getFirst().getState()).isEqualTo(State.Type.CREATED);
assertThat(executionResult2.get().getState().getHistories().get(1).getState()).isEqualTo(State.Type.QUEUED);
assertThat(executionResult2.get().getState().getHistories().get(2).getState()).isEqualTo(State.Type.RUNNING);
assertThat(executionResult1.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(executionResult2.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(executionResult2.getState().getHistories().getFirst().getState()).isEqualTo(State.Type.CREATED);
assertThat(executionResult2.getState().getHistories().get(1).getState()).isEqualTo(State.Type.QUEUED);
assertThat(executionResult2.getState().getHistories().get(2).getState()).isEqualTo(State.Type.RUNNING);
}

private URI storageUpload() throws URISyntaxException, IOException {
public void flowConcurrencySubflow(String tenantId) throws TimeoutException, QueueException {
runnerUtils.runOneUntilRunning(tenantId, NAMESPACE, "flow-concurrency-subflow", null, null, Duration.ofSeconds(30));
runnerUtils.runOne(tenantId, NAMESPACE, "flow-concurrency-subflow");

List<Execution> subFlowExecs = runnerUtils.awaitFlowExecutionNumber(2, tenantId, NAMESPACE, "flow-concurrency-cancel");
assertThat(subFlowExecs).extracting(e -> e.getState().getCurrent()).containsExactlyInAnyOrder(Type.SUCCESS, Type.CANCELLED);

// run another execution to be sure that everything work (purge is correctly done)
Execution execution3 = runnerUtils.runOne(tenantId, NAMESPACE, "flow-concurrency-subflow");
assertThat(execution3.getState().getCurrent()).isEqualTo(Type.SUCCESS);
runnerUtils.awaitFlowExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), tenantId, NAMESPACE, "flow-concurrency-cancel");
}

private URI storageUpload(String tenantId) throws URISyntaxException, IOException {
File tempFile = File.createTempFile("file", ".txt");

Files.write(tempFile.toPath(), content());

return storageInterface.put(
MAIN_TENANT,
tenantId,
null,
new URI("/file/storage/file.txt"),
new FileInputStream(tempFile)
@@ -4,19 +4,22 @@ import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.models.property.Property;
import io.kestra.core.junit.annotations.KestraTest;
import lombok.SneakyThrows;
import io.kestra.core.utils.Await;
import io.kestra.core.utils.TestsUtils;
import java.time.Duration;
import java.util.List;
import java.util.concurrent.TimeoutException;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.services.FlowListenersInterface;
import io.kestra.plugin.core.debug.Return;
import io.kestra.core.utils.IdUtils;

import java.util.Collections;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import jakarta.inject.Inject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import static org.assertj.core.api.Assertions.assertThat;

@KestraTest
@@ -24,11 +27,11 @@ abstract public class FlowListenersTest {
@Inject
protected FlowRepositoryInterface flowRepository;

protected static FlowWithSource create(String flowId, String taskId) {
protected static FlowWithSource create(String tenantId, String flowId, String taskId) {
FlowWithSource flow = FlowWithSource.builder()
.id(flowId)
.namespace("io.kestra.unittest")
.tenantId(MAIN_TENANT)
.tenantId(tenantId)
.revision(1)
.tasks(Collections.singletonList(Return.builder()
.id(taskId)
@@ -39,88 +42,65 @@ abstract public class FlowListenersTest {
return flow.toBuilder().source(flow.sourceOrGenerateIfNull()).build();
}

public void suite(FlowListenersInterface flowListenersService) {
|
||||
private static final Logger LOG = LoggerFactory.getLogger(FlowListenersTest.class);
|
||||
|
||||
public void suite(FlowListenersInterface flowListenersService) throws TimeoutException {
|
||||
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
flowListenersService.run();
|
||||
|
||||
AtomicInteger count = new AtomicInteger();
|
||||
var ref = new Ref();
|
||||
|
||||
flowListenersService.listen(flows -> {
|
||||
count.set(flows.size());
|
||||
ref.countDownLatch.countDown();
|
||||
});
|
||||
flowListenersService.listen(flows -> count.set(getFlowsForTenant(flowListenersService, tenant).size()));
|
||||
|
||||
// initial state
|
||||
wait(ref, () -> {
|
||||
assertThat(count.get()).isZero();
|
||||
assertThat(flowListenersService.flows().size()).isZero();
|
||||
});
|
||||
LOG.info("-----------> wait for zero");
|
||||
Await.until(() -> count.get() == 0, Duration.ofMillis(10), Duration.ofSeconds(5));
|
||||
assertThat(getFlowsForTenant(flowListenersService, tenant).size()).isZero();
|
||||
|
||||
// resend on startup done for kafka
|
||||
LOG.info("-----------> wait for zero kafka");
|
||||
if (flowListenersService.getClass().getName().equals("io.kestra.ee.runner.kafka.KafkaFlowListeners")) {
|
||||
wait(ref, () -> {
|
||||
assertThat(count.get()).isZero();
|
||||
assertThat(flowListenersService.flows().size()).isZero();
|
||||
});
|
||||
Await.until(() -> count.get() == 0, Duration.ofMillis(10), Duration.ofSeconds(5));
|
||||
assertThat(getFlowsForTenant(flowListenersService, tenant).size()).isZero();
|
||||
}
|
||||
|
||||
// create first
|
||||
FlowWithSource first = create("first_" + IdUtils.create(), "test");
|
||||
FlowWithSource firstUpdated = create(first.getId(), "test2");
|
||||
LOG.info("-----------> create fist flow");
|
||||
FlowWithSource first = create(tenant, "first_" + IdUtils.create(), "test");
|
||||
FlowWithSource firstUpdated = create(tenant, first.getId(), "test2");
|
||||
|
||||
|
||||
flowRepository.create(GenericFlow.of(first));
|
||||
wait(ref, () -> {
|
||||
assertThat(count.get()).isEqualTo(1);
|
||||
assertThat(flowListenersService.flows().size()).isEqualTo(1);
|
||||
});
|
||||
Await.until(() -> count.get() == 1, Duration.ofMillis(10), Duration.ofSeconds(5));
|
||||
assertThat(getFlowsForTenant(flowListenersService, tenant).size()).isEqualTo(1);
|
||||
|
||||
// create the same id than first, no additional flows
|
||||
first = flowRepository.update(GenericFlow.of(firstUpdated), first);
|
||||
wait(ref, () -> {
|
||||
assertThat(count.get()).isEqualTo(1);
|
||||
assertThat(flowListenersService.flows().size()).isEqualTo(1);
|
||||
//assertThat(flowListenersService.flows().getFirst().getFirst().getId(), is("test2"));
|
||||
});
|
||||
Await.until(() -> count.get() == 1, Duration.ofMillis(10), Duration.ofSeconds(5));
|
||||
assertThat(getFlowsForTenant(flowListenersService, tenant).size()).isEqualTo(1);
|
||||
|
||||
FlowWithSource second = create("second_" + IdUtils.create(), "test");
|
||||
FlowWithSource second = create(tenant, "second_" + IdUtils.create(), "test");
|
||||
// create a new one
|
||||
flowRepository.create(GenericFlow.of(second));
|
||||
wait(ref, () -> {
|
||||
assertThat(count.get()).isEqualTo(2);
|
||||
assertThat(flowListenersService.flows().size()).isEqualTo(2);
|
||||
});
|
||||
Await.until(() -> count.get() == 2, Duration.ofMillis(10), Duration.ofSeconds(5));
|
||||
assertThat(getFlowsForTenant(flowListenersService, tenant).size()).isEqualTo(2);
|
||||
|
||||
// delete first
|
||||
FlowWithSource deleted = flowRepository.delete(first);
|
||||
wait(ref, () -> {
|
||||
assertThat(count.get()).isEqualTo(1);
|
||||
assertThat(flowListenersService.flows().size()).isEqualTo(1);
|
||||
});
|
||||
Await.until(() -> count.get() == 1, Duration.ofMillis(10), Duration.ofSeconds(5));
|
||||
assertThat(getFlowsForTenant(flowListenersService, tenant).size()).isEqualTo(1);
|
||||
|
||||
// restore must works
|
||||
flowRepository.create(GenericFlow.of(first));
|
||||
wait(ref, () -> {
|
||||
assertThat(count.get()).isEqualTo(2);
|
||||
assertThat(flowListenersService.flows().size()).isEqualTo(2);
|
||||
});
|
||||
Await.until(() -> count.get() == 2, Duration.ofMillis(10), Duration.ofSeconds(5));
|
||||
assertThat(getFlowsForTenant(flowListenersService, tenant).size()).isEqualTo(2);
|
||||
|
||||
FlowWithSource withTenant = first.toBuilder().tenantId("some-tenant").build();
|
||||
flowRepository.create(GenericFlow.of(withTenant));
|
||||
wait(ref, () -> {
|
||||
assertThat(count.get()).isEqualTo(3);
|
||||
assertThat(flowListenersService.flows().size()).isEqualTo(3);
|
||||
});
|
||||
}
|
||||
|
||||
public static class Ref {
|
||||
CountDownLatch countDownLatch = new CountDownLatch(1);
|
||||
public List<FlowWithSource> getFlowsForTenant(FlowListenersInterface flowListenersService, String tenantId){
|
||||
return flowListenersService.flows().stream()
|
||||
.filter(f -> tenantId.equals(f.getTenantId()))
|
||||
.toList();
|
||||
}
|
||||
|
||||
@SneakyThrows
|
||||
private void wait(Ref ref, Runnable run) {
|
||||
ref.countDownLatch.await(60, TimeUnit.SECONDS);
|
||||
run.run();
|
||||
ref.countDownLatch = new CountDownLatch(1);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,82 +2,61 @@ package io.kestra.core.runners;

import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.flows.State.Type;
import io.kestra.core.queues.QueueException;
import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface;
import io.kestra.core.utils.TestsUtils;
import jakarta.inject.Inject;
import jakarta.inject.Named;
import jakarta.inject.Singleton;
import reactor.core.publisher.Flux;

import java.util.ArrayList;
import java.time.Instant;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicReference;

import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertTrue;

@Singleton
public class FlowTriggerCaseTest {
@Inject
@Named(QueueFactoryInterface.EXECUTION_NAMED)
protected QueueInterface<Execution> executionQueue;

public static final String NAMESPACE = "io.kestra.tests.trigger";

@Inject
protected RunnerUtils runnerUtils;
protected TestRunnerUtils runnerUtils;

public void trigger() throws InterruptedException, TimeoutException, QueueException {
|
||||
CountDownLatch countDownLatch = new CountDownLatch(3);
|
||||
AtomicReference<Execution> flowListener = new AtomicReference<>();
|
||||
AtomicReference<Execution> flowListenerNoInput = new AtomicReference<>();
|
||||
AtomicReference<Execution> flowListenerNamespace = new AtomicReference<>();
|
||||
|
||||
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
|
||||
Execution execution = either.getLeft();
|
||||
if (execution.getState().getCurrent() == State.Type.SUCCESS) {
|
||||
if (flowListenerNoInput.get() == null && execution.getFlowId().equals("trigger-flow-listener-no-inputs")) {
|
||||
flowListenerNoInput.set(execution);
|
||||
countDownLatch.countDown();
|
||||
} else if (flowListener.get() == null && execution.getFlowId().equals("trigger-flow-listener")) {
|
||||
flowListener.set(execution);
|
||||
countDownLatch.countDown();
|
||||
} else if (flowListenerNamespace.get() == null && execution.getFlowId().equals("trigger-flow-listener-namespace-condition")) {
|
||||
flowListenerNamespace.set(execution);
|
||||
countDownLatch.countDown();
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests.trigger", "trigger-flow");
|
||||
public void trigger(String tenantId) throws InterruptedException, TimeoutException, QueueException {
|
||||
Execution execution = runnerUtils.runOne(tenantId, NAMESPACE, "trigger-flow");
|
||||
|
||||
assertThat(execution.getTaskRunList().size()).isEqualTo(1);
|
||||
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
|
||||
assertTrue(countDownLatch.await(15, TimeUnit.SECONDS));
|
||||
receive.blockLast();
|
||||
Execution flowListenerNoInput = runnerUtils.awaitFlowExecution(
|
||||
e -> e.getState().getCurrent().equals(Type.SUCCESS), tenantId, NAMESPACE,
|
||||
"trigger-flow-listener-no-inputs");
|
||||
Execution flowListener = runnerUtils.awaitFlowExecution(
|
||||
e -> e.getState().getCurrent().equals(Type.SUCCESS), tenantId, NAMESPACE,
|
||||
"trigger-flow-listener");
|
||||
Execution flowListenerNamespace = runnerUtils.awaitFlowExecution(
|
||||
e -> e.getState().getCurrent().equals(Type.SUCCESS), tenantId, NAMESPACE,
|
||||
"trigger-flow-listener-namespace-condition");
|
||||
|
||||
assertThat(flowListener.get().getTaskRunList().size()).isEqualTo(1);
|
||||
assertThat(flowListener.get().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
assertThat(flowListener.get().getTaskRunList().getFirst().getOutputs().get("value")).isEqualTo("childs: from parents: " + execution.getId());
|
||||
assertThat(flowListener.get().getTrigger().getVariables().get("executionId")).isEqualTo(execution.getId());
|
||||
assertThat(flowListener.get().getTrigger().getVariables().get("namespace")).isEqualTo("io.kestra.tests.trigger");
|
||||
assertThat(flowListener.get().getTrigger().getVariables().get("flowId")).isEqualTo("trigger-flow");
|
||||
|
||||
assertThat(flowListenerNoInput.get().getTaskRunList().size()).isEqualTo(1);
|
||||
assertThat(flowListenerNoInput.get().getTrigger().getVariables().get("executionId")).isEqualTo(execution.getId());
|
||||
assertThat(flowListenerNoInput.get().getTrigger().getVariables().get("namespace")).isEqualTo("io.kestra.tests.trigger");
|
||||
assertThat(flowListenerNoInput.get().getTrigger().getVariables().get("flowId")).isEqualTo("trigger-flow");
|
||||
assertThat(flowListenerNoInput.get().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
assertThat(flowListener.getTaskRunList().size()).isEqualTo(1);
|
||||
assertThat(flowListener.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
assertThat(flowListener.getTaskRunList().getFirst().getOutputs().get("value")).isEqualTo("childs: from parents: " + execution.getId());
|
||||
assertThat(flowListener.getTrigger().getVariables().get("executionId")).isEqualTo(execution.getId());
|
||||
assertThat(flowListener.getTrigger().getVariables().get("namespace")).isEqualTo(NAMESPACE);
|
||||
assertThat(flowListener.getTrigger().getVariables().get("flowId")).isEqualTo("trigger-flow");
|
||||
|
||||
assertThat(flowListenerNamespace.get().getTaskRunList().size()).isEqualTo(1);
|
||||
assertThat(flowListenerNamespace.get().getTrigger().getVariables().get("namespace")).isEqualTo("io.kestra.tests.trigger");
|
||||
assertThat(flowListenerNoInput.getTaskRunList().size()).isEqualTo(1);
|
||||
assertThat(flowListenerNoInput.getTrigger().getVariables().get("executionId")).isEqualTo(execution.getId());
|
||||
assertThat(flowListenerNoInput.getTrigger().getVariables().get("namespace")).isEqualTo(NAMESPACE);
|
||||
assertThat(flowListenerNoInput.getTrigger().getVariables().get("flowId")).isEqualTo("trigger-flow");
|
||||
assertThat(flowListenerNoInput.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
|
||||
assertThat(flowListenerNamespace.getTaskRunList().size()).isEqualTo(1);
|
||||
assertThat(flowListenerNamespace.getTrigger().getVariables().get("namespace")).isEqualTo(NAMESPACE);
|
||||
// it will be triggered for 'trigger-flow' or any of the 'trigger-flow-listener*', so we only assert that it's one of them
|
||||
assertThat(flowListenerNamespace.get().getTrigger().getVariables().get("flowId"))
|
||||
assertThat(flowListenerNamespace.getTrigger().getVariables().get("flowId"))
|
||||
.satisfiesAnyOf(
|
||||
arg -> assertThat(arg).isEqualTo("trigger-flow"),
|
||||
arg -> assertThat(arg).isEqualTo("trigger-flow-listener-no-inputs"),
|
||||
@@ -85,56 +64,43 @@ public class FlowTriggerCaseTest {
|
||||
);
|
||||
}
|
||||
|
||||
public void triggerWithPause() throws InterruptedException, TimeoutException, QueueException {
|
||||
CountDownLatch countDownLatch = new CountDownLatch(4);
|
||||
List<Execution> flowListeners = new ArrayList<>();
|
||||
|
||||
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
|
||||
Execution execution = either.getLeft();
|
||||
if (execution.getState().getCurrent() == State.Type.SUCCESS && execution.getFlowId().equals("trigger-flow-listener-with-pause")) {
|
||||
flowListeners.add(execution);
|
||||
countDownLatch.countDown();
|
||||
}
|
||||
});
|
||||
|
||||
public void triggerWithPause() throws TimeoutException, QueueException {
|
||||
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests.trigger.pause", "trigger-flow-with-pause");
|
||||
|
||||
assertThat(execution.getTaskRunList().size()).isEqualTo(3);
|
||||
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
|
||||
assertTrue(countDownLatch.await(15, TimeUnit.SECONDS));
|
||||
receive.blockLast();
|
||||
List<Execution> triggeredExec = runnerUtils.awaitFlowExecutionNumber(
|
||||
4,
|
||||
MAIN_TENANT,
|
||||
"io.kestra.tests.trigger.pause",
|
||||
"trigger-flow-listener-with-pause");
|
||||
|
||||
assertThat(flowListeners.size()).isEqualTo(4);
|
||||
assertThat(flowListeners.get(0).getOutputs().get("status")).isEqualTo("RUNNING");
|
||||
assertThat(flowListeners.get(1).getOutputs().get("status")).isEqualTo("PAUSED");
|
||||
assertThat(flowListeners.get(2).getOutputs().get("status")).isEqualTo("RUNNING");
|
||||
assertThat(flowListeners.get(3).getOutputs().get("status")).isEqualTo("SUCCESS");
|
||||
assertThat(triggeredExec.size()).isEqualTo(4);
|
||||
List<Execution> sortedExecs = triggeredExec.stream()
|
||||
.sorted(Comparator.comparing(e -> e.getState().getEndDate().orElse(Instant.now())))
|
||||
.toList();
|
||||
assertThat(sortedExecs.get(0).getOutputs().get("status")).isEqualTo("RUNNING");
|
||||
assertThat(sortedExecs.get(1).getOutputs().get("status")).isEqualTo("PAUSED");
|
||||
assertThat(sortedExecs.get(2).getOutputs().get("status")).isEqualTo("RUNNING");
|
||||
assertThat(sortedExecs.get(3).getOutputs().get("status")).isEqualTo("SUCCESS");
|
||||
}
|
||||
|
||||
public void triggerWithConcurrencyLimit() throws QueueException, TimeoutException, InterruptedException {
|
||||
CountDownLatch countDownLatch = new CountDownLatch(5);
|
||||
List<Execution> flowListeners = new ArrayList<>();
|
||||
public void triggerWithConcurrencyLimit(String tenantId) throws QueueException, TimeoutException {
|
||||
Execution execution1 = runnerUtils.runOneUntilRunning(tenantId, "io.kestra.tests.trigger.concurrency", "trigger-flow-with-concurrency-limit");
|
||||
Execution execution2 = runnerUtils.runOneUntilRunning(tenantId, "io.kestra.tests.trigger.concurrency", "trigger-flow-with-concurrency-limit");
|
||||
|
||||
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
|
||||
Execution execution = either.getLeft();
|
||||
if (execution.getState().getCurrent() == State.Type.SUCCESS && execution.getFlowId().equals("trigger-flow-listener-with-concurrency-limit")) {
|
||||
flowListeners.add(execution);
|
||||
countDownLatch.countDown();
|
||||
}
|
||||
});
|
||||
List<Execution> triggeredExec = runnerUtils.awaitFlowExecutionNumber(
|
||||
5,
|
||||
tenantId,
|
||||
"io.kestra.tests.trigger.concurrency",
|
||||
"trigger-flow-listener-with-concurrency-limit");
|
||||
|
||||
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, "io.kestra.tests.trigger.concurrency", "trigger-flow-with-concurrency-limit");
|
||||
Execution execution2 = runnerUtils.runOneUntilRunning(MAIN_TENANT, "io.kestra.tests.trigger.concurrency", "trigger-flow-with-concurrency-limit");
|
||||
|
||||
assertTrue(countDownLatch.await(15, TimeUnit.SECONDS));
|
||||
receive.blockLast();
|
||||
|
||||
assertThat(flowListeners.size()).isEqualTo(5);
|
||||
assertThat(flowListeners.stream().anyMatch(e -> e.getOutputs().get("status").equals("RUNNING") && e.getOutputs().get("executionId").equals(execution1.getId()))).isTrue();
|
||||
assertThat(flowListeners.stream().anyMatch(e -> e.getOutputs().get("status").equals("SUCCESS") && e.getOutputs().get("executionId").equals(execution1.getId()))).isTrue();
|
||||
assertThat(flowListeners.stream().anyMatch(e -> e.getOutputs().get("status").equals("QUEUED") && e.getOutputs().get("executionId").equals(execution2.getId()))).isTrue();
|
||||
assertThat(flowListeners.stream().anyMatch(e -> e.getOutputs().get("status").equals("RUNNING") && e.getOutputs().get("executionId").equals(execution2.getId()))).isTrue();
|
||||
assertThat(flowListeners.stream().anyMatch(e -> e.getOutputs().get("status").equals("SUCCESS") && e.getOutputs().get("executionId").equals(execution2.getId()))).isTrue();
|
||||
assertThat(triggeredExec.size()).isEqualTo(5);
|
||||
assertThat(triggeredExec.stream().anyMatch(e -> e.getOutputs().get("status").equals("RUNNING") && e.getOutputs().get("executionId").equals(execution1.getId()))).isTrue();
|
||||
assertThat(triggeredExec.stream().anyMatch(e -> e.getOutputs().get("status").equals("SUCCESS") && e.getOutputs().get("executionId").equals(execution1.getId()))).isTrue();
|
||||
assertThat(triggeredExec.stream().anyMatch(e -> e.getOutputs().get("status").equals("QUEUED") && e.getOutputs().get("executionId").equals(execution2.getId()))).isTrue();
|
||||
assertThat(triggeredExec.stream().anyMatch(e -> e.getOutputs().get("status").equals("RUNNING") && e.getOutputs().get("executionId").equals(execution2.getId()))).isTrue();
|
||||
assertThat(triggeredExec.stream().anyMatch(e -> e.getOutputs().get("status").equals("SUCCESS") && e.getOutputs().get("executionId").equals(execution2.getId()))).isTrue();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -48,7 +48,7 @@ public class InputsTest {
private QueueInterface<LogEntry> logQueue;

@Inject
private RunnerUtils runnerUtils;
private TestRunnerUtils runnerUtils;

public static Map<String, Object> inputs = ImmutableMap.<String, Object>builder()
.put("string", "myString")

@@ -24,7 +24,7 @@ import static org.assertj.core.api.Assertions.assertThat;
class ListenersTest {

@Inject
private RunnerUtils runnerUtils;
private TestRunnerUtils runnerUtils;

@Inject
private LocalFlowRepositoryLoader repositoryLoader;

@@ -1,243 +1,168 @@
|
||||
package io.kestra.core.runners;
|
||||
|
||||
import io.kestra.core.models.flows.State.Type;
|
||||
import io.kestra.core.queues.QueueException;
|
||||
import io.kestra.core.utils.TestsUtils;
|
||||
import io.kestra.core.repositories.ArrayListTotal;
|
||||
import io.kestra.core.repositories.ExecutionRepositoryInterface;
|
||||
import io.micronaut.context.ApplicationContext;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.flows.Flow;
|
||||
import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.queues.QueueFactoryInterface;
|
||||
import io.kestra.core.queues.QueueInterface;
|
||||
import io.kestra.core.repositories.FlowRepositoryInterface;
|
||||
|
||||
import java.time.Duration;
|
||||
import java.util.List;
|
||||
import io.micronaut.data.model.Pageable;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.inject.Named;
|
||||
import jakarta.inject.Singleton;
|
||||
import reactor.core.publisher.Flux;
|
||||
|
||||
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
@Singleton
|
||||
public class MultipleConditionTriggerCaseTest {
|
||||
|
||||
@Inject
|
||||
@Named(QueueFactoryInterface.EXECUTION_NAMED)
|
||||
protected QueueInterface<Execution> executionQueue;
|
||||
public static final String NAMESPACE = "io.kestra.tests.trigger";
|
||||
|
||||
@Inject
|
||||
protected RunnerUtils runnerUtils;
|
||||
protected TestRunnerUtils runnerUtils;
|
||||
|
||||
@Inject
|
||||
protected FlowRepositoryInterface flowRepository;
|
||||
|
||||
@Inject
|
||||
protected ExecutionRepositoryInterface executionRepository;
|
||||
|
||||
@Inject
|
||||
protected ApplicationContext applicationContext;
|
||||
|
||||
public void trigger() throws InterruptedException, TimeoutException, QueueException {
|
||||
CountDownLatch countDownLatch = new CountDownLatch(3);
|
||||
ConcurrentHashMap<String, Execution> ended = new ConcurrentHashMap<>();
|
||||
List<String> watchedExecutions = List.of("trigger-multiplecondition-flow-a",
|
||||
"trigger-multiplecondition-flow-b",
|
||||
"trigger-multiplecondition-listener"
|
||||
);
|
||||
|
||||
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
|
||||
Execution execution = either.getLeft();
|
||||
if (watchedExecutions.contains(execution.getFlowId()) && execution.getState().getCurrent() == State.Type.SUCCESS) {
|
||||
ended.put(execution.getId(), execution);
|
||||
countDownLatch.countDown();
|
||||
}
|
||||
});
|
||||
|
||||
// first one
|
||||
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests.trigger",
|
||||
"trigger-multiplecondition-flow-a", Duration.ofSeconds(60));
|
||||
Execution execution = runnerUtils.runOne(MAIN_TENANT, NAMESPACE, "trigger-multiplecondition-flow-a");
|
||||
assertThat(execution.getTaskRunList().size()).isEqualTo(1);
|
||||
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
|
||||
// wait a little to be sure that the trigger is not launching execution
|
||||
Thread.sleep(1000);
|
||||
assertThat(ended.size()).isEqualTo(1);
|
||||
ArrayListTotal<Execution> flowBExecutions = executionRepository.findByFlowId(MAIN_TENANT,
|
||||
NAMESPACE, "trigger-multiplecondition-flow-b", Pageable.UNPAGED);
|
||||
ArrayListTotal<Execution> listenerExecutions = executionRepository.findByFlowId(MAIN_TENANT,
|
||||
NAMESPACE, "trigger-multiplecondition-listener", Pageable.UNPAGED);
|
||||
assertThat(flowBExecutions).isEmpty();
|
||||
assertThat(listenerExecutions).isEmpty();
|
||||
|
||||
// second one
|
||||
execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests.trigger",
|
||||
"trigger-multiplecondition-flow-b", Duration.ofSeconds(60));
|
||||
execution = runnerUtils.runOne(MAIN_TENANT, NAMESPACE, "trigger-multiplecondition-flow-b");
|
||||
assertThat(execution.getTaskRunList().size()).isEqualTo(1);
|
||||
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
|
||||
// trigger is done
|
||||
assertTrue(countDownLatch.await(10, TimeUnit.SECONDS));
|
||||
receive.blockLast();
|
||||
assertThat(ended.size()).isEqualTo(3);
|
||||
|
||||
Flow flow = flowRepository.findById(MAIN_TENANT, "io.kestra.tests.trigger",
|
||||
"trigger-multiplecondition-listener").orElseThrow();
|
||||
Execution triggerExecution = ended.entrySet()
|
||||
.stream()
|
||||
.filter(e -> e.getValue().getFlowId().equals(flow.getId()))
|
||||
.findFirst()
|
||||
.map(Map.Entry::getValue)
|
||||
.orElseThrow();
|
||||
Execution triggerExecution = runnerUtils.awaitFlowExecution(
|
||||
e -> e.getState().getCurrent().equals(Type.SUCCESS),
|
||||
MAIN_TENANT, NAMESPACE, "trigger-multiplecondition-listener");
|
||||
|
||||
assertThat(triggerExecution.getTaskRunList().size()).isEqualTo(1);
|
||||
assertThat(triggerExecution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
|
||||
assertThat(triggerExecution.getTrigger().getVariables().get("executionId")).isEqualTo(execution.getId());
|
||||
assertThat(triggerExecution.getTrigger().getVariables().get("namespace")).isEqualTo("io.kestra.tests.trigger");
|
||||
assertThat(triggerExecution.getTrigger().getVariables().get("namespace")).isEqualTo(
|
||||
NAMESPACE);
|
||||
assertThat(triggerExecution.getTrigger().getVariables().get("flowId")).isEqualTo("trigger-multiplecondition-flow-b");
|
||||
}
|
||||
|
||||
public void failed() throws InterruptedException, TimeoutException, QueueException {
|
||||
CountDownLatch countDownLatch = new CountDownLatch(1);
|
||||
AtomicReference<Execution> listener = new AtomicReference<>();
|
||||
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
|
||||
Execution execution = either.getLeft();
|
||||
if (execution.getFlowId().equals("trigger-flow-listener-namespace-condition")
|
||||
&& execution.getState().getCurrent().isTerminated()) {
|
||||
listener.set(execution);
|
||||
countDownLatch.countDown();
|
||||
}
|
||||
});
|
||||
|
||||
public void failed(String tenantId) throws InterruptedException, TimeoutException, QueueException {
|
||||
// first one
|
||||
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests.trigger",
|
||||
"trigger-multiplecondition-flow-c", Duration.ofSeconds(60));
|
||||
Execution execution = runnerUtils.runOne(tenantId, NAMESPACE,
|
||||
"trigger-multiplecondition-flow-c");
|
||||
assertThat(execution.getTaskRunList().size()).isEqualTo(1);
|
||||
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.FAILED);
|
||||
|
||||
// wait a little to be sure that the trigger is not launching execution
|
||||
Thread.sleep(1000);
|
||||
assertThat(listener.get()).isNull();
|
||||
ArrayListTotal<Execution> byFlowId = executionRepository.findByFlowId(tenantId, NAMESPACE,
|
||||
"trigger-multiplecondition-flow-d", Pageable.UNPAGED);
|
||||
assertThat(byFlowId).isEmpty();
|
||||
|
||||
// second one
|
||||
execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests.trigger",
|
||||
"trigger-multiplecondition-flow-d", Duration.ofSeconds(60));
|
||||
execution = runnerUtils.runOne(tenantId, NAMESPACE,
|
||||
"trigger-multiplecondition-flow-d");
|
||||
assertThat(execution.getTaskRunList().size()).isEqualTo(1);
|
||||
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
|
||||
Execution triggerExecution = runnerUtils.awaitFlowExecution(
|
||||
e -> e.getState().getCurrent().equals(Type.SUCCESS),
|
||||
tenantId, NAMESPACE, "trigger-flow-listener-namespace-condition");
|
||||
|
||||
// trigger was not done
|
||||
assertTrue(countDownLatch.await(10, TimeUnit.SECONDS));
|
||||
receive.blockLast();
|
||||
assertThat(listener.get()).isNotNull();
|
||||
assertThat(listener.get().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
assertThat(triggerExecution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
}
|
||||
|
||||
public void flowTriggerPreconditions()
|
||||
throws InterruptedException, TimeoutException, QueueException {
|
||||
CountDownLatch countDownLatch = new CountDownLatch(1);
|
||||
AtomicReference<Execution> flowTrigger = new AtomicReference<>();
|
||||
|
||||
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
|
||||
Execution execution = either.getLeft();
|
||||
if (execution.getState().getCurrent() == State.Type.SUCCESS && execution.getFlowId()
|
||||
.equals("flow-trigger-preconditions-flow-listen")) {
|
||||
flowTrigger.set(execution);
|
||||
countDownLatch.countDown();
|
||||
}
|
||||
});
|
||||
public void flowTriggerPreconditions() throws TimeoutException, QueueException {
|
||||
|
||||
// flowA
|
||||
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests.trigger.preconditions",
|
||||
"flow-trigger-preconditions-flow-a", Duration.ofSeconds(60));
|
||||
"flow-trigger-preconditions-flow-a");
|
||||
assertThat(execution.getTaskRunList().size()).isEqualTo(1);
|
||||
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
|
||||
// flowB: we trigger it two times, as flow-trigger-flow-preconditions-flow-listen is configured with resetOnSuccess: false it should be triggered two times
|
||||
execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests.trigger.preconditions",
|
||||
"flow-trigger-preconditions-flow-a", Duration.ofSeconds(60));
|
||||
"flow-trigger-preconditions-flow-a");
|
||||
assertThat(execution.getTaskRunList().size()).isEqualTo(1);
|
||||
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests.trigger.preconditions",
|
||||
"flow-trigger-preconditions-flow-b", Duration.ofSeconds(60));
|
||||
"flow-trigger-preconditions-flow-b");
|
||||
assertThat(execution.getTaskRunList().size()).isEqualTo(1);
|
||||
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
|
||||
// trigger is done
|
||||
assertTrue(countDownLatch.await(1, TimeUnit.SECONDS));
|
||||
receive.blockLast();
|
||||
assertThat(flowTrigger.get()).isNotNull();
|
||||
Execution triggerExecution = runnerUtils.awaitFlowExecution(
|
||||
e -> e.getState().getCurrent().equals(Type.SUCCESS),
|
||||
MAIN_TENANT, "io.kestra.tests.trigger.preconditions", "flow-trigger-preconditions-flow-listen");
|
||||
|
||||
Execution triggerExecution = flowTrigger.get();
|
||||
assertThat(triggerExecution.getTaskRunList().size()).isEqualTo(1);
|
||||
assertThat(triggerExecution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
assertThat(triggerExecution.getTrigger().getVariables().get("outputs")).isNotNull();
|
||||
assertThat((Map<String, Object>) triggerExecution.getTrigger().getVariables().get("outputs")).containsEntry("some", "value");
|
||||
}
|
||||
|
||||
public void flowTriggerPreconditionsMergeOutputs() throws QueueException, TimeoutException, InterruptedException {
|
||||
public void flowTriggerPreconditionsMergeOutputs(String tenantId) throws QueueException, TimeoutException {
|
||||
// we do the same as in flowTriggerPreconditions() but we trigger flows in the opposite order to be sure that outputs are merged
|
||||
CountDownLatch countDownLatch = new CountDownLatch(1);
|
||||
AtomicReference<Execution> flowTrigger = new AtomicReference<>();
|
||||
|
||||
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
|
||||
Execution execution = either.getLeft();
|
||||
if (execution.getState().getCurrent() == State.Type.SUCCESS && execution.getFlowId()
|
||||
.equals("flow-trigger-preconditions-flow-listen")) {
|
||||
flowTrigger.set(execution);
|
||||
countDownLatch.countDown();
|
||||
}
|
||||
});
|
||||
|
||||
// flowB
|
||||
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests.trigger.preconditions",
|
||||
"flow-trigger-preconditions-flow-b", Duration.ofSeconds(60));
|
||||
Execution execution = runnerUtils.runOne(tenantId, "io.kestra.tests.trigger.preconditions",
|
||||
"flow-trigger-preconditions-flow-b");
|
||||
assertThat(execution.getTaskRunList().size()).isEqualTo(1);
|
||||
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
|
||||
// flowA
|
||||
execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests.trigger.preconditions",
|
||||
"flow-trigger-preconditions-flow-a", Duration.ofSeconds(60));
|
||||
execution = runnerUtils.runOne(tenantId, "io.kestra.tests.trigger.preconditions",
|
||||
"flow-trigger-preconditions-flow-a");
|
||||
assertThat(execution.getTaskRunList().size()).isEqualTo(1);
|
||||
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
|
||||
// trigger is done
|
||||
assertTrue(countDownLatch.await(1, TimeUnit.SECONDS));
|
||||
receive.blockLast();
|
||||
assertThat(flowTrigger.get()).isNotNull();
|
||||
Execution triggerExecution = runnerUtils.awaitFlowExecution(
|
||||
e -> e.getState().getCurrent().equals(Type.SUCCESS),
|
||||
tenantId, "io.kestra.tests.trigger.preconditions", "flow-trigger-preconditions-flow-listen");
|
||||
|
||||
Execution triggerExecution = flowTrigger.get();
|
||||
assertThat(triggerExecution.getTaskRunList().size()).isEqualTo(1);
|
||||
assertThat(triggerExecution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
assertThat(triggerExecution.getTrigger().getVariables().get("outputs")).isNotNull();
|
||||
assertThat((Map<String, Object>) triggerExecution.getTrigger().getVariables().get("outputs")).containsEntry("some", "value");
|
||||
}
|
||||
|
||||
public void flowTriggerOnPaused()
|
||||
throws InterruptedException, TimeoutException, QueueException {
|
||||
CountDownLatch countDownLatch = new CountDownLatch(1);
|
||||
AtomicReference<Execution> flowTrigger = new AtomicReference<>();
|
||||
|
||||
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
|
||||
Execution execution = either.getLeft();
|
||||
if (execution.getState().getCurrent() == State.Type.SUCCESS && execution.getFlowId()
|
||||
.equals("flow-trigger-paused-listen")) {
|
||||
flowTrigger.set(execution);
|
||||
countDownLatch.countDown();
|
||||
}
|
||||
});
|
||||
|
||||
public void flowTriggerOnPaused() throws TimeoutException, QueueException {
|
||||
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests.trigger.paused",
|
||||
"flow-trigger-paused-flow", Duration.ofSeconds(60));
|
||||
"flow-trigger-paused-flow");
|
||||
assertThat(execution.getTaskRunList().size()).isEqualTo(2);
|
||||
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
|
||||
// trigger is done
|
||||
assertTrue(countDownLatch.await(1, TimeUnit.SECONDS));
|
||||
receive.blockLast();
|
||||
assertThat(flowTrigger.get()).isNotNull();
|
||||
Execution triggerExecution = runnerUtils.awaitFlowExecution(
|
||||
e -> e.getState().getCurrent().equals(Type.SUCCESS),
|
||||
MAIN_TENANT, "io.kestra.tests.trigger.paused", "flow-trigger-paused-listen");
|
||||
|
||||
Execution triggerExecution = flowTrigger.get();
|
||||
assertThat(triggerExecution.getTaskRunList().size()).isEqualTo(1);
|
||||
assertThat(triggerExecution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
}
|
||||
|
||||
@@ -30,7 +30,7 @@ import static org.assertj.core.api.Assertions.assertThat;
@Singleton
public class PluginDefaultsCaseTest {
@Inject
private RunnerUtils runnerUtils;
private TestRunnerUtils runnerUtils;

public void taskDefaults() throws TimeoutException, QueueException {
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "plugin-defaults", Duration.ofSeconds(60));

@@ -4,29 +4,19 @@ import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.State;
import io.kestra.core.queues.QueueException;
import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface;
import io.kestra.core.models.flows.State.Type;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.services.ExecutionService;

import java.time.Duration;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicReference;

import io.kestra.core.utils.TestsUtils;
import jakarta.inject.Inject;
import jakarta.inject.Named;
import jakarta.inject.Singleton;
import reactor.core.publisher.Flux;

import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import static org.assertj.core.api.Assertions.assertThat;
import static io.kestra.core.utils.Rethrow.throwRunnable;
import static org.junit.jupiter.api.Assertions.assertTrue;

@Singleton
@@ -35,38 +25,30 @@ public class RestartCaseTest {
|
||||
private FlowRepositoryInterface flowRepository;
|
||||
|
||||
@Inject
|
||||
private RunnerUtils runnerUtils;
|
||||
private TestRunnerUtils runnerUtils;
|
||||
|
||||
@Inject
|
||||
private ExecutionService executionService;
|
||||
|
||||
@Inject
|
||||
@Named(QueueFactoryInterface.EXECUTION_NAMED)
|
||||
private QueueInterface<Execution> executionQueue;
|
||||
|
||||
public void restartFailedThenSuccess() throws Exception {
|
||||
Flow flow = flowRepository.findById(MAIN_TENANT, "io.kestra.tests", "restart_last_failed").orElseThrow();
|
||||
|
||||
Execution firstExecution = runnerUtils.runOne(MAIN_TENANT, flow.getNamespace(), flow.getId(), Duration.ofSeconds(60));
|
||||
Execution firstExecution = runnerUtils.runOne(MAIN_TENANT, flow.getNamespace(), flow.getId());
|
||||
|
||||
assertThat(firstExecution.getState().getCurrent()).isEqualTo(State.Type.FAILED);
|
||||
assertThat(firstExecution.getTaskRunList()).hasSize(3);
|
||||
assertThat(firstExecution.getTaskRunList().get(2).getState().getCurrent()).isEqualTo(State.Type.FAILED);
|
||||
|
||||
// wait
|
||||
Execution finishedRestartedExecution = runnerUtils.awaitExecution(
|
||||
Execution restartedExec = executionService.restart(firstExecution, null);
|
||||
assertThat(restartedExec).isNotNull();
|
||||
assertThat(restartedExec.getId()).isEqualTo(firstExecution.getId());
|
||||
assertThat(restartedExec.getParentId()).isNull();
|
||||
assertThat(restartedExec.getTaskRunList().size()).isEqualTo(3);
|
||||
assertThat(restartedExec.getState().getCurrent()).isEqualTo(State.Type.RESTARTED);
|
||||
Execution finishedRestartedExecution = runnerUtils.emitAndAwaitExecution(
|
||||
execution -> execution.getState().getCurrent() == State.Type.SUCCESS && execution.getId().equals(firstExecution.getId()),
|
||||
throwRunnable(() -> {
|
||||
Execution restartedExec = executionService.restart(firstExecution, null);
|
||||
assertThat(restartedExec).isNotNull();
|
||||
assertThat(restartedExec.getId()).isEqualTo(firstExecution.getId());
|
||||
assertThat(restartedExec.getParentId()).isNull();
|
||||
assertThat(restartedExec.getTaskRunList().size()).isEqualTo(3);
|
||||
assertThat(restartedExec.getState().getCurrent()).isEqualTo(State.Type.RESTARTED);
|
||||
|
||||
executionQueue.emit(restartedExec);
|
||||
}),
|
||||
Duration.ofSeconds(60)
|
||||
restartedExec
|
||||
);
|
||||
|
||||
assertThat(finishedRestartedExecution).isNotNull();
|
||||
@@ -93,19 +75,16 @@ public class RestartCaseTest {
|
||||
assertThat(firstExecution.getTaskRunList().getFirst().getState().getCurrent()).isEqualTo(State.Type.FAILED);
|
||||
|
||||
// wait
|
||||
Execution finishedRestartedExecution = runnerUtils.awaitExecution(
|
||||
execution -> execution.getState().getCurrent() == State.Type.FAILED && execution.getTaskRunList().getFirst().getAttempts().size() == 2,
|
||||
throwRunnable(() -> {
|
||||
Execution restartedExec = executionService.restart(firstExecution, null);
|
||||
executionQueue.emit(restartedExec);
|
||||
Execution restartedExec = executionService.restart(firstExecution, null);
|
||||
|
||||
assertThat(restartedExec).isNotNull();
|
||||
assertThat(restartedExec.getId()).isEqualTo(firstExecution.getId());
|
||||
assertThat(restartedExec.getParentId()).isNull();
|
||||
assertThat(restartedExec.getTaskRunList().size()).isEqualTo(1);
|
||||
assertThat(restartedExec.getState().getCurrent()).isEqualTo(State.Type.RESTARTED);
|
||||
}),
|
||||
Duration.ofSeconds(60)
|
||||
assertThat(restartedExec).isNotNull();
|
||||
assertThat(restartedExec.getId()).isEqualTo(firstExecution.getId());
|
||||
assertThat(restartedExec.getParentId()).isNull();
|
||||
assertThat(restartedExec.getTaskRunList().size()).isEqualTo(1);
|
||||
assertThat(restartedExec.getState().getCurrent()).isEqualTo(State.Type.RESTARTED);
|
||||
Execution finishedRestartedExecution = runnerUtils.emitAndAwaitExecution(
|
||||
execution -> execution.getState().getCurrent() == State.Type.FAILED && execution.getTaskRunList().getFirst().getAttempts().size() == 2,
|
||||
restartedExec
|
||||
);
|
||||
|
||||
assertThat(finishedRestartedExecution).isNotNull();
|
||||
@@ -128,19 +107,16 @@ public class RestartCaseTest {
|
||||
assertThat(firstExecution.getTaskRunList().get(3).getState().getCurrent()).isEqualTo(State.Type.FAILED);
|
||||
|
||||
// wait
|
||||
Execution finishedRestartedExecution = runnerUtils.awaitExecution(
|
||||
execution -> execution.getState().getCurrent() == State.Type.FAILED && execution.findTaskRunsByTaskId("failStep").stream().findFirst().get().getAttempts().size() == 2,
|
||||
throwRunnable(() -> {
|
||||
Execution restartedExec = executionService.restart(firstExecution, null);
|
||||
executionQueue.emit(restartedExec);
|
||||
Execution restartedExec = executionService.restart(firstExecution, null);
|
||||
|
||||
assertThat(restartedExec).isNotNull();
|
||||
assertThat(restartedExec.getId()).isEqualTo(firstExecution.getId());
|
||||
assertThat(restartedExec.getParentId()).isNull();
|
||||
assertThat(restartedExec.getTaskRunList().size()).isEqualTo(4);
|
||||
assertThat(restartedExec.getState().getCurrent()).isEqualTo(State.Type.RESTARTED);
|
||||
}),
|
||||
Duration.ofSeconds(60)
|
||||
assertThat(restartedExec).isNotNull();
|
||||
assertThat(restartedExec.getId()).isEqualTo(firstExecution.getId());
|
||||
assertThat(restartedExec.getParentId()).isNull();
|
||||
assertThat(restartedExec.getTaskRunList().size()).isEqualTo(4);
|
||||
assertThat(restartedExec.getState().getCurrent()).isEqualTo(State.Type.RESTARTED);
|
||||
Execution finishedRestartedExecution = runnerUtils.emitAndAwaitExecution(
|
||||
execution -> execution.getState().getCurrent() == State.Type.FAILED && execution.findTaskRunsByTaskId("failStep").stream().findFirst().get().getAttempts().size() == 2,
|
||||
restartedExec
|
||||
);
|
||||
|
||||
assertThat(finishedRestartedExecution).isNotNull();
|
||||
@@ -163,21 +139,19 @@ public class RestartCaseTest {
|
||||
assertThat(firstExecution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
|
||||
// wait
|
||||
Execution restartedExec = executionService.replay(firstExecution, firstExecution.findTaskRunByTaskIdAndValue("2_end", List.of()).getId(), null);
|
||||
|
||||
assertThat(restartedExec.getState().getCurrent()).isEqualTo(State.Type.RESTARTED);
|
||||
assertThat(restartedExec.getState().getHistories()).hasSize(4);
|
||||
assertThat(restartedExec.getTaskRunList()).hasSize(20);
|
||||
assertThat(restartedExec.getTaskRunList().get(19).getState().getCurrent()).isEqualTo(State.Type.RESTARTED);
|
||||
|
||||
assertThat(restartedExec.getId()).isNotEqualTo(firstExecution.getId());
|
||||
assertThat(restartedExec.getTaskRunList().get(1).getId()).isNotEqualTo(firstExecution.getTaskRunList().get(1).getId());
|
||||
Execution finishedRestartedExecution = runnerUtils.awaitChildExecution(
|
||||
flow,
|
||||
firstExecution,
|
||||
throwRunnable(() -> {
|
||||
Execution restartedExec = executionService.replay(firstExecution, firstExecution.findTaskRunByTaskIdAndValue("2_end", List.of()).getId(), null);
|
||||
executionQueue.emit(restartedExec);
|
||||
|
||||
assertThat(restartedExec.getState().getCurrent()).isEqualTo(State.Type.RESTARTED);
|
||||
assertThat(restartedExec.getState().getHistories()).hasSize(4);
|
||||
assertThat(restartedExec.getTaskRunList()).hasSize(20);
|
||||
assertThat(restartedExec.getTaskRunList().get(19).getState().getCurrent()).isEqualTo(State.Type.RESTARTED);
|
||||
|
||||
assertThat(restartedExec.getId()).isNotEqualTo(firstExecution.getId());
|
||||
assertThat(restartedExec.getTaskRunList().get(1).getId()).isNotEqualTo(firstExecution.getTaskRunList().get(1).getId());
|
||||
}),
|
||||
restartedExec,
|
||||
Duration.ofSeconds(60)
|
||||
);
|
||||
|
||||
@@ -195,71 +169,58 @@ public class RestartCaseTest {
|
||||
Execution restart = executionService.restart(execution, null);
|
||||
assertThat(restart.getState().getCurrent()).isEqualTo(State.Type.RESTARTED);
|
||||
|
||||
Execution restartEnded = runnerUtils.awaitExecution(
|
||||
Execution restartEnded = runnerUtils.emitAndAwaitExecution(
|
||||
e -> e.getState().getCurrent() == State.Type.FAILED,
|
||||
throwRunnable(() -> executionQueue.emit(restart)),
|
||||
Duration.ofSeconds(120)
|
||||
restart,
|
||||
Duration.ofSeconds(60)
|
||||
);
|
||||
|
||||
assertThat(restartEnded.getState().getCurrent()).isEqualTo(State.Type.FAILED);
|
||||
|
||||
Execution newRestart = executionService.restart(restartEnded, null);
|
||||
|
||||
restartEnded = runnerUtils.awaitExecution(
|
||||
restartEnded = runnerUtils.emitAndAwaitExecution(
|
||||
e -> e.getState().getCurrent() == State.Type.FAILED,
|
||||
throwRunnable(() -> executionQueue.emit(newRestart)),
|
||||
Duration.ofSeconds(120)
|
||||
newRestart,
|
||||
Duration.ofSeconds(60)
|
||||
);
|
||||
|
||||
assertThat(restartEnded.getState().getCurrent()).isEqualTo(State.Type.FAILED);
|
||||
}
|
||||
|
||||
public void restartSubflow() throws Exception {
|
||||
CountDownLatch countDownLatch = new CountDownLatch(1);
|
||||
Flux<Execution> receiveSubflows = TestsUtils.receive(executionQueue, either -> {
|
||||
Execution subflowExecution = either.getLeft();
|
||||
if (subflowExecution.getFlowId().equals("restart-child") && subflowExecution.getState().getCurrent().isFailed()) {
|
||||
countDownLatch.countDown();
|
||||
}
|
||||
});
|
||||
|
||||
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "restart-parent");
|
||||
assertThat(execution.getTaskRunList()).hasSize(3);
|
||||
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.FAILED);
|
||||
|
||||
// here we must have 1 failed subflows
|
||||
assertTrue(countDownLatch.await(1, TimeUnit.MINUTES));
|
||||
receiveSubflows.blockLast();
|
||||
runnerUtils.awaitFlowExecution(e -> e.getState().getCurrent().isFailed(), MAIN_TENANT, "io.kestra.tests", "restart-child");
|
||||
|
||||
// there is 3 values so we must restart it 3 times to end the 3 subflows
|
||||
CountDownLatch successLatch = new CountDownLatch(3);
|
||||
receiveSubflows = TestsUtils.receive(executionQueue, either -> {
|
||||
Execution subflowExecution = either.getLeft();
|
||||
if (subflowExecution.getFlowId().equals("restart-child") && subflowExecution.getState().getCurrent().isSuccess()) {
|
||||
successLatch.countDown();
|
||||
}
|
||||
});
|
||||
Execution restarted1 = executionService.restart(execution, null);
|
||||
execution = runnerUtils.awaitExecution(
|
||||
execution = runnerUtils.emitAndAwaitExecution(
|
||||
e -> e.getState().getCurrent() == State.Type.FAILED && e.getFlowId().equals("restart-parent"),
|
||||
throwRunnable(() -> executionQueue.emit(restarted1)),
|
||||
restarted1,
|
||||
Duration.ofSeconds(10)
|
||||
);
|
||||
Execution restarted2 = executionService.restart(execution, null);
|
||||
execution = runnerUtils.awaitExecution(
|
||||
execution = runnerUtils.emitAndAwaitExecution(
|
||||
e -> e.getState().getCurrent() == State.Type.FAILED && e.getFlowId().equals("restart-parent"),
|
||||
throwRunnable(() -> executionQueue.emit(restarted2)),
|
||||
restarted2,
|
||||
Duration.ofSeconds(10)
|
||||
);
|
||||
Execution restarted3 = executionService.restart(execution, null);
|
||||
execution = runnerUtils.awaitExecution(
|
||||
execution = runnerUtils.emitAndAwaitExecution(
|
||||
e -> e.getState().getCurrent() == State.Type.SUCCESS && e.getFlowId().equals("restart-parent"),
|
||||
throwRunnable(() -> executionQueue.emit(restarted3)),
|
||||
restarted3,
|
||||
Duration.ofSeconds(10)
|
||||
);
|
||||
assertThat(execution.getTaskRunList()).hasSize(6);
|
||||
assertTrue(successLatch.await(1, TimeUnit.MINUTES));
|
||||
receiveSubflows.blockLast();
|
||||
|
||||
List<Execution> childExecutions = runnerUtils.awaitFlowExecutionNumber(3, MAIN_TENANT, "io.kestra.tests", "restart-child");
|
||||
List<Execution> successfulRestart = childExecutions.stream()
|
||||
.filter(e -> e.getState().getCurrent().equals(Type.SUCCESS)).toList();
|
||||
assertThat(successfulRestart).hasSize(3);
|
||||
}
|
||||
|
||||
public void restartFailedWithFinally() throws Exception {
|
||||
@@ -272,18 +233,15 @@ public class RestartCaseTest {
|
||||
assertThat(firstExecution.getTaskRunList().get(1).getState().getCurrent()).isEqualTo(State.Type.FAILED);
|
||||
|
||||
// wait
|
||||
Execution finishedRestartedExecution = runnerUtils.awaitExecution(
|
||||
execution -> executionService.isTerminated(flow, execution) && execution.getState().isSuccess() && execution.getId().equals(firstExecution.getId()),
|
||||
throwRunnable(() -> {
|
||||
Execution restartedExec = executionService.restart(firstExecution, null);
|
||||
assertThat(restartedExec).isNotNull();
|
||||
assertThat(restartedExec.getId()).isEqualTo(firstExecution.getId());
|
||||
assertThat(restartedExec.getParentId()).isNull();
|
||||
assertThat(restartedExec.getTaskRunList().size()).isEqualTo(2);
|
||||
assertThat(restartedExec.getState().getCurrent()).isEqualTo(State.Type.RESTARTED);
|
||||
|
||||
executionQueue.emit(restartedExec);
|
||||
}),
|
||||
Execution restartedExec = executionService.restart(firstExecution, null);
|
||||
assertThat(restartedExec).isNotNull();
|
||||
assertThat(restartedExec.getId()).isEqualTo(firstExecution.getId());
|
||||
assertThat(restartedExec.getParentId()).isNull();
|
||||
assertThat(restartedExec.getTaskRunList().size()).isEqualTo(2);
|
||||
assertThat(restartedExec.getState().getCurrent()).isEqualTo(State.Type.RESTARTED);
|
||||
Execution finishedRestartedExecution = runnerUtils.emitAndAwaitExecution(
|
||||
execution -> executionService.isTerminated(flow, execution) && execution.getState().isSuccess(),
|
||||
restartedExec,
|
||||
Duration.ofSeconds(60)
|
||||
);
|
||||
|
||||
@@ -309,21 +267,18 @@ public class RestartCaseTest {
|
||||
assertThat(firstExecution.getTaskRunList().get(1).getState().getCurrent()).isEqualTo(State.Type.FAILED);
|
||||
|
||||
// wait
|
||||
Execution finishedRestartedExecution = runnerUtils.awaitExecution(
|
||||
execution -> executionService.isTerminated(flow, execution) && execution.getState().isSuccess() && execution.getId().equals(firstExecution.getId()),
|
||||
throwRunnable(() -> {
|
||||
Execution restartedExec = executionService.restart(firstExecution, null);
|
||||
assertThat(restartedExec).isNotNull();
|
||||
assertThat(restartedExec.getId()).isEqualTo(firstExecution.getId());
|
||||
assertThat(restartedExec.getParentId()).isNull();
|
||||
assertThat(restartedExec.getTaskRunList().size()).isEqualTo(2);
|
||||
assertThat(restartedExec.getState().getCurrent()).isEqualTo(State.Type.RESTARTED);
|
||||
Execution restartedExec = executionService.restart(firstExecution, null);
|
||||
assertThat(restartedExec).isNotNull();
|
||||
assertThat(restartedExec.getId()).isEqualTo(firstExecution.getId());
|
||||
assertThat(restartedExec.getParentId()).isNull();
|
||||
assertThat(restartedExec.getTaskRunList().size()).isEqualTo(2);
|
||||
assertThat(restartedExec.getState().getCurrent()).isEqualTo(State.Type.RESTARTED);
|
||||
|
||||
executionQueue.emit(restartedExec);
|
||||
}),
|
||||
Execution finishedRestartedExecution = runnerUtils.emitAndAwaitExecution(
|
||||
execution -> executionService.isTerminated(flow, execution) && execution.getState().isSuccess(),
|
||||
restartedExec,
|
||||
Duration.ofSeconds(60)
|
||||
);
|
||||
|
||||
assertThat(finishedRestartedExecution).isNotNull();
|
||||
assertThat(finishedRestartedExecution.getId()).isEqualTo(firstExecution.getId());
|
||||
assertThat(finishedRestartedExecution.getParentId()).isNull();
|
||||
|
||||
@@ -98,7 +98,7 @@ class RunContextTest {
private FlowInputOutput flowIO;

@Inject
private RunnerUtils runnerUtils;
private TestRunnerUtils runnerUtils;

@Inject
protected LocalFlowRepositoryLoader repositoryLoader;

@@ -16,7 +16,7 @@ import static org.assertj.core.api.Assertions.assertThat;
@Singleton
public class SLATestCase {
@Inject
private RunnerUtils runnerUtils;
private TestRunnerUtils runnerUtils;

public void maxDurationSLAShouldFail() throws QueueException, TimeoutException {
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "sla-max-duration-fail");
@@ -36,14 +36,14 @@ public class SLATestCase {
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
}

public void executionConditionSLAShouldCancel() throws QueueException, TimeoutException {
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "sla-execution-condition", null, (f, e) -> Map.of("string", "CANCEL"));
public void executionConditionSLAShouldCancel(String tenantId) throws QueueException, TimeoutException {
Execution execution = runnerUtils.runOne(tenantId, "io.kestra.tests", "sla-execution-condition", null, (f, e) -> Map.of("string", "CANCEL"));

assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.CANCELLED);
}

public void executionConditionSLAShouldLabel() throws QueueException, TimeoutException {
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "sla-execution-condition", null, (f, e) -> Map.of("string", "LABEL"));
public void executionConditionSLAShouldLabel(String tenantId) throws QueueException, TimeoutException {
Execution execution = runnerUtils.runOne(tenantId, "io.kestra.tests", "sla-execution-condition", null, (f, e) -> Map.of("string", "LABEL"));

assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(execution.getLabels()).contains(new Label("sla", "violated"));

@@ -3,54 +3,31 @@ package io.kestra.core.runners;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.flows.Flow;
|
||||
import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.models.flows.State.Type;
|
||||
import io.kestra.core.queues.QueueException;
|
||||
import io.kestra.core.queues.QueueFactoryInterface;
|
||||
import io.kestra.core.queues.QueueInterface;
|
||||
import io.kestra.core.repositories.FlowRepositoryInterface;
|
||||
import io.kestra.core.utils.TestsUtils;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.inject.Named;
|
||||
import jakarta.inject.Singleton;
|
||||
import reactor.core.publisher.Flux;
|
||||
|
||||
import java.time.ZonedDateTime;
|
||||
import java.util.Optional;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
@Singleton
|
||||
public class ScheduleDateCaseTest {
|
||||
@Inject
|
||||
private FlowRepositoryInterface flowRepository;
|
||||
|
||||
@Inject
|
||||
@Named(QueueFactoryInterface.EXECUTION_NAMED)
|
||||
protected QueueInterface<Execution> executionQueue;
|
||||
private TestRunnerUtils runnerUtils;
|
||||
|
||||
public void shouldScheduleOnDate() throws QueueException, InterruptedException {
|
||||
public void shouldScheduleOnDate(String tenantId) throws QueueException {
|
||||
ZonedDateTime scheduleOn = ZonedDateTime.now().plusSeconds(1);
|
||||
Flow flow = flowRepository.findById(MAIN_TENANT, "io.kestra.tests", "minimal").orElseThrow();
|
||||
Flow flow = flowRepository.findById(tenantId, "io.kestra.tests", "minimal").orElseThrow();
|
||||
Execution execution = Execution.newExecution(flow, null, null, Optional.of(scheduleOn));
|
||||
this.executionQueue.emit(execution);
|
||||
|
||||
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.CREATED);
|
||||
assertThat(execution.getScheduleDate()).isEqualTo(scheduleOn.toInstant());
|
||||
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.CREATED);
|
||||
|
||||
CountDownLatch latch1 = new CountDownLatch(1);
|
||||
|
||||
Flux<Execution> receive = TestsUtils.receive(executionQueue, e -> {
|
||||
if (e.getLeft().getId().equals(execution.getId())) {
|
||||
if (e.getLeft().getState().getCurrent() == State.Type.SUCCESS) {
|
||||
latch1.countDown();
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
assertTrue(latch1.await(1, TimeUnit.MINUTES));
|
||||
receive.blockLast();
|
||||
runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), execution);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -32,7 +32,7 @@ public class SkipExecutionCaseTest {
|
||||
protected QueueInterface<Execution> executionQueue;
|
||||
|
||||
@Inject
|
||||
protected RunnerUtils runnerUtils;
|
||||
protected TestRunnerUtils runnerUtils;
|
||||
|
||||
@Inject
|
||||
private ExecutionRepositoryInterface executionRepository;
|
||||
|
||||
@@ -30,7 +30,7 @@ public class TaskCacheTest {
|
||||
static final AtomicInteger COUNTER = new AtomicInteger(0);
|
||||
|
||||
@Inject
|
||||
private RunnerUtils runnerUtils;
|
||||
private TestRunnerUtils runnerUtils;
|
||||
|
||||
@BeforeEach
|
||||
void resetCounter() {
|
||||
|
||||
@@ -33,7 +33,7 @@ public class TaskWithAllowFailureTest {
|
||||
private FlowInputOutput flowIO;
|
||||
|
||||
@Inject
|
||||
private RunnerUtils runnerUtils;
|
||||
private TestRunnerUtils runnerUtils;
|
||||
|
||||
@Test
|
||||
@ExecuteFlow("flows/valids/task-allow-failure-runnable.yml")
|
||||
|
||||
@@ -9,6 +9,7 @@ import io.kestra.core.queues.QueueException;
|
||||
import io.kestra.core.storages.StorageInterface;
|
||||
import jakarta.inject.Inject;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.junit.jupiter.api.Disabled;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.io.File;
|
||||
@@ -34,7 +35,7 @@ public class TaskWithAllowWarningTest {
|
||||
private FlowInputOutput flowIO;
|
||||
|
||||
@Inject
|
||||
private RunnerUtils runnerUtils;
|
||||
private TestRunnerUtils runnerUtils;
|
||||
|
||||
@Test
|
||||
@ExecuteFlow("flows/valids/task-allow-warning-runnable.yml")
|
||||
@@ -54,6 +55,7 @@ public class TaskWithAllowWarningTest {
|
||||
}
|
||||
|
||||
@Test
|
||||
@Disabled("This test does not test failing in subflow foreach as the subflow is not called, needs to be rework before reactivation")
|
||||
@LoadFlows({"flows/valids/task-allow-warning-executable-foreachitem.yml"})
|
||||
void executableTask_ForEachItem() throws TimeoutException, QueueException, URISyntaxException, IOException {
|
||||
URI file = storageUpload();
|
||||
|
||||
@@ -18,6 +18,7 @@ import org.assertj.core.api.AbstractObjectAssert;
|
||||
import org.assertj.core.api.ObjectAssert;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.time.Duration;
|
||||
import java.time.ZonedDateTime;
|
||||
import java.time.temporal.ChronoUnit;
|
||||
import java.util.List;
|
||||
@@ -37,7 +38,7 @@ class TestSuiteTest {
|
||||
protected QueueInterface<Execution> executionQueue;
|
||||
|
||||
@Inject
|
||||
protected RunnerUtils runnerUtils;
|
||||
protected TestRunnerUtils runnerUtils;
|
||||
|
||||
@Inject
|
||||
protected FlowRepositoryInterface flowRepository;
|
||||
@@ -146,7 +147,7 @@ class TestSuiteTest {
|
||||
.state(new State())
|
||||
.build();
|
||||
|
||||
return runnerUtils.runOne(execution, flow, null);
|
||||
return runnerUtils.runOne(execution, flow, Duration.ofSeconds(10));
|
||||
}
|
||||
|
||||
private static AbstractObjectAssert<?, Object> assertOutputForTask(Execution executionResult, String taskId) {
|
||||
|
||||
@@ -14,10 +14,13 @@ import java.util.Date;
|
||||
import java.util.Map;
|
||||
|
||||
import jakarta.inject.Inject;
|
||||
import org.junit.jupiter.api.TestInstance;
|
||||
import org.junit.jupiter.api.TestInstance.Lifecycle;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
@KestraTest
|
||||
@TestInstance(Lifecycle.PER_CLASS)
|
||||
class DateFilterTest {
|
||||
public static final ZonedDateTime NOW = ZonedDateTime.parse("2013-09-08T16:19:12.123456+01");
|
||||
|
||||
|
||||
@@ -14,10 +14,14 @@ public class FunctionTestUtils {
|
||||
}
|
||||
|
||||
public static Map<String, Object> getVariables(String namespace) {
|
||||
return getVariables(MAIN_TENANT, namespace);
|
||||
}
|
||||
|
||||
public static Map<String, Object> getVariables(String tenantId, String namespace) {
|
||||
return Map.of(
|
||||
"flow", Map.of(
|
||||
"id", "kv",
|
||||
"tenantId", MAIN_TENANT,
|
||||
"tenantId", tenantId,
|
||||
"namespace", namespace)
|
||||
);
|
||||
}
|
||||
|
||||
@@ -12,6 +12,7 @@ import io.kestra.core.storages.StorageInterface;
|
||||
import io.kestra.core.storages.kv.InternalKVStore;
|
||||
import io.kestra.core.storages.kv.KVStore;
|
||||
import io.kestra.core.storages.kv.KVValueAndMetadata;
|
||||
import io.kestra.core.utils.TestsUtils;
|
||||
import jakarta.inject.Inject;
|
||||
import java.io.IOException;
|
||||
import java.net.URI;
|
||||
@@ -29,18 +30,14 @@ public class KvFunctionTest {
|
||||
@Inject
|
||||
VariableRenderer variableRenderer;
|
||||
|
||||
@BeforeEach
|
||||
void reset() throws IOException {
|
||||
storageInterface.deleteByPrefix(MAIN_TENANT, null, URI.create(StorageContext.kvPrefix("io.kestra.tests")));
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldGetValueFromKVGivenExistingKey() throws IllegalVariableEvaluationException, IOException {
|
||||
// Given
|
||||
KVStore kv = new InternalKVStore(MAIN_TENANT, "io.kestra.tests", storageInterface);
|
||||
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
KVStore kv = new InternalKVStore(tenant, "io.kestra.tests", storageInterface);
|
||||
kv.put("my-key", new KVValueAndMetadata(null, Map.of("field", "value")));
|
||||
|
||||
Map<String, Object> variables = getVariables("io.kestra.tests");
|
||||
Map<String, Object> variables = getVariables(tenant, "io.kestra.tests");
|
||||
|
||||
// When
|
||||
String rendered = variableRenderer.render("{{ kv('my-key') }}", variables);
|
||||
@@ -52,13 +49,14 @@ public class KvFunctionTest {
|
||||
@Test
|
||||
void shouldGetValueFromKVGivenExistingKeyWithInheritance() throws IllegalVariableEvaluationException, IOException {
|
||||
// Given
|
||||
KVStore kv = new InternalKVStore(MAIN_TENANT, "my.company", storageInterface);
|
||||
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
KVStore kv = new InternalKVStore(tenant, "my.company", storageInterface);
|
||||
kv.put("my-key", new KVValueAndMetadata(null, Map.of("field", "value")));
|
||||
|
||||
KVStore firstKv = new InternalKVStore(MAIN_TENANT, "my", storageInterface);
|
||||
KVStore firstKv = new InternalKVStore(tenant, "my", storageInterface);
|
||||
firstKv.put("my-key", new KVValueAndMetadata(null, Map.of("field", "firstValue")));
|
||||
|
||||
Map<String, Object> variables = getVariables("my.company.team");
|
||||
Map<String, Object> variables = getVariables(tenant, "my.company.team");
|
||||
|
||||
// When
|
||||
String rendered = variableRenderer.render("{{ kv('my-key') }}", variables);
|
||||
@@ -70,10 +68,11 @@ public class KvFunctionTest {
|
||||
@Test
|
||||
void shouldNotGetValueFromKVWithGivenNamespaceAndInheritance() throws IOException {
|
||||
// Given
|
||||
KVStore kv = new InternalKVStore(MAIN_TENANT, "kv", storageInterface);
|
||||
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
KVStore kv = new InternalKVStore(tenant, "kv", storageInterface);
|
||||
kv.put("my-key", new KVValueAndMetadata(null, Map.of("field", "value")));
|
||||
|
||||
Map<String, Object> variables = getVariables("my.company.team");
|
||||
Map<String, Object> variables = getVariables(tenant, "my.company.team");
|
||||
|
||||
// When
|
||||
Assertions.assertThrows(IllegalVariableEvaluationException.class, () ->
|
||||
@@ -83,10 +82,11 @@ public class KvFunctionTest {
|
||||
@Test
|
||||
void shouldGetValueFromKVGivenExistingAndNamespace() throws IllegalVariableEvaluationException, IOException {
|
||||
// Given
|
||||
KVStore kv = new InternalKVStore(MAIN_TENANT, "kv", storageInterface);
|
||||
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
KVStore kv = new InternalKVStore(tenant, "kv", storageInterface);
|
||||
kv.put("my-key", new KVValueAndMetadata(null, Map.of("field", "value")));
|
||||
|
||||
Map<String, Object> variables = getVariables("io.kestra.tests");
|
||||
Map<String, Object> variables = getVariables(tenant, "io.kestra.tests");
|
||||
|
||||
// When
|
||||
String rendered = variableRenderer.render("{{ kv('my-key', namespace='kv') }}", variables);
|
||||
@@ -98,7 +98,8 @@ public class KvFunctionTest {
|
||||
@Test
|
||||
void shouldGetEmptyGivenNonExistingKeyAndErrorOnMissingFalse() throws IllegalVariableEvaluationException {
|
||||
// Given
|
||||
Map<String, Object> variables = getVariables("io.kestra.tests");
|
||||
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
Map<String, Object> variables = getVariables(tenant, "io.kestra.tests");
|
||||
|
||||
// When
|
||||
String rendered = variableRenderer.render("{{ kv('my-key', errorOnMissing=false) }}", variables);
|
||||
@@ -110,7 +111,8 @@ public class KvFunctionTest {
|
||||
@Test
|
||||
void shouldFailGivenNonExistingKeyAndErrorOnMissingTrue() {
|
||||
// Given
|
||||
Map<String, Object> variables = getVariables("io.kestra.tests");
|
||||
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
Map<String, Object> variables = getVariables(tenant, "io.kestra.tests");
|
||||
|
||||
// When
|
||||
IllegalVariableEvaluationException exception = Assertions.assertThrows(IllegalVariableEvaluationException.class, () -> {
|
||||
@@ -124,7 +126,8 @@ public class KvFunctionTest {
|
||||
@Test
|
||||
void shouldFailGivenNonExistingKeyUsingDefaults() {
|
||||
// Given
|
||||
Map<String, Object> variables = getVariables("io.kestra.tests");
|
||||
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
Map<String, Object> variables = getVariables(tenant, "io.kestra.tests");
|
||||
// When
|
||||
IllegalVariableEvaluationException exception = Assertions.assertThrows(IllegalVariableEvaluationException.class, () -> {
|
||||
variableRenderer.render("{{ kv('my-key') }}", variables);
|
||||
|
||||
@@ -0,0 +1,52 @@
|
||||
package io.kestra.core.runners.pebble.functions;
|
||||
|
||||
import io.kestra.core.junit.annotations.KestraTest;
|
||||
import io.kestra.core.runners.VariableRenderer;
|
||||
import jakarta.inject.Inject;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
@KestraTest
|
||||
public class NanoIDFuntionTest {
|
||||
|
||||
@Inject
|
||||
VariableRenderer variableRenderer;
|
||||
|
||||
@Test
|
||||
void checkStandardNanoId() throws Exception {
|
||||
String rendered =
|
||||
variableRenderer.render(
|
||||
"{{ nanoId() }}", Collections.emptyMap());
|
||||
assertThat(!rendered.isEmpty()).as(rendered).isTrue();
|
||||
assertThat(rendered.length()).isEqualTo(21L);
|
||||
}
|
||||
|
||||
@Test
|
||||
void checkDifferentLength() throws Exception {
|
||||
String rendered =
|
||||
variableRenderer.render(
|
||||
"{{ nanoId(length) }}", Map.of("length", 8L));
|
||||
assertThat(!rendered.isEmpty()).as(rendered).isTrue();
|
||||
assertThat(rendered.length()).isEqualTo(8L);
|
||||
}
|
||||
|
||||
@Test
|
||||
void checkDifferentAlphabet() throws Exception {
|
||||
String rendered =
|
||||
variableRenderer.render(
|
||||
"{{ nanoId(length,alphabet) }}", Map.of("length", 21L, "alphabet", ":;<=>?@"));
|
||||
assertThat(!rendered.isEmpty()).as(rendered).isTrue();
|
||||
assertThat(rendered.length()).isEqualTo(21L);
|
||||
for (char c : rendered.toCharArray()) {
|
||||
assertThat(c).isGreaterThanOrEqualTo(':');
|
||||
assertThat(c).isLessThanOrEqualTo('@');
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
@@ -12,7 +12,7 @@ import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.queues.QueueException;
|
||||
import io.kestra.core.queues.QueueFactoryInterface;
|
||||
import io.kestra.core.queues.QueueInterface;
|
||||
import io.kestra.core.runners.RunnerUtils;
|
||||
import io.kestra.core.runners.TestRunnerUtils;
|
||||
import io.kestra.core.runners.VariableRenderer;
|
||||
import io.kestra.core.utils.TestsUtils;
|
||||
import io.micronaut.test.annotation.MockBean;
|
||||
@@ -41,7 +41,7 @@ public class SecretFunctionTest {
|
||||
QueueInterface<LogEntry> logQueue;
|
||||
|
||||
@Inject
|
||||
private RunnerUtils runnerUtils;
|
||||
private TestRunnerUtils runnerUtils;
|
||||
|
||||
@Inject
|
||||
private SecretService secretService;
|
||||
|
||||
@@ -21,9 +21,11 @@ import java.nio.file.Path;
|
||||
import java.time.Duration;
|
||||
import java.time.Instant;
|
||||
import java.time.temporal.ChronoUnit;
|
||||
import java.time.temporal.TemporalUnit;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
@@ -91,7 +93,7 @@ class InternalKVStoreTest {
|
||||
|
||||
String description = "myDescription";
|
||||
kv.put(TEST_KV_KEY, new KVValueAndMetadata(new KVMetadata(description, Duration.ofMinutes(5)), complexValue));
|
||||
kv.put("key-without-expiration", new KVValueAndMetadata(new KVMetadata(null, null), complexValue));
|
||||
kv.put("key-without-expiration", new KVValueAndMetadata(new KVMetadata(null, (Duration) null), complexValue));
|
||||
kv.put("expired-key", new KVValueAndMetadata(new KVMetadata(null, Duration.ofMillis(1)), complexValue));
|
||||
|
||||
List<KVEntry> list = kv.listAll();
|
||||
@@ -213,6 +215,22 @@ class InternalKVStoreTest {
|
||||
// When
|
||||
Assertions.assertThrows(ResourceExpiredException.class, () -> kv.getValue(TEST_KV_KEY));
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldGetKVValueAndMetadata() throws IOException {
|
||||
// Given
|
||||
final InternalKVStore kv = kv();
|
||||
KVValueAndMetadata val = new KVValueAndMetadata(new KVMetadata(null, Duration.ofMinutes(5)), complexValue);
|
||||
kv.put(TEST_KV_KEY, val);
|
||||
|
||||
// When
|
||||
Optional<KVValueAndMetadata> result = kv.findMetadataAndValue(TEST_KV_KEY);
|
||||
|
||||
// Then
|
||||
Assertions.assertEquals(val.value(), result.get().value());
|
||||
Assertions.assertEquals(val.metadata().getDescription(), result.get().metadata().getDescription());
|
||||
Assertions.assertEquals(val.metadata().getExpirationDate().truncatedTo(ChronoUnit.MILLIS), result.get().metadata().getExpirationDate().truncatedTo(ChronoUnit.MILLIS));
|
||||
}
|
||||
|
||||
@Test
|
||||
void illegalKey() {
|
||||
|
||||
@@ -5,6 +5,9 @@ import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThatObject;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
|
||||
class VersionTest {
|
||||
|
||||
@Test
|
||||
@@ -27,27 +30,27 @@ class VersionTest {
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldCreateVersionFromStringGivenMajorMinorIncrementVersion() {
|
||||
void shouldCreateVersionFromStringGivenMajorMinorPatchVersion() {
|
||||
Version version = Version.of("1.2.3");
|
||||
Assertions.assertEquals(1, version.majorVersion());
|
||||
Assertions.assertEquals(2, version.minorVersion());
|
||||
Assertions.assertEquals(3, version.incrementalVersion());
|
||||
Assertions.assertEquals(3, version.patchVersion());
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldCreateVersionFromPrefixedStringGivenMajorMinorIncrementVersion() {
|
||||
void shouldCreateVersionFromPrefixedStringGivenMajorMinorPatchVersion() {
|
||||
Version version = Version.of("v1.2.3");
|
||||
Assertions.assertEquals(1, version.majorVersion());
|
||||
Assertions.assertEquals(2, version.minorVersion());
|
||||
Assertions.assertEquals(3, version.incrementalVersion());
|
||||
Assertions.assertEquals(3, version.patchVersion());
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldCreateVersionFromStringGivenMajorMinorIncrementAndQualifierVersion() {
|
||||
void shouldCreateVersionFromStringGivenMajorMinorPatchAndQualifierVersion() {
|
||||
Version version = Version.of("1.2.3-SNAPSHOT");
|
||||
Assertions.assertEquals(1, version.majorVersion());
|
||||
Assertions.assertEquals(2, version.minorVersion());
|
||||
Assertions.assertEquals(3, version.incrementalVersion());
|
||||
Assertions.assertEquals(3, version.patchVersion());
|
||||
Assertions.assertEquals("SNAPSHOT", version.qualifier().toString());
|
||||
}
|
||||
|
||||
@@ -56,7 +59,7 @@ class VersionTest {
|
||||
Version version = Version.of("1.2.3-RC0-SNAPSHOT");
|
||||
Assertions.assertEquals(1, version.majorVersion());
|
||||
Assertions.assertEquals(2, version.minorVersion());
|
||||
Assertions.assertEquals(3, version.incrementalVersion());
|
||||
Assertions.assertEquals(3, version.patchVersion());
|
||||
Assertions.assertEquals("RC0-SNAPSHOT", version.qualifier().toString());
|
||||
}
|
||||
|
||||
@@ -82,13 +85,13 @@ class VersionTest {
|
||||
}
|
||||
|
||||
@Test
|
||||
void shouldGetLatestVersionGivenMajorMinorIncrementalVersions() {
|
||||
void shouldGetLatestVersionGivenMajorMinorPatchVersions() {
|
||||
Version result = Version.getLatest(Version.of("1.0.9"), Version.of("1.0.10"), Version.of("1.0.11"));
|
||||
Assertions.assertEquals(Version.of("1.0.11"), result);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void shouldGetOldestVersionGivenMajorMinorIncrementalVersions() {
|
||||
public void shouldGetOldestVersionGivenMajorMinorPatchVersions() {
|
||||
Version result = Version.getOldest(Version.of("1.0.9"), Version.of("1.0.10"), Version.of("1.0.11"));
|
||||
Assertions.assertEquals(Version.of("1.0.9"), result);
|
||||
}
|
||||
@@ -142,23 +145,49 @@ class VersionTest {
|
||||
|
||||
@Test
|
||||
public void shouldGetStableVersionGivenMajorMinorPatchVersion() {
|
||||
Assertions.assertEquals(Version.of("1.2.1"), Version.getStable(Version.of("1.2.1"), List.of(Version.of("1.2.1"), Version.of("1.2.3"), Version.of("0.99.0"))));
|
||||
Assertions.assertEquals(Version.of("1.2.3"), Version.getStable(Version.of("1.2.0"), List.of(Version.of("1.2.1"), Version.of("1.2.3"), Version.of("0.99.0"))));
|
||||
// Given
|
||||
List<Version> versions = List.of(Version.of("1.2.1"), Version.of("1.2.3"), Version.of("0.99.0"));
|
||||
|
||||
// When - Then
|
||||
assertThatObject(Version.getStable(Version.of("1.2.1"), versions)).isEqualTo(Version.of("1.2.1"));
|
||||
assertThatObject(Version.getStable(Version.of("1.2.0"), versions)).isNull();
|
||||
assertThatObject(Version.getStable(Version.of("1.2.4"), versions)).isNull();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void shouldGetStableVersionGivenMajorMinorVersion() {
|
||||
Assertions.assertEquals(Version.of("1.2.3"), Version.getStable(Version.of("1.2"), List.of(Version.of("1.2.1"), Version.of("1.2.3"), Version.of("0.99.0"))));
|
||||
public void shouldGetStableGivenMajorAndMinorVersionOnly() {
|
||||
// Given
|
||||
List<Version> versions = List.of(Version.of("1.2.1"), Version.of("1.2.3"), Version.of("0.99.0"));
|
||||
|
||||
// When - Then
|
||||
assertThatObject(Version.getStable(Version.of("1.2"), versions)).isEqualTo(Version.of("1.2.3"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void shouldGetStableVersionGivenMajorVersion() {
|
||||
Assertions.assertEquals(Version.of("1.2.3"), Version.getStable(Version.of("1"), List.of(Version.of("1.2.1"), Version.of("1.2.3"), Version.of("0.99.0"))));
|
||||
public void shouldGetStableGivenMajorVersionOnly() {
|
||||
// Given
|
||||
List<Version> versions = List.of(Version.of("1.2.1"), Version.of("1.2.3"), Version.of("0.99.0"));
|
||||
|
||||
// When - Then
|
||||
assertThatObject(Version.getStable(Version.of("1"), versions)).isEqualTo(Version.of("1.2.3"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void shouldGetNullForStableVersionGivenNoCompatibleVersions() {
|
||||
Assertions.assertNull(Version.getStable(Version.of("3.0"), List.of(Version.of("1.3.0"), Version.of("2.0.0"), Version.of("0.99.0"))));
|
||||
Assertions.assertNull(Version.getStable(Version.of("0.1"), List.of(Version.of("1.3.0"), Version.of("2.0.0"), Version.of("0.99.0"))));
|
||||
public void shouldGetNullForStableGivenMajorAndMinorVersionOnly() {
|
||||
// Given
|
||||
List<Version> versions = List.of(Version.of("1.2.1"), Version.of("1.2.3"), Version.of("0.99.0"));
|
||||
|
||||
// When - Then
|
||||
assertThatObject(Version.getStable(Version.of("2.0"), versions)).isNull();
|
||||
assertThatObject(Version.getStable(Version.of("0.1"), versions)).isNull();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void shouldGetNullForStableGivenMajorVersionOnly() {
|
||||
// Given
|
||||
List<Version> versions = List.of(Version.of("1.2.1"), Version.of("1.2.3"), Version.of("0.99.0"));
|
||||
|
||||
// When - Then
|
||||
assertThatObject(Version.getStable(Version.of("2"), versions)).isNull();
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
package io.kestra.plugin.core.execution;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.context.TestRunContextFactory;
|
||||
import io.kestra.core.junit.annotations.KestraTest;
|
||||
import io.kestra.core.models.executions.statistics.Flow;
|
||||
import io.kestra.core.models.flows.State;
|
||||
@@ -8,7 +8,6 @@ import io.kestra.core.models.property.Property;
|
||||
import io.kestra.core.repositories.AbstractExecutionRepositoryTest;
|
||||
import io.kestra.core.repositories.ExecutionRepositoryInterface;
|
||||
import io.kestra.core.runners.RunContext;
|
||||
import io.kestra.core.runners.RunContextFactory;
|
||||
import io.kestra.core.utils.IdUtils;
|
||||
import io.kestra.core.utils.TestsUtils;
|
||||
import jakarta.inject.Inject;
|
||||
@@ -20,8 +19,10 @@ import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
@KestraTest
|
||||
class CountTest {
|
||||
|
||||
public static final String NAMESPACE = "io.kestra.unittest";
|
||||
@Inject
|
||||
RunContextFactory runContextFactory;
|
||||
TestRunContextFactory runContextFactory;
|
||||
|
||||
@Inject
|
||||
ExecutionRepositoryInterface executionRepository;
|
||||
@@ -29,8 +30,10 @@ class CountTest {
|
||||
|
||||
@Test
|
||||
void run() throws Exception {
|
||||
var tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
|
||||
for (int i = 0; i < 28; i++) {
|
||||
executionRepository.save(AbstractExecutionRepositoryTest.builder(
|
||||
tenant,
|
||||
i < 5 ? State.Type.RUNNING : (i < 8 ? State.Type.FAILED : State.Type.SUCCESS),
|
||||
i < 4 ? "first" : (i < 10 ? "second" : "third")
|
||||
).build());
|
||||
@@ -49,7 +52,8 @@ class CountTest {
|
||||
.endDate(new Property<>("{{ now() }}"))
|
||||
.build();
|
||||
|
||||
RunContext runContext = TestsUtils.mockRunContext(runContextFactory, task, ImmutableMap.of("namespace", "io.kestra.unittest"));
|
||||
RunContext runContext = runContextFactory.of("id", NAMESPACE, tenant);
|
||||
|
||||
Count.Output run = task.run(runContext);
|
||||
|
||||
assertThat(run.getResults().size()).isEqualTo(2);
|
||||
|
||||
@@ -15,7 +15,7 @@ class ExitTest {
|
||||
@ExecuteFlow("flows/valids/exit.yaml")
|
||||
void shouldExitTheExecution(Execution execution) {
|
||||
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.WARNING);
|
||||
assertThat(execution.getTaskRunList().size()).isEqualTo(2);
|
||||
assertThat(execution.getTaskRunList()).hasSize(2);
|
||||
assertThat(execution.getTaskRunList().getFirst().getState().getCurrent()).isEqualTo(State.Type.WARNING);
|
||||
}
|
||||
|
||||
@@ -23,9 +23,19 @@ class ExitTest {
|
||||
@ExecuteFlow("flows/valids/exit-killed.yaml")
|
||||
void shouldExitAndKillTheExecution(Execution execution) {
|
||||
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.KILLED);
|
||||
assertThat(execution.getTaskRunList().size()).isEqualTo(2);
|
||||
assertThat(execution.getTaskRunList()).hasSize(2);
|
||||
assertThat(execution.getTaskRunList().getFirst().getState().getCurrent()).isEqualTo(State.Type.KILLED);
|
||||
assertThat(execution.getTaskRunList().get(1).getState().getCurrent()).isEqualTo(State.Type.KILLED);
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
@ExecuteFlow("flows/valids/exit-nested.yaml")
|
||||
void shouldExitAndFailNestedIf(Execution execution) {
|
||||
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.FAILED);
|
||||
assertThat(execution.getTaskRunList()).hasSize(4);
|
||||
assertThat(execution.findTaskRunsByTaskId("if_some_bool").getFirst().getState().getCurrent()).isEqualTo(State.Type.FAILED);
|
||||
assertThat(execution.findTaskRunsByTaskId("nested_bool_check").getFirst().getState().getCurrent()).isEqualTo(State.Type.FAILED);
|
||||
assertThat(execution.findTaskRunsByTaskId("nested_was_false").getFirst().getState().getCurrent()).isEqualTo(State.Type.FAILED);
|
||||
}
|
||||
}
|
||||
@@ -8,7 +8,7 @@ import io.kestra.core.junit.annotations.LoadFlows;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.queues.QueueException;
|
||||
import io.kestra.core.runners.RunnerUtils;
|
||||
import io.kestra.core.runners.TestRunnerUtils;
|
||||
import jakarta.inject.Inject;
|
||||
import java.time.Duration;
|
||||
import java.util.Map;
|
||||
@@ -19,7 +19,7 @@ import org.junit.jupiter.api.Test;
|
||||
public class FailTest {
|
||||
|
||||
@Inject
|
||||
private RunnerUtils runnerUtils;
|
||||
private TestRunnerUtils runnerUtils;
|
||||
|
||||
@Test
|
||||
@LoadFlows({"flows/valids/fail-on-switch.yaml"})
|
||||
|
||||
@@ -5,7 +5,7 @@ import io.kestra.core.junit.annotations.LoadFlows;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.repositories.ExecutionRepositoryInterface;
|
||||
import io.kestra.core.runners.RunnerUtils;
|
||||
import io.kestra.core.runners.TestRunnerUtils;
|
||||
import io.kestra.core.utils.Await;
|
||||
import jakarta.inject.Inject;
|
||||
import org.junit.jupiter.api.Test;
|
||||
@@ -20,7 +20,7 @@ import static org.assertj.core.api.Assertions.assertThat;
|
||||
class ResumeTest {
|
||||
|
||||
@Inject
|
||||
private RunnerUtils runnerUtils;
|
||||
private TestRunnerUtils runnerUtils;
|
||||
|
||||
@Inject
|
||||
private ExecutionRepositoryInterface executionRepository;
|
||||
|
||||
@@ -6,7 +6,7 @@ import io.kestra.core.junit.annotations.KestraTest;
|
||||
import io.kestra.core.junit.annotations.LoadFlows;
|
||||
import io.kestra.core.queues.QueueException;
|
||||
import io.kestra.core.runners.FlowInputOutput;
|
||||
import io.kestra.core.runners.RunnerUtils;
|
||||
import io.kestra.core.runners.TestRunnerUtils;
|
||||
import jakarta.inject.Inject;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
@@ -14,7 +14,6 @@ import io.kestra.core.models.flows.State;
|
||||
|
||||
import java.util.concurrent.TimeoutException;
|
||||
|
||||
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
@KestraTest(startRunner = true)
|
||||
@@ -22,7 +21,7 @@ class AllowFailureTest {
|
||||
@Inject
|
||||
private FlowInputOutput flowIO;
|
||||
@Inject
|
||||
protected RunnerUtils runnerUtils;
|
||||
protected TestRunnerUtils runnerUtils;
|
||||
|
||||
@Test
|
||||
@ExecuteFlow("flows/valids/allow-failure.yaml")
|
||||
|
||||
@@ -7,7 +7,7 @@ import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.queues.QueueException;
|
||||
import io.kestra.core.queues.QueueFactoryInterface;
|
||||
import io.kestra.core.queues.QueueInterface;
|
||||
import io.kestra.core.runners.RunnerUtils;
|
||||
import io.kestra.core.runners.TestRunnerUtils;
|
||||
import io.kestra.core.utils.TestsUtils;
|
||||
import io.kestra.core.junit.annotations.LoadFlows;
|
||||
import jakarta.inject.Inject;
|
||||
@@ -31,7 +31,7 @@ class CorrelationIdTest {
|
||||
@Named(QueueFactoryInterface.EXECUTION_NAMED)
|
||||
private QueueInterface<Execution> executionQueue;
|
||||
@Inject
|
||||
private RunnerUtils runnerUtils;
|
||||
private TestRunnerUtils runnerUtils;
|
||||
|
||||
@Test
|
||||
@LoadFlows({"flows/valids/subflow-parent.yaml",
|
||||
|
||||
@@ -12,7 +12,7 @@ import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.models.validations.ModelValidator;
|
||||
import io.kestra.core.queues.QueueException;
|
||||
import io.kestra.core.runners.FlowInputOutput;
|
||||
import io.kestra.core.runners.RunnerUtils;
|
||||
import io.kestra.core.runners.TestRunnerUtils;
|
||||
import io.kestra.core.serializers.YamlParser;
|
||||
import io.kestra.core.utils.TestsUtils;
|
||||
import jakarta.inject.Inject;
|
||||
@@ -33,7 +33,7 @@ public class DagTest {
|
||||
ModelValidator modelValidator;
|
||||
|
||||
@Inject
|
||||
protected RunnerUtils runnerUtils;
|
||||
protected TestRunnerUtils runnerUtils;
|
||||
|
||||
@Inject
|
||||
private FlowInputOutput flowIO;
|
||||
|
||||
@@ -4,6 +4,7 @@ import io.kestra.core.junit.annotations.ExecuteFlow;
|
||||
import io.kestra.core.junit.annotations.KestraTest;
|
||||
import io.kestra.core.junit.annotations.LoadFlows;
|
||||
import io.kestra.core.queues.QueueException;
|
||||
import io.kestra.core.runners.TestRunnerUtils;
|
||||
import io.kestra.core.utils.TestsUtils;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import io.kestra.core.exceptions.InternalException;
|
||||
@@ -13,7 +14,6 @@ import io.kestra.core.models.executions.TaskRun;
|
||||
import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.queues.QueueFactoryInterface;
|
||||
import io.kestra.core.queues.QueueInterface;
|
||||
import io.kestra.core.runners.RunnerUtils;
|
||||
|
||||
import java.time.Duration;
|
||||
import java.util.*;
|
||||
@@ -34,7 +34,7 @@ public class EachSequentialTest {
|
||||
QueueInterface<LogEntry> logQueue;
|
||||
|
||||
@Inject
|
||||
private RunnerUtils runnerUtils;
|
||||
private TestRunnerUtils runnerUtils;
|
||||
|
||||
@Test
|
||||
@ExecuteFlow("flows/valids/each-sequential.yaml")
|
||||
@@ -92,7 +92,7 @@ public class EachSequentialTest {
|
||||
EachSequentialTest.eachNullTest(runnerUtils, logQueue);
|
||||
}
|
||||
|
||||
public static void eachNullTest(RunnerUtils runnerUtils, QueueInterface<LogEntry> logQueue) throws TimeoutException, QueueException {
|
||||
public static void eachNullTest(TestRunnerUtils runnerUtils, QueueInterface<LogEntry> logQueue) throws TimeoutException, QueueException {
|
||||
List<LogEntry> logs = new CopyOnWriteArrayList<>();
|
||||
Flux<LogEntry> receive = TestsUtils.receive(logQueue, either -> logs.add(either.getLeft()));
|
||||
|
||||
|
||||
@@ -6,9 +6,8 @@ import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.queues.QueueException;
|
||||
import io.kestra.core.runners.FlowInputOutput;
|
||||
import io.kestra.core.runners.RunnerUtils;
|
||||
import io.kestra.core.runners.TestRunnerUtils;
|
||||
import jakarta.inject.Inject;
|
||||
import org.junit.jupiter.api.Disabled;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.time.Duration;
|
||||
@@ -24,7 +23,7 @@ class FinallyTest {
|
||||
public static final String NAMESPACE = "io.kestra.tests";
|
||||
private static final String TENANT_ID = "tenant1";
|
||||
@Inject
|
||||
protected RunnerUtils runnerUtils;
|
||||
protected TestRunnerUtils runnerUtils;
|
||||
|
||||
@Inject
|
||||
private FlowInputOutput flowIO;
|
||||
@@ -349,7 +348,7 @@ class FinallyTest {
|
||||
TENANT_ID,
|
||||
NAMESPACE, "finally-flow-error", null,
|
||||
(flow, execution1) -> flowIO.readExecutionInputs(flow, execution1, Map.of("failed", true)),
|
||||
Duration.ofSeconds(60)
|
||||
Duration.ofSeconds(20)
|
||||
);
|
||||
|
||||
assertThat(execution.getTaskRunList()).hasSize(6);
|
||||
|
||||
@@ -4,96 +4,69 @@ import com.google.common.collect.ImmutableMap;
|
||||
import io.kestra.core.models.Label;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.queues.QueueFactoryInterface;
|
||||
import io.kestra.core.queues.QueueInterface;
|
||||
import io.kestra.core.runners.RunnerUtils;
|
||||
|
||||
import io.kestra.core.runners.TestRunnerUtils;
|
||||
import java.time.Duration;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
|
||||
import io.kestra.core.utils.TestsUtils;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.inject.Named;
|
||||
import jakarta.inject.Singleton;
|
||||
import reactor.core.publisher.Flux;
|
||||
|
||||
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
@Singleton
|
||||
public class FlowCaseTest {
|
||||
@Inject
|
||||
@Named(QueueFactoryInterface.EXECUTION_NAMED)
|
||||
QueueInterface<Execution> executionQueue;
|
||||
|
||||
@Inject
|
||||
protected RunnerUtils runnerUtils;
|
||||
protected TestRunnerUtils runnerUtils;
|
||||
|
||||
public void waitSuccess() throws Exception {
|
||||
this.run("OK", State.Type.SUCCESS, State.Type.SUCCESS, 2, "default > amazing", true);
|
||||
}
|
||||
|
||||
public void waitFailed() throws Exception {
|
||||
this.run("THIRD", State.Type.FAILED, State.Type.FAILED, 4, "Error Trigger ! error-t1", true);
|
||||
public void waitFailed(String tenantId) throws Exception {
|
||||
this.run("THIRD", State.Type.FAILED, State.Type.FAILED, 4, "Error Trigger ! error-t1", true, tenantId);
|
||||
}
|
||||
|
||||
public void invalidOutputs() throws Exception {
|
||||
this.run("FIRST", State.Type.FAILED, State.Type.SUCCESS, 2, null, true);
|
||||
public void invalidOutputs(String tenantId) throws Exception {
|
||||
this.run("FIRST", State.Type.FAILED, State.Type.SUCCESS, 2, null, true, tenantId);
|
||||
}
|
||||
|
||||
public void noLabels() throws Exception {
|
||||
this.run("OK", State.Type.SUCCESS, State.Type.SUCCESS, 2, "default > amazing", false);
|
||||
public void noLabels(String tenantId) throws Exception {
|
||||
this.run("OK", State.Type.SUCCESS, State.Type.SUCCESS, 2, "default > amazing", false, tenantId);
|
||||
}
|
||||
|
||||
public void oldTaskName() throws Exception {
|
||||
CountDownLatch countDownLatch = new CountDownLatch(1);
|
||||
AtomicReference<Execution> triggered = new AtomicReference<>();
|
||||
|
||||
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
|
||||
Execution execution = either.getLeft();
|
||||
if (execution.getFlowId().equals("minimal") && execution.getState().getCurrent().isTerminated()) {
|
||||
triggered.set(execution);
|
||||
countDownLatch.countDown();
|
||||
}
|
||||
});
|
||||
|
||||
Execution execution = runnerUtils.runOne(
|
||||
MAIN_TENANT,
|
||||
"io.kestra.tests",
|
||||
"subflow-old-task-name"
|
||||
);
|
||||
|
||||
countDownLatch.await(1, TimeUnit.MINUTES);
|
||||
receive.blockLast();
|
||||
Execution triggered = runnerUtils.awaitFlowExecution(
|
||||
e -> e.getState().getCurrent().isTerminated(), MAIN_TENANT, "io.kestra.tests",
|
||||
"minimal");
|
||||
|
||||
assertThat(execution.getTaskRunList()).hasSize(1);
|
||||
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
assertThat(execution.getTaskRunList().getFirst().getOutputs().get("executionId")).isEqualTo(triggered.get().getId());
|
||||
assertThat(triggered.get().getTrigger().getType()).isEqualTo("io.kestra.core.tasks.flows.Subflow");
|
||||
assertThat(triggered.get().getTrigger().getVariables().get("executionId")).isEqualTo(execution.getId());
|
||||
assertThat(triggered.get().getTrigger().getVariables().get("flowId")).isEqualTo(execution.getFlowId());
|
||||
assertThat(triggered.get().getTrigger().getVariables().get("namespace")).isEqualTo(execution.getNamespace());
|
||||
assertThat(execution.getTaskRunList().getFirst().getOutputs().get("executionId")).isEqualTo(triggered.getId());
|
||||
assertThat(triggered.getTrigger().getType()).isEqualTo("io.kestra.core.tasks.flows.Subflow");
|
||||
assertThat(triggered.getTrigger().getVariables().get("executionId")).isEqualTo(execution.getId());
|
||||
assertThat(triggered.getTrigger().getVariables().get("flowId")).isEqualTo(execution.getFlowId());
|
||||
assertThat(triggered.getTrigger().getVariables().get("namespace")).isEqualTo(execution.getNamespace());
|
||||
}
|
||||
|
||||
void run(String input, State.Type fromState, State.Type triggerState, int count, String outputs, boolean testInherited)
|
||||
throws Exception {
|
||||
run(input, fromState, triggerState, count, outputs, testInherited, MAIN_TENANT);
|
||||
}
|
||||
|
||||
@SuppressWarnings({"ResultOfMethodCallIgnored", "unchecked"})
|
||||
void run(String input, State.Type fromState, State.Type triggerState, int count, String outputs, boolean testInherited) throws Exception {
|
||||
CountDownLatch countDownLatch = new CountDownLatch(1);
|
||||
AtomicReference<Execution> triggered = new AtomicReference<>();
|
||||
|
||||
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
|
||||
Execution execution = either.getLeft();
|
||||
if (execution.getFlowId().equals("switch") && execution.getState().getCurrent().isTerminated()) {
|
||||
triggered.set(execution);
|
||||
countDownLatch.countDown();
|
||||
}
|
||||
});
|
||||
|
||||
void run(String input, State.Type fromState, State.Type triggerState, int count, String outputs, boolean testInherited, String tenantId) throws Exception {
|
||||
Execution execution = runnerUtils.runOne(
|
||||
MAIN_TENANT,
|
||||
tenantId,
|
||||
"io.kestra.tests",
|
||||
testInherited ? "task-flow" : "task-flow-inherited-labels",
|
||||
null,
|
||||
@@ -102,8 +75,8 @@ public class FlowCaseTest {
|
||||
testInherited ? List.of(new Label("mainFlowExecutionLabel", "execFoo")) : List.of()
|
||||
);
|
||||
|
||||
countDownLatch.await(1, TimeUnit.MINUTES);
|
||||
receive.blockLast();
|
||||
Execution triggered = runnerUtils.awaitFlowExecution(
|
||||
e -> e.getState().getCurrent().isTerminated(), tenantId, "io.kestra.tests", "switch");
|
||||
|
||||
assertThat(execution.getTaskRunList()).hasSize(1);
|
||||
assertThat(execution.getTaskRunList().getFirst().getAttempts()).hasSize(1);
|
||||
@@ -114,27 +87,27 @@ public class FlowCaseTest {
|
||||
assertThat(((Map<String, String>) execution.getTaskRunList().getFirst().getOutputs().get("outputs")).get("extracted")).contains(outputs);
|
||||
}
|
||||
|
||||
assertThat(execution.getTaskRunList().getFirst().getOutputs().get("executionId")).isEqualTo(triggered.get().getId());
|
||||
assertThat(execution.getTaskRunList().getFirst().getOutputs().get("executionId")).isEqualTo(triggered.getId());
|
||||
|
||||
if (outputs != null) {
|
||||
assertThat(execution.getTaskRunList().getFirst().getOutputs().get("state")).isEqualTo(triggered.get().getState().getCurrent().name());
|
||||
assertThat(execution.getTaskRunList().getFirst().getOutputs().get("state")).isEqualTo(triggered.getState().getCurrent().name());
|
||||
}
|
||||
|
||||
assertThat(triggered.get().getTrigger().getType()).isEqualTo("io.kestra.plugin.core.flow.Subflow");
|
||||
assertThat(triggered.get().getTrigger().getVariables().get("executionId")).isEqualTo(execution.getId());
|
||||
assertThat(triggered.get().getTrigger().getVariables().get("flowId")).isEqualTo(execution.getFlowId());
|
||||
assertThat(triggered.get().getTrigger().getVariables().get("namespace")).isEqualTo(execution.getNamespace());
|
||||
assertThat(triggered.getTrigger().getType()).isEqualTo("io.kestra.plugin.core.flow.Subflow");
|
||||
assertThat(triggered.getTrigger().getVariables().get("executionId")).isEqualTo(execution.getId());
|
||||
assertThat(triggered.getTrigger().getVariables().get("flowId")).isEqualTo(execution.getFlowId());
|
||||
assertThat(triggered.getTrigger().getVariables().get("namespace")).isEqualTo(execution.getNamespace());
|
||||
|
||||
assertThat(triggered.get().getTaskRunList()).hasSize(count);
|
||||
assertThat(triggered.get().getState().getCurrent()).isEqualTo(triggerState);
|
||||
assertThat(triggered.getTaskRunList()).hasSize(count);
|
||||
assertThat(triggered.getState().getCurrent()).isEqualTo(triggerState);
|
||||
|
||||
if (testInherited) {
|
||||
assertThat(triggered.get().getLabels().size()).isEqualTo(6);
|
||||
assertThat(triggered.get().getLabels()).contains(new Label(Label.CORRELATION_ID, execution.getId()), new Label("mainFlowExecutionLabel", "execFoo"), new Label("mainFlowLabel", "flowFoo"), new Label("launchTaskLabel", "launchFoo"), new Label("switchFlowLabel", "switchFoo"), new Label("overriding", "child"));
|
||||
assertThat(triggered.getLabels().size()).isEqualTo(6);
|
||||
assertThat(triggered.getLabels()).contains(new Label(Label.CORRELATION_ID, execution.getId()), new Label("mainFlowExecutionLabel", "execFoo"), new Label("mainFlowLabel", "flowFoo"), new Label("launchTaskLabel", "launchFoo"), new Label("switchFlowLabel", "switchFoo"), new Label("overriding", "child"));
|
||||
} else {
|
||||
assertThat(triggered.get().getLabels().size()).isEqualTo(4);
|
||||
assertThat(triggered.get().getLabels()).contains(new Label(Label.CORRELATION_ID, execution.getId()), new Label("launchTaskLabel", "launchFoo"), new Label("switchFlowLabel", "switchFoo"), new Label("overriding", "child"));
|
||||
assertThat(triggered.get().getLabels()).doesNotContain(new Label("inherited", "label"));
|
||||
assertThat(triggered.getLabels().size()).isEqualTo(4);
|
||||
assertThat(triggered.getLabels()).contains(new Label(Label.CORRELATION_ID, execution.getId()), new Label("launchTaskLabel", "launchFoo"), new Label("switchFlowLabel", "switchFoo"), new Label("overriding", "child"));
|
||||
assertThat(triggered.getLabels()).doesNotContain(new Label("inherited", "label"));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,9 +5,7 @@ import io.kestra.core.junit.annotations.LoadFlows;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import jakarta.inject.Inject;
|
||||
import org.junit.jupiter.api.TestInstance;
|
||||
|
||||
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
|
||||
@KestraTest(startRunner = true)
|
||||
class FlowTest {
|
||||
@Inject
|
||||
@@ -24,25 +22,25 @@ class FlowTest {
|
||||
@Test
|
||||
@LoadFlows(value = {"flows/valids/task-flow.yaml",
|
||||
"flows/valids/task-flow-inherited-labels.yaml",
|
||||
"flows/valids/switch.yaml"})
|
||||
"flows/valids/switch.yaml"}, tenantId = "tenant1")
|
||||
void waitFailed() throws Exception {
|
||||
flowCaseTest.waitFailed();
|
||||
flowCaseTest.waitFailed("tenant1");
|
||||
}
|
||||
|
||||
@Test
|
||||
@LoadFlows({"flows/valids/task-flow.yaml",
|
||||
@LoadFlows(value = {"flows/valids/task-flow.yaml",
|
||||
"flows/valids/task-flow-inherited-labels.yaml",
|
||||
"flows/valids/switch.yaml"})
|
||||
"flows/valids/switch.yaml"}, tenantId = "tenant2")
|
||||
void invalidOutputs() throws Exception {
|
||||
flowCaseTest.invalidOutputs();
|
||||
flowCaseTest.invalidOutputs("tenant2");
|
||||
}
|
||||
|
||||
@Test
|
||||
@LoadFlows({"flows/valids/task-flow.yaml",
|
||||
@LoadFlows(value = {"flows/valids/task-flow.yaml",
|
||||
"flows/valids/task-flow-inherited-labels.yaml",
|
||||
"flows/valids/switch.yaml"})
|
||||
"flows/valids/switch.yaml"}, tenantId = "tenant3")
|
||||
void noLabels() throws Exception {
|
||||
flowCaseTest.noLabels();
|
||||
flowCaseTest.noLabels("tenant3");
|
||||
}
|
||||
|
||||
@Test
|
||||
|
||||
@@ -4,20 +4,17 @@ import io.kestra.core.models.Label;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.queues.QueueException;
|
||||
import io.kestra.core.queues.QueueFactoryInterface;
|
||||
import io.kestra.core.queues.QueueInterface;
|
||||
import io.kestra.core.repositories.ArrayListTotal;
|
||||
import io.kestra.core.repositories.ExecutionRepositoryInterface;
|
||||
import io.kestra.core.runners.FlowInputOutput;
|
||||
import io.kestra.core.runners.RunnerUtils;
|
||||
import io.kestra.core.runners.TestRunnerUtils;
|
||||
import io.kestra.core.services.ExecutionService;
|
||||
import io.kestra.core.storages.StorageInterface;
|
||||
import io.kestra.core.utils.Await;
|
||||
import io.kestra.core.utils.TestsUtils;
|
||||
import io.micronaut.data.model.Pageable;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.inject.Named;
|
||||
import jakarta.inject.Singleton;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import reactor.core.publisher.Flux;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.File;
|
||||
@@ -31,34 +28,25 @@ import java.nio.file.Files;
|
||||
import java.time.Duration;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.NoSuchElementException;
|
||||
import java.util.Optional;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
import java.util.stream.IntStream;
|
||||
|
||||
import static io.kestra.core.models.flows.State.Type.FAILED;
|
||||
import static io.kestra.core.models.flows.State.Type.SUCCESS;
|
||||
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
|
||||
import static io.kestra.core.utils.Rethrow.throwRunnable;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
@Slf4j
|
||||
@Singleton
|
||||
public class ForEachItemCaseTest {
|
||||
static final String TEST_NAMESPACE = "io.kestra.tests";
|
||||
|
||||
@Inject
|
||||
@Named(QueueFactoryInterface.EXECUTION_NAMED)
|
||||
private QueueInterface<Execution> executionQueue;
|
||||
|
||||
@Inject
|
||||
private StorageInterface storageInterface;
|
||||
|
||||
@Inject
|
||||
protected RunnerUtils runnerUtils;
|
||||
protected TestRunnerUtils runnerUtils;
|
||||
|
||||
@Inject
|
||||
private FlowInputOutput flowIO;
|
||||
@@ -66,28 +54,19 @@ public class ForEachItemCaseTest {
|
||||
@Inject
|
||||
private ExecutionService executionService;
|
||||
|
||||
@Inject
|
||||
private ExecutionRepositoryInterface executionRepository;
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public void forEachItem() throws TimeoutException, InterruptedException, URISyntaxException, IOException, QueueException {
|
||||
CountDownLatch countDownLatch = new CountDownLatch(26);
|
||||
AtomicReference<Execution> triggered = new AtomicReference<>();
|
||||
|
||||
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
|
||||
Execution execution = either.getLeft();
|
||||
if (execution.getFlowId().equals("for-each-item-subflow") && execution.getState().getCurrent().isTerminated()) {
|
||||
triggered.set(execution);
|
||||
countDownLatch.countDown();
|
||||
}
|
||||
});
|
||||
|
||||
URI file = storageUpload();
|
||||
public void forEachItem() throws TimeoutException, URISyntaxException, IOException, QueueException {
|
||||
URI file = storageUpload(MAIN_TENANT);
|
||||
Map<String, Object> inputs = Map.of("file", file.toString(), "batch", 4);
|
||||
Execution execution = runnerUtils.runOne(MAIN_TENANT, TEST_NAMESPACE, "for-each-item", null,
|
||||
(flow, execution1) -> flowIO.readExecutionInputs(flow, execution1, inputs),
|
||||
Duration.ofSeconds(30));
|
||||
|
||||
// we should have triggered 26 subflows
|
||||
assertThat(countDownLatch.await(1, TimeUnit.MINUTES)).isTrue();
|
||||
receive.blockLast();
|
||||
List<Execution> triggeredExecs = runnerUtils.awaitFlowExecutionNumber(26, MAIN_TENANT, TEST_NAMESPACE, "for-each-item-subflow");
|
||||
|
||||
// assert on the main flow execution
|
||||
assertThat(execution.getTaskRunList()).hasSize(4);
|
||||
@@ -103,19 +82,20 @@ public class ForEachItemCaseTest {
|
||||
assertThat(iterations.get("SUCCESS")).isEqualTo(26);
|
||||
|
||||
// assert on the last subflow execution
|
||||
assertThat(triggered.get().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
assertThat(triggered.get().getFlowId()).isEqualTo("for-each-item-subflow");
|
||||
assertThat((String) triggered.get().getInputs().get("items")).matches("kestra:///io/kestra/tests/for-each-item/executions/.*/tasks/each-split/.*\\.txt");
|
||||
assertThat(triggered.get().getTaskRunList()).hasSize(1);
|
||||
Optional<Label> correlationId = triggered.get().getLabels().stream().filter(label -> label.key().equals(Label.CORRELATION_ID)).findAny();
|
||||
Execution triggered = triggeredExecs.getLast();
|
||||
assertThat(triggered.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
assertThat(triggered.getFlowId()).isEqualTo("for-each-item-subflow");
|
||||
assertThat((String) triggered.getInputs().get("items")).matches("kestra:///io/kestra/tests/for-each-item/executions/.*/tasks/each-split/.*\\.txt");
|
||||
assertThat(triggered.getTaskRunList()).hasSize(1);
|
||||
Optional<Label> correlationId = triggered.getLabels().stream().filter(label -> label.key().equals(Label.CORRELATION_ID)).findAny();
|
||||
assertThat(correlationId.isPresent()).isTrue();
|
||||
assertThat(correlationId.get().value()).isEqualTo(execution.getId());
|
||||
}
|
||||
|
||||
public void forEachItemEmptyItems() throws TimeoutException, URISyntaxException, IOException, QueueException {
|
||||
URI file = emptyItems();
|
||||
public void forEachItemEmptyItems(String tenantId) throws TimeoutException, URISyntaxException, IOException, QueueException {
|
||||
URI file = emptyItems(tenantId);
|
||||
Map<String, Object> inputs = Map.of("file", file.toString(), "batch", 4);
|
||||
Execution execution = runnerUtils.runOne(MAIN_TENANT, TEST_NAMESPACE, "for-each-item", null,
|
||||
Execution execution = runnerUtils.runOne(tenantId, TEST_NAMESPACE, "for-each-item", null,
|
||||
(flow, execution1) -> flowIO.readExecutionInputs(flow, execution1, inputs),
|
||||
Duration.ofSeconds(30));
|
||||
|
||||
@@ -127,21 +107,8 @@ public class ForEachItemCaseTest {
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public void forEachItemNoWait() throws TimeoutException, InterruptedException, URISyntaxException, IOException, QueueException {
|
||||
CountDownLatch countDownLatch = new CountDownLatch(26);
|
||||
AtomicReference<Execution> triggered = new AtomicReference<>();
|
||||
|
||||
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
|
||||
Execution execution = either.getLeft();
|
||||
if (execution.getFlowId().equals("for-each-item-subflow-sleep")) {
|
||||
if (execution.getState().getCurrent().isTerminated()) {
|
||||
triggered.set(execution);
|
||||
countDownLatch.countDown();
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
URI file = storageUpload();
|
||||
public void forEachItemNoWait() throws TimeoutException, URISyntaxException, IOException, QueueException {
|
||||
URI file = storageUpload(MAIN_TENANT);
|
||||
Map<String, Object> inputs = Map.of("file", file.toString());
|
||||
Execution execution = runnerUtils.runOne(MAIN_TENANT, TEST_NAMESPACE, "for-each-item-no-wait", null,
|
||||
(flow, execution1) -> flowIO.readExecutionInputs(flow, execution1, inputs),
|
||||
@@ -149,7 +116,9 @@ public class ForEachItemCaseTest {
|
||||
|
||||
// assert that not all subflows ran (depending on the speed of execution, there can be some)
|
||||
// be careful that it's racy.
|
||||
assertThat(countDownLatch.getCount()).isGreaterThan(0L);
|
||||
ArrayListTotal<Execution> subFlowExecs = executionRepository.findByFlowId(MAIN_TENANT,
|
||||
TEST_NAMESPACE, "for-each-item-subflow-sleep", Pageable.UNPAGED);
|
||||
assertThat(subFlowExecs.size()).isLessThanOrEqualTo(26);
|
||||
|
||||
// assert on the main flow execution
|
||||
assertThat(execution.getTaskRunList()).hasSize(4);
|
||||
@@ -165,38 +134,27 @@ public class ForEachItemCaseTest {
|
||||
assertThat(iterations.get("SUCCESS")).isEqualTo(26);
|
||||
|
||||
// wait for the 26 flows to ends
|
||||
assertThat(countDownLatch.await(1, TimeUnit.MINUTES)).as("Remaining count was " + countDownLatch.getCount()).isTrue();
|
||||
receive.blockLast();
|
||||
List<Execution> triggeredExecs = runnerUtils.awaitFlowExecutionNumber(26, MAIN_TENANT, TEST_NAMESPACE, "for-each-item-subflow-sleep");
|
||||
Execution triggered = triggeredExecs.getLast();
|
||||
|
||||
// assert on the last subflow execution
|
||||
assertThat(triggered.get().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
assertThat(triggered.get().getFlowId()).isEqualTo("for-each-item-subflow-sleep");
|
||||
assertThat((String) triggered.get().getInputs().get("items")).matches("kestra:///io/kestra/tests/for-each-item-no-wait/executions/.*/tasks/each-split/.*\\.txt");
|
||||
assertThat(triggered.get().getTaskRunList()).hasSize(2);
|
||||
assertThat(triggered.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
assertThat(triggered.getFlowId()).isEqualTo("for-each-item-subflow-sleep");
|
||||
assertThat((String) triggered.getInputs().get("items")).matches("kestra:///io/kestra/tests/for-each-item-no-wait/executions/.*/tasks/each-split/.*\\.txt");
|
||||
assertThat(triggered.getTaskRunList()).hasSize(2);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public void forEachItemFailed() throws TimeoutException, InterruptedException, URISyntaxException, IOException, QueueException {
|
||||
CountDownLatch countDownLatch = new CountDownLatch(26);
|
||||
AtomicReference<Execution> triggered = new AtomicReference<>();
|
||||
|
||||
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
|
||||
Execution execution = either.getLeft();
|
||||
if (execution.getFlowId().equals("for-each-item-subflow-failed") && execution.getState().getCurrent().isTerminated()) {
|
||||
triggered.set(execution);
|
||||
countDownLatch.countDown();
|
||||
}
|
||||
});
|
||||
|
||||
URI file = storageUpload();
|
||||
public void forEachItemFailed() throws TimeoutException, URISyntaxException, IOException, QueueException {
|
||||
URI file = storageUpload(MAIN_TENANT);
|
||||
Map<String, Object> inputs = Map.of("file", file.toString());
|
||||
Execution execution = runnerUtils.runOne(MAIN_TENANT, TEST_NAMESPACE, "for-each-item-failed", null,
|
||||
(flow, execution1) -> flowIO.readExecutionInputs(flow, execution1, inputs),
|
||||
Duration.ofSeconds(60));
|
||||
|
||||
// we should have triggered 26 subflows
|
||||
assertThat(countDownLatch.await(1, TimeUnit.MINUTES)).isTrue();
|
||||
receive.blockLast();
|
||||
List<Execution> triggeredExecs = runnerUtils.awaitFlowExecutionNumber(26, MAIN_TENANT, TEST_NAMESPACE, "for-each-item-subflow-failed");
|
||||
Execution triggered = triggeredExecs.getLast();
|
||||
|
||||
// assert on the main flow execution
|
||||
assertThat(execution.getTaskRunList()).hasSize(3);
|
||||
@@ -212,34 +170,23 @@ public class ForEachItemCaseTest {
|
||||
assertThat(iterations.get("FAILED")).isEqualTo(26);
|
||||
|
||||
// assert on the last subflow execution
|
||||
assertThat(triggered.get().getState().getCurrent()).isEqualTo(FAILED);
|
||||
assertThat(triggered.get().getFlowId()).isEqualTo("for-each-item-subflow-failed");
|
||||
assertThat((String) triggered.get().getInputs().get("items")).matches("kestra:///io/kestra/tests/for-each-item-failed/executions/.*/tasks/each-split/.*\\.txt");
|
||||
assertThat(triggered.get().getTaskRunList()).hasSize(1);
|
||||
assertThat(triggered.getState().getCurrent()).isEqualTo(FAILED);
|
||||
assertThat(triggered.getFlowId()).isEqualTo("for-each-item-subflow-failed");
|
||||
assertThat((String) triggered.getInputs().get("items")).matches("kestra:///io/kestra/tests/for-each-item-failed/executions/.*/tasks/each-split/.*\\.txt");
|
||||
assertThat(triggered.getTaskRunList()).hasSize(1);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public void forEachItemWithSubflowOutputs() throws TimeoutException, InterruptedException, URISyntaxException, IOException, QueueException {
|
||||
CountDownLatch countDownLatch = new CountDownLatch(26);
|
||||
AtomicReference<Execution> triggered = new AtomicReference<>();
|
||||
|
||||
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
|
||||
Execution execution = either.getLeft();
|
||||
if (execution.getFlowId().equals("for-each-item-outputs-subflow") && execution.getState().getCurrent().isTerminated()) {
|
||||
triggered.set(execution);
|
||||
countDownLatch.countDown();
|
||||
}
|
||||
});
|
||||
|
||||
URI file = storageUpload();
|
||||
public void forEachItemWithSubflowOutputs() throws TimeoutException, URISyntaxException, IOException, QueueException {
|
||||
URI file = storageUpload(MAIN_TENANT);
|
||||
Map<String, Object> inputs = Map.of("file", file.toString());
|
||||
Execution execution = runnerUtils.runOne(MAIN_TENANT, TEST_NAMESPACE, "for-each-item-outputs", null,
|
||||
(flow, execution1) -> flowIO.readExecutionInputs(flow, execution1, inputs),
|
||||
Duration.ofSeconds(30));
|
||||
|
||||
// we should have triggered 26 subflows
|
||||
assertThat(countDownLatch.await(1, TimeUnit.MINUTES)).isTrue();
|
||||
receive.blockLast();
|
||||
List<Execution> triggeredExecs = runnerUtils.awaitFlowExecutionNumber(26, MAIN_TENANT, TEST_NAMESPACE, "for-each-item-outputs-subflow");
|
||||
Execution triggered = triggeredExecs.getLast();
|
||||
|
||||
// assert on the main flow execution
|
||||
assertThat(execution.getTaskRunList()).hasSize(5);
|
||||
@@ -256,10 +203,10 @@ public class ForEachItemCaseTest {
|
||||
assertThat(iterations.get("SUCCESS")).isEqualTo(26);
|
||||
|
||||
// assert on the last subflow execution
|
||||
assertThat(triggered.get().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
assertThat(triggered.get().getFlowId()).isEqualTo("for-each-item-outputs-subflow");
|
||||
assertThat((String) triggered.get().getInputs().get("items")).matches("kestra:///io/kestra/tests/for-each-item-outputs/executions/.*/tasks/each-split/.*\\.txt");
|
||||
assertThat(triggered.get().getTaskRunList()).hasSize(1);
|
||||
assertThat(triggered.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
assertThat(triggered.getFlowId()).isEqualTo("for-each-item-outputs-subflow");
|
||||
assertThat((String) triggered.getInputs().get("items")).matches("kestra:///io/kestra/tests/for-each-item-outputs/executions/.*/tasks/each-split/.*\\.txt");
|
||||
assertThat(triggered.getTaskRunList()).hasSize(1);
|
||||
|
||||
// asserts for subflow merged outputs
|
||||
Map<String, Object> mergeTaskOutputs = execution.getTaskRunList().get(3).getOutputs();
|
||||
@@ -272,84 +219,39 @@ public class ForEachItemCaseTest {
|
||||
}
|
||||
}
|
||||
|
||||
public void restartForEachItem() throws Exception {
|
||||
CountDownLatch countDownLatch = new CountDownLatch(6);
|
||||
Flux<Execution> receiveSubflows = TestsUtils.receive(executionQueue, either -> {
|
||||
Execution subflowExecution = either.getLeft();
|
||||
if (subflowExecution.getFlowId().equals("restart-child") && subflowExecution.getState().getCurrent().isFailed()) {
|
||||
countDownLatch.countDown();
|
||||
}
|
||||
});
|
||||
|
||||
URI file = storageUpload();
|
||||
public void restartForEachItem(String tenantId) throws Exception {
|
||||
URI file = storageUpload(tenantId);
|
||||
Map<String, Object> inputs = Map.of("file", file.toString(), "batch", 20);
|
||||
final Execution failedExecution = runnerUtils.runOne(MAIN_TENANT, TEST_NAMESPACE, "restart-for-each-item", null,
|
||||
final Execution failedExecution = runnerUtils.runOne(tenantId, TEST_NAMESPACE, "restart-for-each-item", null,
|
||||
(flow, execution1) -> flowIO.readExecutionInputs(flow, execution1, inputs),
|
||||
Duration.ofSeconds(30));
|
||||
assertThat(failedExecution.getTaskRunList()).hasSize(3);
|
||||
assertThat(failedExecution.getState().getCurrent()).isEqualTo(FAILED);
|
||||
|
||||
// here we must have 1 failed subflows
|
||||
assertTrue(countDownLatch.await(1, TimeUnit.MINUTES), "first run of flow should have FAILED");
|
||||
receiveSubflows.blockLast();
|
||||
|
||||
Await.until(
|
||||
() -> "first FAILED run of flow should have been persisted",
|
||||
() -> getPersistedExecution(MAIN_TENANT, failedExecution.getId())
|
||||
.map(exec -> exec.getState().getCurrent() == FAILED)
|
||||
.orElse(false),
|
||||
Duration.of(100, TimeUnit.MILLISECONDS.toChronoUnit()),
|
||||
Duration.of(10, TimeUnit.SECONDS.toChronoUnit())
|
||||
);
|
||||
|
||||
CountDownLatch successLatch = new CountDownLatch(6);
|
||||
receiveSubflows = TestsUtils.receive(executionQueue, either -> {
|
||||
Execution subflowExecution = either.getLeft();
|
||||
if (subflowExecution.getFlowId().equals("restart-child") && subflowExecution.getState().getCurrent().isSuccess()) {
|
||||
successLatch.countDown();
|
||||
}
|
||||
});
|
||||
List<Execution> triggeredExecs = runnerUtils.awaitFlowExecutionNumber(6, tenantId, TEST_NAMESPACE, "restart-child");
|
||||
assertThat(triggeredExecs).extracting(e -> e.getState().getCurrent()).containsOnly(FAILED);
|
||||
|
||||
Execution restarted = executionService.restart(failedExecution, null);
|
||||
final Execution successExecution = runnerUtils.awaitExecution(
|
||||
final Execution successExecution = runnerUtils.emitAndAwaitExecution(
|
||||
e -> e.getState().getCurrent() == State.Type.SUCCESS && e.getFlowId().equals("restart-for-each-item"),
|
||||
throwRunnable(() -> executionQueue.emit(restarted)),
|
||||
Duration.ofSeconds(20)
|
||||
restarted
|
||||
);
|
||||
assertThat(successExecution.getTaskRunList()).hasSize(4);
|
||||
assertTrue(successLatch.await(1, TimeUnit.MINUTES), "second run of flow should have SUCCESS");
|
||||
receiveSubflows.blockLast();
|
||||
triggeredExecs = runnerUtils.awaitFlowExecutionNumber(6, tenantId, TEST_NAMESPACE, "restart-child");
|
||||
assertThat(triggeredExecs).extracting(e -> e.getState().getCurrent()).containsOnly(SUCCESS);
|
||||
}
|
||||
|
||||
private Optional<Execution> getPersistedExecution(String tenant, String executionId) {
|
||||
try {
|
||||
return Optional.of(executionService.getExecution(tenant, executionId, false));
|
||||
} catch (NoSuchElementException e) {
|
||||
return Optional.empty();
|
||||
}
|
||||
}
|
||||
|
||||
public void forEachItemInIf() throws TimeoutException, InterruptedException, URISyntaxException, IOException, QueueException {
|
||||
CountDownLatch countDownLatch = new CountDownLatch(26);
|
||||
AtomicReference<Execution> triggered = new AtomicReference<>();
|
||||
|
||||
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
|
||||
Execution execution = either.getLeft();
|
||||
if (execution.getFlowId().equals("for-each-item-subflow") && execution.getState().getCurrent().isTerminated()) {
|
||||
triggered.set(execution);
|
||||
countDownLatch.countDown();
|
||||
}
|
||||
});
|
||||
|
||||
URI file = storageUpload();
|
||||
public void forEachItemInIf(String tenantId) throws TimeoutException, URISyntaxException, IOException, QueueException {
|
||||
URI file = storageUpload(tenantId);
|
||||
Map<String, Object> inputs = Map.of("file", file.toString(), "batch", 4);
|
||||
Execution execution = runnerUtils.runOne(MAIN_TENANT, TEST_NAMESPACE, "for-each-item-in-if", null,
|
||||
Execution execution = runnerUtils.runOne(tenantId, TEST_NAMESPACE, "for-each-item-in-if", null,
|
||||
(flow, execution1) -> flowIO.readExecutionInputs(flow, execution1, inputs),
|
||||
Duration.ofSeconds(30));
|
||||
|
||||
// we should have triggered 26 subflows
|
||||
assertThat(countDownLatch.await(1, TimeUnit.MINUTES)).isTrue();
|
||||
receive.blockLast();
|
||||
List<Execution> triggeredExecs = runnerUtils.awaitFlowExecutionNumber(26, tenantId, TEST_NAMESPACE, "for-each-item-subflow");
|
||||
Execution triggered = triggeredExecs.getLast();
|
||||
|
||||
// assert on the main flow execution
|
||||
assertThat(execution.getTaskRunList()).hasSize(5);
|
||||
@@ -363,36 +265,25 @@ public class ForEachItemCaseTest {
|
||||
assertThat(iterations.get("SUCCESS")).isEqualTo(26);
|
||||
|
||||
// assert on the last subflow execution
|
||||
assertThat(triggered.get().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
assertThat(triggered.get().getFlowId()).isEqualTo("for-each-item-subflow");
|
||||
assertThat((String) triggered.get().getInputs().get("items")).matches("kestra:///io/kestra/tests/for-each-item-in-if/executions/.*/tasks/each-split/.*\\.txt");
|
||||
assertThat(triggered.get().getTaskRunList()).hasSize(1);
|
||||
Optional<Label> correlationId = triggered.get().getLabels().stream().filter(label -> label.key().equals(Label.CORRELATION_ID)).findAny();
|
||||
assertThat(triggered.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
assertThat(triggered.getFlowId()).isEqualTo("for-each-item-subflow");
|
||||
assertThat((String) triggered.getInputs().get("items")).matches("kestra:///io/kestra/tests/for-each-item-in-if/executions/.*/tasks/each-split/.*\\.txt");
|
||||
assertThat(triggered.getTaskRunList()).hasSize(1);
|
||||
Optional<Label> correlationId = triggered.getLabels().stream().filter(label -> label.key().equals(Label.CORRELATION_ID)).findAny();
|
||||
assertThat(correlationId.isPresent()).isTrue();
|
||||
assertThat(correlationId.get().value()).isEqualTo(execution.getId());
|
||||
}
|
||||
|
||||
public void forEachItemWithAfterExecution() throws TimeoutException, InterruptedException, URISyntaxException, IOException, QueueException {
|
||||
CountDownLatch countDownLatch = new CountDownLatch(26);
|
||||
AtomicReference<Execution> triggered = new AtomicReference<>();
|
||||
|
||||
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
|
||||
Execution execution = either.getLeft();
|
||||
if (execution.getFlowId().equals("for-each-item-subflow-after-execution") && execution.getState().getCurrent().isTerminated()) {
|
||||
triggered.set(execution);
|
||||
countDownLatch.countDown();
|
||||
}
|
||||
});
|
||||
|
||||
URI file = storageUpload();
|
||||
public void forEachItemWithAfterExecution() throws TimeoutException, URISyntaxException, IOException, QueueException {
|
||||
URI file = storageUpload(MAIN_TENANT);
|
||||
Map<String, Object> inputs = Map.of("file", file.toString(), "batch", 4);
|
||||
Execution execution = runnerUtils.runOne(MAIN_TENANT, TEST_NAMESPACE, "for-each-item-after-execution", null,
|
||||
(flow, execution1) -> flowIO.readExecutionInputs(flow, execution1, inputs),
|
||||
Duration.ofSeconds(30));
|
||||
|
||||
// we should have triggered 26 subflows
|
||||
assertThat(countDownLatch.await(1, TimeUnit.MINUTES)).isTrue();
|
||||
receive.blockLast();
|
||||
List<Execution> triggeredExecs = runnerUtils.awaitFlowExecutionNumber(26, MAIN_TENANT, TEST_NAMESPACE, "for-each-item-subflow-after-execution");
|
||||
Execution triggered = triggeredExecs.getLast();
|
||||
|
||||
// assert on the main flow execution
|
||||
assertThat(execution.getTaskRunList()).hasSize(5);
|
||||
@@ -408,33 +299,33 @@ public class ForEachItemCaseTest {
|
||||
assertThat(iterations.get("SUCCESS")).isEqualTo(26);
|
||||
|
||||
// assert on the last subflow execution
|
||||
assertThat(triggered.get().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
assertThat(triggered.get().getFlowId()).isEqualTo("for-each-item-subflow-after-execution");
|
||||
assertThat((String) triggered.get().getInputs().get("items")).matches("kestra:///io/kestra/tests/for-each-item-after-execution/executions/.*/tasks/each-split/.*\\.txt");
|
||||
assertThat(triggered.get().getTaskRunList()).hasSize(2);
|
||||
Optional<Label> correlationId = triggered.get().getLabels().stream().filter(label -> label.key().equals(Label.CORRELATION_ID)).findAny();
|
||||
assertThat(triggered.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
assertThat(triggered.getFlowId()).isEqualTo("for-each-item-subflow-after-execution");
|
||||
assertThat((String) triggered.getInputs().get("items")).matches("kestra:///io/kestra/tests/for-each-item-after-execution/executions/.*/tasks/each-split/.*\\.txt");
|
||||
assertThat(triggered.getTaskRunList()).hasSize(2);
|
||||
Optional<Label> correlationId = triggered.getLabels().stream().filter(label -> label.key().equals(Label.CORRELATION_ID)).findAny();
|
||||
assertThat(correlationId.isPresent()).isTrue();
|
||||
assertThat(correlationId.get().value()).isEqualTo(execution.getId());
|
||||
}
|
||||
|
||||
private URI storageUpload() throws URISyntaxException, IOException {
|
||||
private URI storageUpload(String tenantId) throws URISyntaxException, IOException {
|
||||
File tempFile = File.createTempFile("file", ".txt");
|
||||
|
||||
Files.write(tempFile.toPath(), content());
|
||||
|
||||
return storageInterface.put(
|
||||
MAIN_TENANT,
|
||||
tenantId,
|
||||
null,
|
||||
new URI("/file/storage/file.txt"),
|
||||
new FileInputStream(tempFile)
|
||||
);
|
||||
}
|
||||
|
||||
private URI emptyItems() throws URISyntaxException, IOException {
|
||||
private URI emptyItems(String tenantId) throws URISyntaxException, IOException {
|
||||
File tempFile = File.createTempFile("file", ".txt");
|
||||
|
||||
return storageInterface.put(
|
||||
MAIN_TENANT,
|
||||
tenantId,
|
||||
null,
|
||||
new URI("/file/storage/file.txt"),
|
||||
new FileInputStream(tempFile)
|
||||
|
||||
@@ -6,7 +6,7 @@ import io.kestra.core.junit.annotations.LoadFlows;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.queues.QueueException;
|
||||
import io.kestra.core.runners.RunnerUtils;
|
||||
import io.kestra.core.runners.TestRunnerUtils;
|
||||
import jakarta.inject.Inject;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
@@ -23,7 +23,7 @@ class IfTest {
|
||||
private static final String TENANT_ID = "true";
|
||||
|
||||
@Inject
|
||||
private RunnerUtils runnerUtils;
|
||||
private TestRunnerUtils runnerUtils;
|
||||
|
||||
@Test
|
||||
@LoadFlows(value = {"flows/valids/if-condition.yaml"}, tenantId = TENANT_ID)
|
||||
|
||||
@@ -3,10 +3,9 @@ package io.kestra.plugin.core.flow;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.queues.QueueException;
|
||||
import io.kestra.core.runners.RunnerUtils;
|
||||
import io.kestra.core.runners.TestRunnerUtils;
|
||||
import jakarta.inject.Inject;
|
||||
|
||||
import java.time.Duration;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
|
||||
@@ -16,7 +15,7 @@ import static org.assertj.core.api.Assertions.assertThat;
|
||||
public class LoopUntilCaseTest {
|
||||
|
||||
@Inject
|
||||
protected RunnerUtils runnerUtils;
|
||||
protected TestRunnerUtils runnerUtils;
|
||||
|
||||
public void waitfor() throws TimeoutException, QueueException {
|
||||
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "waitfor");
|
||||
|
||||
@@ -10,7 +10,7 @@ import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.queues.QueueException;
|
||||
import io.kestra.core.runners.FlowInputOutput;
|
||||
import io.kestra.core.runners.RunnerUtils;
|
||||
import io.kestra.core.runners.TestRunnerUtils;
|
||||
import jakarta.inject.Inject;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
@@ -21,7 +21,7 @@ import java.util.concurrent.TimeoutException;
|
||||
@KestraTest(startRunner = true)
|
||||
class ParallelTest {
|
||||
@Inject
|
||||
protected RunnerUtils runnerUtils;
|
||||
protected TestRunnerUtils runnerUtils;
|
||||
|
||||
@Inject
|
||||
private FlowInputOutput flowIO;
|
||||
|
||||
@@ -8,10 +8,8 @@ import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.flows.Flow;
|
||||
import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.queues.QueueException;
|
||||
import io.kestra.core.queues.QueueFactoryInterface;
|
||||
import io.kestra.core.queues.QueueInterface;
|
||||
import io.kestra.core.repositories.FlowRepositoryInterface;
|
||||
import io.kestra.core.runners.RunnerUtils;
|
||||
import io.kestra.core.runners.TestRunnerUtils;
|
||||
import io.kestra.core.services.ExecutionService;
|
||||
import io.kestra.core.storages.StorageInterface;
|
||||
import io.micronaut.http.MediaType;
|
||||
@@ -23,7 +21,6 @@ import io.micronaut.http.server.netty.multipart.NettyCompletedFileUpload;
|
||||
import io.netty.buffer.Unpooled;
|
||||
import io.netty.handler.codec.http.multipart.*;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.inject.Named;
|
||||
import jakarta.inject.Singleton;
|
||||
import jakarta.validation.ConstraintViolationException;
|
||||
import org.junit.jupiter.api.Disabled;
|
||||
@@ -40,7 +37,6 @@ import java.util.Map;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
|
||||
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
|
||||
import static io.kestra.core.utils.Rethrow.throwRunnable;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.junit.jupiter.api.Assertions.assertThrows;
|
||||
|
||||
@@ -48,7 +44,7 @@ import static org.junit.jupiter.api.Assertions.assertThrows;
|
||||
public class PauseTest {
|
||||
|
||||
@Inject
|
||||
RunnerUtils runnerUtils;
|
||||
TestRunnerUtils runnerUtils;
|
||||
@Inject
|
||||
Suite suite;
|
||||
|
||||
@@ -156,11 +152,7 @@ public class PauseTest {
|
||||
@Inject
|
||||
StorageInterface storageInterface;
|
||||
|
||||
@Inject
|
||||
@Named(QueueFactoryInterface.EXECUTION_NAMED)
|
||||
protected QueueInterface<Execution> executionQueue;
|
||||
|
||||
public void run(RunnerUtils runnerUtils) throws Exception {
|
||||
public void run(TestRunnerUtils runnerUtils) throws Exception {
|
||||
Execution execution = runnerUtils.runOneUntilPaused(MAIN_TENANT, "io.kestra.tests", "pause-test", null, null, Duration.ofSeconds(30));
|
||||
String executionId = execution.getId();
|
||||
Flow flow = flowRepository.findByExecution(execution);
|
||||
@@ -176,16 +168,15 @@ public class PauseTest {
|
||||
State.Type.RUNNING
|
||||
);
|
||||
|
||||
execution = runnerUtils.awaitExecution(
|
||||
execution = runnerUtils.emitAndAwaitExecution(
|
||||
e -> e.getId().equals(executionId) && e.getState().getCurrent() == State.Type.SUCCESS,
|
||||
throwRunnable(() -> executionQueue.emit(restarted)),
|
||||
Duration.ofSeconds(5)
|
||||
restarted
|
||||
);
|
||||
|
||||
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
}
|
||||
|
||||
public void runDelay(RunnerUtils runnerUtils) throws Exception {
|
||||
public void runDelay(TestRunnerUtils runnerUtils) throws Exception {
|
||||
Execution execution = runnerUtils.runOneUntilPaused(MAIN_TENANT, "io.kestra.tests", "pause-delay", null, null, Duration.ofSeconds(30));
|
||||
String executionId = execution.getId();
|
||||
|
||||
@@ -193,9 +184,9 @@ public class PauseTest {
|
||||
assertThat(execution.getTaskRunList()).hasSize(1);
|
||||
|
||||
execution = runnerUtils.awaitExecution(
|
||||
e -> e.getId().equals(executionId) && e.getState().getCurrent() == State.Type.SUCCESS,
|
||||
() -> {},
|
||||
Duration.ofSeconds(5)
|
||||
e ->
|
||||
e.getId().equals(executionId) && e.getState().getCurrent() == State.Type.SUCCESS,
|
||||
execution
|
||||
);
|
||||
|
||||
assertThat(execution.getTaskRunList().getFirst().getState().getHistories().stream().filter(history -> history.getState() == State.Type.PAUSED).count()).isEqualTo(1L);
|
||||
@@ -203,7 +194,7 @@ public class PauseTest {
|
||||
assertThat(execution.getTaskRunList()).hasSize(3);
|
||||
}
|
||||
|
||||
public void runDurationFromInput(RunnerUtils runnerUtils) throws Exception {
|
||||
public void runDurationFromInput(TestRunnerUtils runnerUtils) throws Exception {
|
||||
Execution execution = runnerUtils.runOneUntilPaused(MAIN_TENANT, "io.kestra.tests", "pause-duration-from-input", null, null, Duration.ofSeconds(30));
|
||||
String executionId = execution.getId();
|
||||
|
||||
@@ -211,9 +202,9 @@ public class PauseTest {
|
||||
assertThat(execution.getTaskRunList()).hasSize(1);
|
||||
|
||||
execution = runnerUtils.awaitExecution(
|
||||
e -> e.getId().equals(executionId) && e.getState().getCurrent() == State.Type.SUCCESS,
|
||||
() -> {},
|
||||
Duration.ofSeconds(5)
|
||||
e ->
|
||||
e.getId().equals(executionId) && e.getState().getCurrent() == State.Type.SUCCESS,
|
||||
execution
|
||||
);
|
||||
|
||||
assertThat(execution.getTaskRunList().getFirst().getState().getHistories().stream().filter(history -> history.getState() == State.Type.PAUSED).count()).isEqualTo(1L);
|
||||
@@ -221,14 +212,14 @@ public class PauseTest {
|
||||
assertThat(execution.getTaskRunList()).hasSize(3);
|
||||
}
|
||||
|
||||
public void runParallelDelay(RunnerUtils runnerUtils) throws TimeoutException, QueueException {
|
||||
public void runParallelDelay(TestRunnerUtils runnerUtils) throws TimeoutException, QueueException {
|
||||
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "each-parallel-pause", Duration.ofSeconds(30));
|
||||
|
||||
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
assertThat(execution.getTaskRunList()).hasSize(7);
|
||||
}
|
||||
|
||||
public void runTimeout(RunnerUtils runnerUtils) throws Exception {
|
||||
public void runTimeout(TestRunnerUtils runnerUtils) throws Exception {
|
||||
Execution execution = runnerUtils.runOneUntilPaused(MAIN_TENANT, "io.kestra.tests", "pause-timeout", null, null, Duration.ofSeconds(30));
|
||||
String executionId = execution.getId();
|
||||
|
||||
@@ -237,8 +228,7 @@ public class PauseTest {
|
||||
|
||||
execution = runnerUtils.awaitExecution(
|
||||
e -> e.getId().equals(executionId) && e.getState().getCurrent() == State.Type.FAILED,
|
||||
() -> {},
|
||||
Duration.ofSeconds(5)
|
||||
execution
|
||||
);
|
||||
|
||||
assertThat(execution.getTaskRunList().getFirst().getState().getHistories().stream().filter(history -> history.getState() == State.Type.PAUSED).count()).as("Task runs were: " + execution.getTaskRunList().toString()).isEqualTo(1L);
|
||||
@@ -247,7 +237,7 @@ public class PauseTest {
|
||||
assertThat(execution.getTaskRunList()).hasSize(1);
|
||||
}
|
||||
|
||||
public void runTimeoutAllowFailure(RunnerUtils runnerUtils) throws Exception {
|
||||
public void runTimeoutAllowFailure(TestRunnerUtils runnerUtils) throws Exception {
|
||||
Execution execution = runnerUtils.runOneUntilPaused(MAIN_TENANT, "io.kestra.tests", "pause-timeout-allow-failure", null, null, Duration.ofSeconds(30));
|
||||
String executionId = execution.getId();
|
||||
|
||||
@@ -256,8 +246,7 @@ public class PauseTest {
|
||||
|
||||
execution = runnerUtils.awaitExecution(
|
||||
e -> e.getId().equals(executionId) && e.getState().getCurrent() == State.Type.WARNING,
|
||||
() -> {},
|
||||
Duration.ofSeconds(5)
|
||||
execution
|
||||
);
|
||||
|
||||
assertThat(execution.getTaskRunList().getFirst().getState().getHistories().stream().filter(history -> history.getState() == State.Type.PAUSED).count()).as("Task runs were: " + execution.getTaskRunList().toString()).isEqualTo(1L);
|
||||
@@ -266,7 +255,7 @@ public class PauseTest {
|
||||
assertThat(execution.getTaskRunList()).hasSize(3);
|
||||
}
|
||||
|
||||
public void runEmptyTasks(RunnerUtils runnerUtils) throws Exception {
|
||||
public void runEmptyTasks(TestRunnerUtils runnerUtils) throws Exception {
|
||||
Execution execution = runnerUtils.runOneUntilPaused(MAIN_TENANT, "io.kestra.tests", "pause_no_tasks", null, null, Duration.ofSeconds(30));
|
||||
String executionId = execution.getId();
|
||||
Flow flow = flowRepository.findByExecution(execution);
|
||||
@@ -282,17 +271,16 @@ public class PauseTest {
|
||||
State.Type.RUNNING
|
||||
);
|
||||
|
||||
execution = runnerUtils.awaitExecution(
|
||||
execution = runnerUtils.emitAndAwaitExecution(
|
||||
e -> e.getId().equals(executionId) && e.getState().getCurrent() == State.Type.SUCCESS,
|
||||
throwRunnable(() -> executionQueue.emit(restarted)),
|
||||
Duration.ofSeconds(10)
|
||||
restarted
|
||||
);
|
||||
|
||||
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public void runOnResume(RunnerUtils runnerUtils) throws Exception {
|
||||
public void runOnResume(TestRunnerUtils runnerUtils) throws Exception {
|
||||
Execution execution = runnerUtils.runOneUntilPaused(MAIN_TENANT, "io.kestra.tests", "pause_on_resume", null, null, Duration.ofSeconds(30));
|
||||
String executionId = execution.getId();
|
||||
Flow flow = flowRepository.findByExecution(execution);
|
||||
@@ -315,10 +303,9 @@ public class PauseTest {
|
||||
null
|
||||
).block();
|
||||
|
||||
execution = runnerUtils.awaitExecution(
|
||||
execution = runnerUtils.emitAndAwaitExecution(
|
||||
e -> e.getId().equals(executionId) && e.getState().getCurrent() == State.Type.SUCCESS,
|
||||
throwRunnable(() -> executionQueue.emit(restarted)),
|
||||
Duration.ofSeconds(10)
|
||||
restarted
|
||||
);
|
||||
|
||||
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
@@ -329,7 +316,7 @@ public class PauseTest {
|
||||
assertThat(CharStreams.toString(new InputStreamReader(storageInterface.get(MAIN_TENANT, null, URI.create((String) outputs.get("data")))))).isEqualTo(executionId);
|
||||
}
|
||||
|
||||
public void runOnResumeMissingInputs(String tenantId, RunnerUtils runnerUtils) throws Exception {
|
||||
public void runOnResumeMissingInputs(String tenantId, TestRunnerUtils runnerUtils) throws Exception {
|
||||
Execution execution = runnerUtils.runOneUntilPaused(tenantId, "io.kestra.tests", "pause_on_resume", null, null, Duration.ofSeconds(30));
|
||||
Flow flow = flowRepository.findByExecution(execution);
|
||||
|
||||
@@ -344,7 +331,7 @@ public class PauseTest {
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public void runOnResumeOptionalInputs(RunnerUtils runnerUtils) throws Exception {
|
||||
public void runOnResumeOptionalInputs(TestRunnerUtils runnerUtils) throws Exception {
|
||||
Execution execution = runnerUtils.runOneUntilPaused(MAIN_TENANT, "io.kestra.tests", "pause_on_resume_optional", null, null, Duration.ofSeconds(30));
|
||||
String executionId = execution.getId();
|
||||
Flow flow = flowRepository.findByExecution(execution);
|
||||
@@ -353,10 +340,9 @@ public class PauseTest {
|
||||
|
||||
Execution restarted = executionService.resume(execution, flow, State.Type.RUNNING, Pause.Resumed.now());
|
||||
|
||||
execution = runnerUtils.awaitExecution(
|
||||
execution = runnerUtils.emitAndAwaitExecution(
|
||||
e -> e.getId().equals(executionId) && e.getState().getCurrent() == State.Type.SUCCESS,
|
||||
throwRunnable(() -> executionQueue.emit(restarted)),
|
||||
Duration.ofSeconds(10)
|
||||
restarted
|
||||
);
|
||||
|
||||
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
@@ -365,7 +351,7 @@ public class PauseTest {
|
||||
assertThat(outputs.get("asked")).isEqualTo("MISSING");
|
||||
}
|
||||
|
||||
public void runDurationWithBehavior(String tenantId, RunnerUtils runnerUtils, Pause.Behavior behavior) throws Exception {
|
||||
public void runDurationWithBehavior(String tenantId, TestRunnerUtils runnerUtils, Pause.Behavior behavior) throws Exception {
|
||||
Execution execution = runnerUtils.runOneUntilPaused(tenantId, "io.kestra.tests", "pause-behavior", null, (unused, _unused) -> Map.of("behavior", behavior), Duration.ofSeconds(30));
|
||||
String executionId = execution.getId();
|
||||
|
||||
@@ -374,8 +360,7 @@ public class PauseTest {
|
||||
|
||||
execution = runnerUtils.awaitExecution(
|
||||
e -> e.getId().equals(executionId) && e.getState().getCurrent().isTerminated(),
|
||||
() -> {},
|
||||
Duration.ofSeconds(5)
|
||||
execution
|
||||
);
|
||||
|
||||
State.Type finalState = behavior == Pause.Behavior.RESUME ? State.Type.SUCCESS : behavior.mapToState();
|
||||
|
||||
@@ -1,27 +1,22 @@
|
||||
package io.kestra.plugin.core.flow;
|
||||
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.flows.Flow;
|
||||
import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.queues.QueueException;
|
||||
import io.kestra.core.queues.QueueFactoryInterface;
|
||||
import io.kestra.core.queues.QueueInterface;
|
||||
import io.kestra.core.runners.RunnerUtils;
|
||||
import io.kestra.core.repositories.ExecutionRepositoryInterface;
|
||||
import io.kestra.core.repositories.FlowRepositoryInterface;
|
||||
import io.kestra.core.runners.TestRunnerUtils;
|
||||
import io.kestra.core.utils.Await;
|
||||
import io.kestra.core.utils.TestsUtils;
|
||||
import io.micronaut.data.model.Pageable;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.inject.Named;
|
||||
import jakarta.inject.Singleton;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import reactor.core.publisher.Flux;
|
||||
|
||||
import java.time.Duration;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
|
||||
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
@Slf4j
|
||||
@@ -29,11 +24,11 @@ import static org.assertj.core.api.Assertions.assertThat;
|
||||
public class RetryCaseTest {
|
||||
|
||||
@Inject
|
||||
@Named(QueueFactoryInterface.EXECUTION_NAMED)
|
||||
private QueueInterface<Execution> executionQueue;
|
||||
|
||||
protected TestRunnerUtils runnerUtils;
|
||||
@Inject
|
||||
protected RunnerUtils runnerUtils;
|
||||
private ExecutionRepositoryInterface executionRepository;
|
||||
@Inject
|
||||
private FlowRepositoryInterface flowRepository;
|
||||
|
||||
public void retrySuccess(Execution execution) {
|
||||
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.WARNING);
|
||||
@@ -72,117 +67,53 @@ public class RetryCaseTest {
|
||||
|
||||
}
|
||||
|
||||
public void retryNewExecutionTaskDuration() throws TimeoutException, QueueException {
|
||||
CountDownLatch countDownLatch = new CountDownLatch(3);
|
||||
AtomicReference<List<State.Type>> stateHistory = new AtomicReference<>(new ArrayList<>());
|
||||
|
||||
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
|
||||
Execution execution = either.getLeft();
|
||||
if (execution.getFlowId().equals("retry-new-execution-task-duration") && execution.getState().getCurrent().isTerminated()) {
|
||||
List<State.Type> stateHistoryList = stateHistory.get();
|
||||
stateHistoryList.add(execution.getState().getCurrent());
|
||||
stateHistory.set(stateHistoryList);
|
||||
countDownLatch.countDown();
|
||||
}
|
||||
});
|
||||
|
||||
runnerUtils.runOne(
|
||||
MAIN_TENANT,
|
||||
"io.kestra.tests",
|
||||
"retry-new-execution-task-duration",
|
||||
null,
|
||||
null
|
||||
);
|
||||
|
||||
Await.until(() -> countDownLatch.getCount() == 0, Duration.ofSeconds(2), Duration.ofMinutes(1));
|
||||
receive.blockLast();
|
||||
assertThat(stateHistory.get()).containsExactlyInAnyOrder(State.Type.RETRIED, State.Type.RETRIED, State.Type.FAILED);
|
||||
public void retryNewExecutionTaskDuration(String tenant) throws TimeoutException, QueueException {
|
||||
var flow = flowRepository
|
||||
.findById(tenant, "io.kestra.tests", "retry-new-execution-task-duration")
|
||||
.orElseThrow();
|
||||
runAndAssertThereWasTwoRetriesAndFinishedFailed(flow);
|
||||
}
|
||||
|
||||
public void retryNewExecutionTaskAttempts() throws TimeoutException, QueueException {
|
||||
CountDownLatch countDownLatch = new CountDownLatch(3);
|
||||
AtomicReference<List<State.Type>> stateHistory = new AtomicReference<>(new ArrayList<>());
|
||||
|
||||
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
|
||||
Execution execution = either.getLeft();
|
||||
if (execution.getFlowId().equals("retry-new-execution-task-attempts") && execution.getState().getCurrent().isTerminated()) {
|
||||
List<State.Type> stateHistoryList = stateHistory.get();
|
||||
stateHistoryList.add(execution.getState().getCurrent());
|
||||
stateHistory.set(stateHistoryList);
|
||||
countDownLatch.countDown();
|
||||
}
|
||||
});
|
||||
|
||||
runnerUtils.runOne(
|
||||
MAIN_TENANT,
|
||||
"io.kestra.tests",
|
||||
"retry-new-execution-task-attempts",
|
||||
null,
|
||||
null
|
||||
);
|
||||
|
||||
Await.until(() -> countDownLatch.getCount() == 0, Duration.ofSeconds(2), Duration.ofMinutes(1));
|
||||
receive.blockLast();
|
||||
assertThat(stateHistory.get()).containsExactlyInAnyOrder(State.Type.RETRIED, State.Type.RETRIED, State.Type.FAILED);
|
||||
public void retryNewExecutionTaskAttempts(String tenant) throws TimeoutException, QueueException {
|
||||
var flow = flowRepository
|
||||
.findById(tenant, "io.kestra.tests", "retry-new-execution-task-attempts")
|
||||
.orElseThrow();
|
||||
runAndAssertThereWasTwoRetriesAndFinishedFailed(flow);
|
||||
}
|
||||
|
||||
public void retryNewExecutionFlowDuration() throws TimeoutException, QueueException {
|
||||
CountDownLatch countDownLatch = new CountDownLatch(3);
|
||||
AtomicReference<List<State.Type>> stateHistory = new AtomicReference<>(new ArrayList<>());
|
||||
|
||||
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
|
||||
Execution execution = either.getLeft();
|
||||
if (execution.getFlowId().equals("retry-new-execution-flow-duration") && execution.getState().getCurrent().isTerminated()) {
|
||||
List<State.Type> stateHistoryList = stateHistory.get();
|
||||
stateHistoryList.add(execution.getState().getCurrent());
|
||||
stateHistory.set(stateHistoryList);
|
||||
countDownLatch.countDown();
|
||||
}
|
||||
});
|
||||
|
||||
runnerUtils.runOne(
|
||||
MAIN_TENANT,
|
||||
"io.kestra.tests",
|
||||
"retry-new-execution-flow-duration",
|
||||
null,
|
||||
null
|
||||
);
|
||||
|
||||
Await.until(() -> countDownLatch.getCount() == 0, Duration.ofSeconds(2), Duration.ofMinutes(1));
|
||||
receive.blockLast();
|
||||
assertThat(stateHistory.get()).containsExactlyInAnyOrder(State.Type.RETRIED, State.Type.RETRIED, State.Type.FAILED);
|
||||
public void retryNewExecutionFlowDuration(String tenant) throws TimeoutException, QueueException {
|
||||
var flow = flowRepository
|
||||
.findById(tenant, "io.kestra.tests", "retry-new-execution-flow-duration")
|
||||
.orElseThrow();
|
||||
runAndAssertThereWasTwoRetriesAndFinishedFailed(flow);
|
||||
}
|
||||
|
||||
public void retryNewExecutionFlowAttempts() throws TimeoutException, QueueException {
|
||||
CountDownLatch countDownLatch = new CountDownLatch(3);
|
||||
AtomicReference<List<State.Type>> stateHistory = new AtomicReference<>(new ArrayList<>());
|
||||
|
||||
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
|
||||
Execution execution = either.getLeft();
|
||||
if (execution.getFlowId().equals("retry-new-execution-flow-attempts") && execution.getState().getCurrent().isTerminated()) {
|
||||
List<State.Type> stateHistoryList = stateHistory.get();
|
||||
stateHistoryList.add(execution.getState().getCurrent());
|
||||
stateHistory.set(stateHistoryList);
|
||||
countDownLatch.countDown();
|
||||
}
|
||||
});
|
||||
public void retryNewExecutionFlowAttempts(String tenant) throws TimeoutException, QueueException {
|
||||
var flow = flowRepository
|
||||
.findById(tenant, "io.kestra.tests", "retry-new-execution-flow-attempts")
|
||||
.orElseThrow();
|
||||
runAndAssertThereWasTwoRetriesAndFinishedFailed(flow);
|
||||
}
|
||||
|
||||
private void runAndAssertThereWasTwoRetriesAndFinishedFailed(Flow flow) throws TimeoutException, QueueException {
|
||||
runnerUtils.runOne(
|
||||
MAIN_TENANT,
|
||||
"io.kestra.tests",
|
||||
"retry-new-execution-flow-attempts",
|
||||
null,
|
||||
null
|
||||
Execution.newExecution(flow, null),
|
||||
flow,
|
||||
Duration.ofSeconds(10)
|
||||
);
|
||||
|
||||
Await.until(() -> countDownLatch.getCount() == 0, Duration.ofSeconds(2), Duration.ofMinutes(1));
|
||||
receive.blockLast();
|
||||
assertThat(stateHistory.get()).containsExactlyInAnyOrder(State.Type.RETRIED, State.Type.RETRIED, State.Type.FAILED);
|
||||
Await.until(
|
||||
() -> "flow should have ended in Failed state",
|
||||
() -> executionRepository.findLatestForStates(flow.getTenantId(), flow.getNamespace(), flow.getId(), List.of(State.Type.FAILED)).isPresent(),
|
||||
Duration.ofMillis(100),
|
||||
Duration.ofSeconds(10)
|
||||
);
|
||||
var executions = executionRepository.findByFlowId(flow.getTenantId(), flow.getNamespace(), flow.getId(), Pageable.UNPAGED);
|
||||
assertThat(executions.stream().map(e -> e.getState().getCurrent())).contains(State.Type.RETRIED, State.Type.RETRIED, State.Type.FAILED);
|
||||
}
|
||||
|
||||
public void retryFailedTaskDuration(Execution execution) {
|
||||
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.FAILED);
|
||||
assertThat(execution.getTaskRunList().getFirst().attemptNumber()).isEqualTo(3);
|
||||
assertThat(execution.getTaskRunList().getFirst().attemptNumber()).isGreaterThanOrEqualTo(2);
|
||||
}
|
||||
|
||||
public void retryFailedTaskAttempts(Execution execution) {
|
||||
|
||||
@@ -10,7 +10,7 @@ import io.kestra.core.models.Label;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.queues.QueueException;
|
||||
import io.kestra.core.runners.RunnerUtils;
|
||||
import io.kestra.core.runners.TestRunnerUtils;
|
||||
import jakarta.inject.Inject;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
@@ -21,7 +21,7 @@ import org.junit.jupiter.api.Test;
|
||||
class RuntimeLabelsTest {
|
||||
|
||||
@Inject
|
||||
private RunnerUtils runnerUtils;
|
||||
private TestRunnerUtils runnerUtils;
|
||||
|
||||
@Test
|
||||
@LoadFlows({"flows/valids/labels-update-task.yml"})
|
||||
|
||||
@@ -10,7 +10,7 @@ import io.kestra.core.junit.annotations.LoadFlows;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.queues.QueueException;
|
||||
import io.kestra.core.runners.RunnerUtils;
|
||||
import io.kestra.core.runners.TestRunnerUtils;
|
||||
import io.kestra.core.utils.IdUtils;
|
||||
import jakarta.inject.Inject;
|
||||
import java.util.List;
|
||||
@@ -24,7 +24,7 @@ class StateTest {
|
||||
public static final String FLOW_ID = "state";
|
||||
public static final String NAMESPACE = "io.kestra.tests";
|
||||
@Inject
|
||||
private RunnerUtils runnerUtils;
|
||||
private TestRunnerUtils runnerUtils;
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@Test
|
||||
|
||||
@@ -9,7 +9,7 @@ import io.kestra.core.queues.QueueException;
|
||||
import io.kestra.core.queues.QueueFactoryInterface;
|
||||
import io.kestra.core.queues.QueueInterface;
|
||||
import io.kestra.core.repositories.ExecutionRepositoryInterface;
|
||||
import io.kestra.core.runners.RunnerUtils;
|
||||
import io.kestra.core.runners.TestRunnerUtils;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.inject.Named;
|
||||
import org.junit.jupiter.api.Test;
|
||||
@@ -27,7 +27,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
class SubflowRunnerTest {
|
||||
|
||||
@Inject
|
||||
private RunnerUtils runnerUtils;
|
||||
private TestRunnerUtils runnerUtils;
|
||||
|
||||
@Inject
|
||||
private ExecutionRepositoryInterface executionRepository;
|
||||
|
||||
@@ -9,7 +9,7 @@ import io.kestra.core.junit.annotations.LoadFlows;
|
||||
import io.kestra.core.models.executions.Execution;
|
||||
import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.queues.QueueException;
|
||||
import io.kestra.core.runners.RunnerUtils;
|
||||
import io.kestra.core.runners.TestRunnerUtils;
|
||||
import jakarta.inject.Inject;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
import org.junit.jupiter.api.Test;
|
||||
@@ -18,7 +18,7 @@ import org.junit.jupiter.api.Test;
|
||||
class SwitchTest {
|
||||
|
||||
@Inject
|
||||
private RunnerUtils runnerUtils;
|
||||
private TestRunnerUtils runnerUtils;
|
||||
|
||||
@Test
|
||||
@LoadFlows(value = {"flows/valids/switch.yaml"}, tenantId = "switch")
|
||||
|
||||
@@ -11,7 +11,7 @@ import io.kestra.core.queues.QueueFactoryInterface;
|
||||
import io.kestra.core.queues.QueueInterface;
|
||||
import io.kestra.core.repositories.TemplateRepositoryInterface;
|
||||
import io.kestra.core.runners.FlowInputOutput;
|
||||
import io.kestra.core.runners.RunnerUtils;
|
||||
import io.kestra.core.runners.TestRunnerUtils;
|
||||
import io.kestra.plugin.core.log.Log;
|
||||
import io.kestra.core.utils.TestsUtils;
|
||||
import io.micronaut.context.annotation.Property;
|
||||
@@ -45,7 +45,7 @@ public class TemplateTest {
|
||||
private FlowInputOutput flowIO;
|
||||
|
||||
@Inject
|
||||
protected RunnerUtils runnerUtils;
|
||||
protected TestRunnerUtils runnerUtils;
|
||||
|
||||
public static final io.kestra.core.models.templates.Template TEMPLATE_1 = io.kestra.core.models.templates.Template.builder()
|
||||
.id("template")
|
||||
@@ -54,7 +54,7 @@ public class TemplateTest {
|
||||
.tasks(Collections.singletonList(Log.builder().id("test").type(Log.class.getName()).message("{{ parent.outputs.args['my-forward'] }}").build())).build();
|
||||
|
||||
public static void withTemplate(
|
||||
RunnerUtils runnerUtils,
|
||||
TestRunnerUtils runnerUtils,
|
||||
TemplateRepositoryInterface templateRepository,
|
||||
QueueInterface<LogEntry> logQueue,
|
||||
FlowInputOutput flowIO
|
||||
@@ -90,7 +90,7 @@ public class TemplateTest {
|
||||
}
|
||||
|
||||
|
||||
public static void withFailedTemplate(RunnerUtils runnerUtils, QueueInterface<LogEntry> logQueue) throws TimeoutException, QueueException {
|
||||
public static void withFailedTemplate(TestRunnerUtils runnerUtils, QueueInterface<LogEntry> logQueue) throws TimeoutException, QueueException {
|
||||
List<LogEntry> logs = new CopyOnWriteArrayList<>();
|
||||
Flux<LogEntry> receive = TestsUtils.receive(logQueue, either -> logs.add(either.getLeft()));
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@ import io.kestra.core.queues.QueueException;
|
||||
import io.kestra.core.queues.QueueFactoryInterface;
|
||||
import io.kestra.core.queues.QueueInterface;
|
||||
import io.kestra.core.repositories.FlowRepositoryInterface;
|
||||
import io.kestra.core.runners.RunnerUtils;
|
||||
import io.kestra.core.runners.TestRunnerUtils;
|
||||
import io.kestra.core.utils.IdUtils;
|
||||
import io.kestra.core.utils.TestsUtils;
|
||||
import jakarta.inject.Inject;
|
||||
@@ -37,7 +37,7 @@ class TimeoutTest {
|
||||
private QueueInterface<LogEntry> workerTaskLogQueue;
|
||||
|
||||
@Inject
|
||||
private RunnerUtils runnerUtils;
|
||||
private TestRunnerUtils runnerUtils;
|
||||
|
||||
@RetryingTest(5) // Flaky on CI but never locally even with 100 repetitions
|
||||
void timeout() throws TimeoutException, QueueException {
|
||||
|
||||
@@ -12,7 +12,7 @@ import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.queues.QueueException;
|
||||
import io.kestra.core.queues.QueueFactoryInterface;
|
||||
import io.kestra.core.queues.QueueInterface;
|
||||
import io.kestra.core.runners.RunnerUtils;
|
||||
import io.kestra.core.runners.TestRunnerUtils;
|
||||
import io.kestra.core.utils.TestsUtils;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.inject.Named;
|
||||
@@ -31,7 +31,7 @@ class VariablesTest {
|
||||
QueueInterface<LogEntry> workerTaskLogQueue;
|
||||
|
||||
@Inject
|
||||
private RunnerUtils runnerUtils;
|
||||
private TestRunnerUtils runnerUtils;
|
||||
|
||||
@Test
|
||||
@ExecuteFlow("flows/valids/variables.yaml")
|
||||
|
||||
@@ -16,7 +16,7 @@ import io.kestra.core.models.flows.State;
|
||||
import io.kestra.core.models.tasks.common.EncryptedString;
|
||||
import io.kestra.core.queues.QueueException;
|
||||
import io.kestra.core.runners.RunContextFactory;
|
||||
import io.kestra.core.runners.RunnerUtils;
|
||||
import io.kestra.core.runners.TestRunnerUtils;
|
||||
import io.kestra.core.storages.InternalStorage;
|
||||
import io.kestra.core.storages.StorageContext;
|
||||
import io.kestra.core.storages.StorageInterface;
|
||||
@@ -32,7 +32,6 @@ import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junitpioneer.jupiter.RetryingTest;
|
||||
|
||||
@KestraTest(startRunner = true)
|
||||
public class WorkingDirectoryTest {
|
||||
@@ -43,7 +42,7 @@ public class WorkingDirectoryTest {
|
||||
RunContextFactory runContextFactory;
|
||||
|
||||
@Inject
|
||||
RunnerUtils runnerUtils;
|
||||
TestRunnerUtils runnerUtils;
|
||||
|
||||
@Test
|
||||
@LoadFlows({"flows/valids/working-directory.yaml"})
|
||||
@@ -122,7 +121,7 @@ public class WorkingDirectoryTest {
|
||||
@Inject
|
||||
StorageInterface storageInterface;
|
||||
|
||||
public void success(RunnerUtils runnerUtils) throws TimeoutException, QueueException {
|
||||
public void success(TestRunnerUtils runnerUtils) throws TimeoutException, QueueException {
|
||||
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "working-directory", null,
|
||||
(f, e) -> ImmutableMap.of("failed", "false"), Duration.ofSeconds(60)
|
||||
);
|
||||
@@ -132,7 +131,7 @@ public class WorkingDirectoryTest {
|
||||
assertThat((String) execution.getTaskRunList().get(3).getOutputs().get("value")).startsWith("kestra://");
|
||||
}
|
||||
|
||||
public void failed(String tenantId, RunnerUtils runnerUtils) throws TimeoutException, QueueException {
|
||||
public void failed(String tenantId, TestRunnerUtils runnerUtils) throws TimeoutException, QueueException {
|
||||
Execution execution = runnerUtils.runOne(tenantId, "io.kestra.tests", "working-directory", null,
|
||||
(f, e) -> ImmutableMap.of("failed", "true"), Duration.ofSeconds(60)
|
||||
);
|
||||
@@ -142,7 +141,7 @@ public class WorkingDirectoryTest {
|
||||
assertThat(execution.findTaskRunsByTaskId("error-t1")).hasSize(1);
|
||||
}
|
||||
|
||||
public void each(RunnerUtils runnerUtils) throws TimeoutException, QueueException {
|
||||
public void each(TestRunnerUtils runnerUtils) throws TimeoutException, QueueException {
|
||||
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "working-directory-each", Duration.ofSeconds(60));
|
||||
|
||||
assertThat(execution.getTaskRunList()).hasSize(8);
|
||||
@@ -151,7 +150,7 @@ public class WorkingDirectoryTest {
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public void outputFiles(String tenantId, RunnerUtils runnerUtils) throws TimeoutException, IOException, QueueException {
|
||||
public void outputFiles(String tenantId, TestRunnerUtils runnerUtils) throws TimeoutException, IOException, QueueException {
|
||||
|
||||
Execution execution = runnerUtils.runOne(tenantId, "io.kestra.tests", "working-directory-outputs");
|
||||
|
||||
@@ -178,7 +177,7 @@ public class WorkingDirectoryTest {
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public void inputFiles(RunnerUtils runnerUtils) throws TimeoutException, IOException, QueueException {
|
||||
public void inputFiles(TestRunnerUtils runnerUtils) throws TimeoutException, IOException, QueueException {
|
||||
|
||||
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "working-directory-inputs");
|
||||
|
||||
@@ -204,7 +203,7 @@ public class WorkingDirectoryTest {
|
||||
}
|
||||
|
||||
@SuppressWarnings({"unchecked", "OptionalGetWithoutIsPresent"})
|
||||
public void cache(RunnerUtils runnerUtils) throws TimeoutException, IOException, QueueException {
|
||||
public void cache(TestRunnerUtils runnerUtils) throws TimeoutException, IOException, QueueException {
|
||||
// make sure the cache didn't exist
|
||||
StorageContext storageContext = StorageContext.forFlow(Flow
|
||||
.builder()
|
||||
@@ -247,7 +246,7 @@ public class WorkingDirectoryTest {
|
||||
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
|
||||
}
|
||||
|
||||
public void taskRun(RunnerUtils runnerUtils) throws TimeoutException, InternalException, QueueException {
|
||||
public void taskRun(TestRunnerUtils runnerUtils) throws TimeoutException, InternalException, QueueException {
|
||||
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "working-directory-taskrun");
|
||||
|
||||
assertThat(execution.getTaskRunList()).hasSize(3);
|
||||
@@ -255,7 +254,7 @@ public class WorkingDirectoryTest {
|
||||
assertThat(((String) execution.findTaskRunByTaskIdAndValue("log-taskrun", List.of("1")).getOutputs().get("value"))).contains("1");
|
||||
}
|
||||
|
||||
public void taskRunNested(RunnerUtils runnerUtils) throws TimeoutException, InternalException, QueueException {
|
||||
public void taskRunNested(TestRunnerUtils runnerUtils) throws TimeoutException, InternalException, QueueException {
|
||||
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "working-directory-taskrun-nested");
|
||||
|
||||
assertThat(execution.getTaskRunList()).hasSize(6);
|
||||
@@ -263,7 +262,7 @@ public class WorkingDirectoryTest {
|
||||
assertThat(((String) execution.findTaskRunByTaskIdAndValue("log-workerparent", List.of("1")).getOutputs().get("value"))).contains("{\"taskrun\":{\"value\":\"1\"}}");
|
||||
}
|
||||
|
||||
public void namespaceFiles(RunnerUtils runnerUtils) throws TimeoutException, IOException, QueueException {
|
||||
public void namespaceFiles(TestRunnerUtils runnerUtils) throws TimeoutException, IOException, QueueException {
|
||||
put("/test/a/b/c/1.txt", "first");
|
||||
put("/a/b/c/2.txt", "second");
|
||||
put("/a/b/3.txt", "third");
|
||||
@@ -279,7 +278,7 @@ public class WorkingDirectoryTest {
|
||||
assertThat(execution.findTaskRunsByTaskId("t3").getFirst().getOutputs().get("value")).isEqualTo("third");
|
||||
}
|
||||
|
||||
public void namespaceFilesWithNamespaces(RunnerUtils runnerUtils) throws TimeoutException, IOException, QueueException {
|
||||
public void namespaceFilesWithNamespaces(TestRunnerUtils runnerUtils) throws TimeoutException, IOException, QueueException {
|
||||
//fist namespace
|
||||
put("/test/a/b/c/1.txt", "first in first namespace", "io.test.first");
|
||||
put("/a/b/c/2.txt", "second in first namespace", "io.test.first");
|
||||
@@ -304,7 +303,7 @@ public class WorkingDirectoryTest {
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public void encryption(RunnerUtils runnerUtils, RunContextFactory runContextFactory) throws TimeoutException, GeneralSecurityException, QueueException {
|
||||
public void encryption(TestRunnerUtils runnerUtils, RunContextFactory runContextFactory) throws TimeoutException, GeneralSecurityException, QueueException {
|
||||
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "working-directory-taskrun-encrypted");
|
||||
|
||||
assertThat(execution.getTaskRunList()).hasSize(3);
|
||||
@@ -316,7 +315,7 @@ public class WorkingDirectoryTest {
|
||||
assertThat(execution.findTaskRunsByTaskId("decrypted").getFirst().getOutputs().get("value")).isEqualTo("Hello World");
|
||||
}
|
||||
|
||||
public void invalidRunIf(RunnerUtils runnerUtils) throws TimeoutException, QueueException {
|
||||
public void invalidRunIf(TestRunnerUtils runnerUtils) throws TimeoutException, QueueException {
|
||||
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "working-directory-invalid-runif", null,
|
||||
(f, e) -> ImmutableMap.of("failed", "false"), Duration.ofSeconds(60)
|
||||
);
|
||||
|
||||
@@ -130,7 +130,7 @@ public class PurgeKVTest {
|
||||
KVStore kvStore2 = runContext.namespaceKv(namespace2);
|
||||
kvStore2.put(KEY_EXPIRED, new KVValueAndMetadata(new KVMetadata("unused", Duration.ofMillis(1L)), "unused"));
|
||||
kvStore2.put(KEY, new KVValueAndMetadata(new KVMetadata("unused", Duration.ofMinutes(1L)), "unused"));
|
||||
kvStore2.put(KEY2_NEVER_EXPIRING, new KVValueAndMetadata(new KVMetadata("unused", null), "unused"));
|
||||
kvStore2.put(KEY2_NEVER_EXPIRING, new KVValueAndMetadata(new KVMetadata("unused", (Duration) null), "unused"));
|
||||
kvStore2.put(KEY3_NEVER_EXPIRING, new KVValueAndMetadata(null, "unused"));
|
||||
|
||||
PurgeKV purgeKV = PurgeKV.builder()
|
||||
@@ -156,7 +156,7 @@ public class PurgeKVTest {
|
||||
KVStore kvStore1 = runContext.namespaceKv(namespace);
|
||||
kvStore1.put(KEY_EXPIRED, new KVValueAndMetadata(new KVMetadata("unused", Duration.ofMillis(1L)), "unused"));
|
||||
kvStore1.put(KEY, new KVValueAndMetadata(new KVMetadata("unused", Duration.ofMinutes(1L)), "unused"));
|
||||
kvStore1.put(KEY2_NEVER_EXPIRING, new KVValueAndMetadata(new KVMetadata("unused", null), "unused"));
|
||||
kvStore1.put(KEY2_NEVER_EXPIRING, new KVValueAndMetadata(new KVMetadata("unused",(Duration) null), "unused"));
|
||||
kvStore1.put(KEY3_NEVER_EXPIRING, new KVValueAndMetadata(null, "unused"));
|
||||
|
||||
PurgeKV purgeKV = PurgeKV.builder()
|
||||
|
||||
@@ -11,6 +11,7 @@ import io.kestra.core.storages.kv.KVStore;
 import io.kestra.core.storages.kv.KVStoreException;
 import io.kestra.core.storages.kv.KVValue;
 import io.kestra.core.storages.kv.KVValueAndMetadata;
+import io.kestra.core.utils.IdUtils;
 import io.kestra.core.utils.TestsUtils;
 import jakarta.inject.Inject;
 import org.junit.jupiter.api.Assertions;

@@ -169,6 +170,7 @@ class SetTest {
     @Test
     void shouldFailGivenExistingKeyAndOverwriteFalse() throws Exception {
         // Given
+        String key = IdUtils.create();
         Set set = Set.builder()
             .id(Set.class.getSimpleName())
             .type(Set.class.getName())

@@ -179,16 +181,22 @@ class SetTest {

         var value = Map.of("date", Instant.now().truncatedTo(ChronoUnit.MILLIS), "int", 1, "string", "string");
         final RunContext runContext = TestsUtils.mockRunContext(this.runContextFactory, set, Map.of(
-            "key", "existing_key",
+            "key", key,
             "value", value
         ));

         // When - Then
         //set key a first:
-        runContext.namespaceKv(runContext.flowInfo().namespace()).put("existing_key", new KVValueAndMetadata(new KVMetadata("unused", null), value));
+        runContext.namespaceKv(runContext.flowInfo().namespace()).put(key, new KVValueAndMetadata(new KVMetadata("unused", (Instant)null), value));
         //fail because key is already set
-        KVStoreException exception = Assertions.assertThrows(KVStoreException.class, () -> set.run(runContext));
-        assertThat(exception.getMessage()).isEqualTo("Cannot set value for key 'existing_key'. Key already exists and `overwrite` is set to `false`.");
+        KVStoreException exception = Assertions.assertThrows(KVStoreException.class, () -> Set.builder()
+            .id(Set.class.getSimpleName())
+            .type(Set.class.getName())
+            .key(new Property<>("{{ inputs.key }}"))
+            .value(new Property<>("{{ inputs.value }}"))
+            .overwrite(Property.ofValue(false))
+            .build().run(runContext));
+        assertThat(exception.getMessage()).isEqualTo("Cannot set value for key '%s'. Key already exists and `overwrite` is set to `false`.".formatted(key));
     }

     @Test
@@ -5,7 +5,7 @@ import io.kestra.core.junit.annotations.LoadFlows;
 import io.kestra.core.models.executions.Execution;
 import io.kestra.core.models.executions.LogEntry;
 import io.kestra.core.repositories.LogRepositoryInterface;
-import io.kestra.core.runners.RunnerUtils;
+import io.kestra.core.runners.TestRunnerUtils;
 import jakarta.inject.Inject;
 import java.time.temporal.ChronoUnit;
 import java.util.stream.Stream;

@@ -29,7 +29,7 @@ class PurgeLogsTest {
     private LogRepositoryInterface logRepository;

     @Inject
-    protected RunnerUtils runnerUtils;
+    protected TestRunnerUtils runnerUtils;

     @Test
     @LoadFlows("flows/valids/purge_logs_no_arguments.yaml")
@@ -1,11 +1,12 @@
 package io.kestra.plugin.core.storage;

+import io.kestra.core.context.TestRunContextFactory;
 import io.kestra.core.exceptions.IllegalVariableEvaluationException;
 import io.kestra.core.models.property.Property;
 import io.kestra.core.runners.RunContextFactory;
 import io.kestra.core.storages.StorageInterface;
 import io.kestra.core.utils.IdUtils;
 import io.kestra.core.junit.annotations.KestraTest;
 import io.kestra.core.utils.TestsUtils;
 import jakarta.inject.Inject;
 import org.junit.jupiter.api.Test;

@@ -19,7 +20,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.Objects;

 import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.junit.jupiter.api.Assertions.assertThrows;

@@ -27,16 +27,16 @@ import static org.junit.jupiter.api.Assertions.assertThrows;
 @KestraTest
 class LocalFilesTest {
     @Inject
-    RunContextFactory runContextFactory;
+    TestRunContextFactory runContextFactory;

     @Inject
     StorageInterface storageInterface;

-    private URI internalFiles() throws IOException, URISyntaxException {
+    private URI internalFiles(String tenantId) throws IOException, URISyntaxException {
         var resource = ConcatTest.class.getClassLoader().getResource("application-test.yml");

         return storageInterface.put(
-            MAIN_TENANT,
+            tenantId,
             null,
             new URI("/file/storage/get.yml"),
             new FileInputStream(Objects.requireNonNull(resource).getFile())

@@ -46,8 +46,9 @@ class LocalFilesTest {

     @Test
     void run() throws Exception {
-        var runContext = runContextFactory.of(Map.of("toto", "tata"));
-        var storageFile = internalFiles();
+        String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
+        var runContext = runContextFactory.of("namesapce", tenant, Map.of("toto", "tata"));
+        var storageFile = internalFiles(tenant);

         var task = LocalFiles.builder()
             .id(IdUtils.create())

@@ -64,18 +65,19 @@ class LocalFilesTest {
         assertThat(outputs).isNotNull();
         assertThat(outputs.getUris()).isNotNull();
         assertThat(outputs.getUris().size()).isEqualTo(1);
-        assertThat(new String(storageInterface.get(MAIN_TENANT, null, outputs.getUris().get("hello-input.txt")).readAllBytes())).isEqualTo("Hello Input");
+        assertThat(new String(storageInterface.get(tenant, null, outputs.getUris().get("hello-input.txt")).readAllBytes())).isEqualTo("Hello Input");
         assertThat(runContext.workingDir().path().toFile().list().length).isEqualTo(2);
         assertThat(Files.readString(runContext.workingDir().path().resolve("execution.txt"))).isEqualTo("tata");
-        assertThat(Files.readString(runContext.workingDir().path().resolve("application-test.yml"))).isEqualTo(new String(storageInterface.get(MAIN_TENANT, null, storageFile).readAllBytes()));
+        assertThat(Files.readString(runContext.workingDir().path().resolve("application-test.yml"))).isEqualTo(new String(storageInterface.get(tenant, null, storageFile).readAllBytes()));

         runContext.cleanup();
     }

     @Test
     void recursive() throws Exception {
-        var runContext = runContextFactory.of(Map.of("toto", "tata"));
-        var storageFile = internalFiles();
+        String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
+        var runContext = runContextFactory.of("namesapce", tenant, Map.of("toto", "tata"));
+        var storageFile = internalFiles(tenant);

         var task = LocalFiles.builder()
             .id(IdUtils.create())

@@ -92,11 +94,11 @@ class LocalFilesTest {
         assertThat(outputs).isNotNull();
         assertThat(outputs.getUris()).isNotNull();
         assertThat(outputs.getUris().size()).isEqualTo(3);
-        assertThat(new String(storageInterface.get(MAIN_TENANT, null, outputs.getUris().get("test/hello-input.txt")).readAllBytes())).isEqualTo("Hello Input");
-        assertThat(new String(storageInterface.get(MAIN_TENANT, null, outputs.getUris().get("test/sub/dir/2/execution.txt"))
+        assertThat(new String(storageInterface.get(tenant, null, outputs.getUris().get("test/hello-input.txt")).readAllBytes())).isEqualTo("Hello Input");
+        assertThat(new String(storageInterface.get(tenant, null, outputs.getUris().get("test/sub/dir/2/execution.txt"))
             .readAllBytes())).isEqualTo("tata");
-        assertThat(new String(storageInterface.get(MAIN_TENANT, null, outputs.getUris().get("test/sub/dir/3/application-test.yml"))
-            .readAllBytes())).isEqualTo(new String(storageInterface.get(MAIN_TENANT, null, storageFile).readAllBytes()));
+        assertThat(new String(storageInterface.get(tenant, null, outputs.getUris().get("test/sub/dir/3/application-test.yml"))
+            .readAllBytes())).isEqualTo(new String(storageInterface.get(tenant, null, storageFile).readAllBytes()));
         runContext.cleanup();
     }
@@ -11,12 +11,14 @@ import io.kestra.core.models.triggers.Trigger;
 import io.kestra.core.queues.QueueFactoryInterface;
 import io.kestra.core.queues.QueueInterface;
 import io.kestra.core.repositories.TriggerRepositoryInterface;
-import io.kestra.core.runners.RunnerUtils;
+import io.kestra.core.runners.TestRunnerUtils;
 import io.kestra.scheduler.AbstractScheduler;
 import io.kestra.core.utils.Await;
 import io.kestra.core.utils.TestsUtils;
 import jakarta.inject.Inject;
 import jakarta.inject.Named;

+import java.time.Duration;
 import java.time.ZonedDateTime;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;

@@ -36,13 +38,13 @@ class ToggleTest {
     private AbstractScheduler scheduler;

     @Inject
-    private RunnerUtils runnerUtils;
+    private TestRunnerUtils runnerUtils;

     @Test
     @LoadFlows({"flows/valids/trigger-toggle.yaml"})
     void toggle() throws Exception {
         // we need to await for the scheduler to be ready otherwise there may be an issue with updating the trigger
-        Await.until(() -> scheduler.isReady());
+        Await.until(() -> scheduler.isReady(), Duration.ofMillis(100), Duration.ofSeconds(20));

         Trigger trigger = Trigger
             .builder()
36
core/src/test/resources/flows/valids/exit-nested.yaml
Normal file
@@ -0,0 +1,36 @@
id: exit-nested
namespace: io.kestra.tests

inputs:
  - id: someBool
    type: BOOL
    defaults: true

  - id: secondBool
    type: BOOL
    defaults: false

tasks:
  - id: if_some_bool
    type: io.kestra.plugin.core.flow.If
    condition: "{{ inputs.someBool }}"
    then:
      - id: was_true
        type: io.kestra.plugin.core.log.Log
        message: The value was true

      - id: nested_bool_check
        type: io.kestra.plugin.core.flow.If
        condition: "{{ inputs.secondBool }}"
        then:
          - id: was_also_true
            type: io.kestra.plugin.core.log.Log
            message: Was also true
        else:
          - id: nested_was_false
            type: io.kestra.plugin.core.execution.Exit
            state: FAILED
    else:
      - id: was_false
        type: io.kestra.plugin.core.execution.Exit
        state: FAILED
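A hypothetical usage sketch, not part of this diff, showing how the exit-nested flow above could be driven from a test. It reuses the runnerUtils.runOne pattern visible in the test changes earlier in this compare view; the package, class name, string-encoded boolean inputs, and FAILED assertion are assumptions rather than code from the commits. With someBool true and secondBool false, the nested else branch runs the Exit task, so the execution should finish in the FAILED state.

package io.kestra.plugin.core.flow; // hypothetical location for this sketch

import com.google.common.collect.ImmutableMap;
import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.junit.annotations.LoadFlows;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.State;
import io.kestra.core.runners.TestRunnerUtils;
import jakarta.inject.Inject;
import org.junit.jupiter.api.Test;

import java.time.Duration;

import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import static org.assertj.core.api.Assertions.assertThat;

@KestraTest
class ExitNestedSketchTest {
    @Inject
    private TestRunnerUtils runnerUtils;

    @Test
    @LoadFlows("flows/valids/exit-nested.yaml")
    void nestedElseExitsAsFailed() throws Exception {
        // someBool=true enters the outer `then`; secondBool=false falls into the nested `else`,
        // whose Exit task should end the whole execution in the FAILED state.
        Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "exit-nested", null,
            (f, e) -> ImmutableMap.of("someBool", "true", "secondBool", "false"),
            Duration.ofSeconds(60)
        );

        assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.FAILED);
    }
}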
@@ -0,0 +1,8 @@
id: flow-concurrency-subflow
namespace: io.kestra.tests

tasks:
  - id: subflow
    type: io.kestra.plugin.core.flow.Subflow
    namespace: io.kestra.tests
    flowId: flow-concurrency-cancel
@@ -7,6 +7,6 @@ tasks:
     retry:
       behavior: RETRY_FAILED_TASK
       type: constant
-      maxDuration: PT6S
+      maxDuration: PT7.5S
       interval: PT2S

@@ -7,5 +7,8 @@ concurrency:

 tasks:
   - id: sleep
     type: io.kestra.plugin.core.flow.Sleep
     duration: PT0.5S
+  - id: sleep_1
+    type: io.kestra.plugin.core.flow.Sleep
+    duration: PT0.5S
4
dev-tools/kestra-devtools/.gitignore
vendored
@@ -1,4 +0,0 @@
node_modules
dist
coverage
.DS_Store
@@ -1,6 +0,0 @@
{
  "semi": true,
  "singleQuote": false,
  "trailingComma": "all",
  "printWidth": 100
}
@@ -1,12 +0,0 @@
// @ts-check
import eslint from "@eslint/js";
import { defineConfig } from "eslint/config";
import tseslint from "typescript-eslint";

export default defineConfig(
    {
        ignores: ["dist/**", "coverage/**", "node_modules/**"],
    },
    eslint.configs.recommended,
    tseslint.configs.recommended,
);
7175
dev-tools/kestra-devtools/package-lock.json
generated
File diff suppressed because it is too large.
Some files were not shown because too many files have changed in this diff.