Compare commits

..

7 Commits

Author SHA1 Message Date
Roman Acevedo
93d53b9d57 try out to run in parallel JdbcRunnerRetryTest, lower exec run duration 2025-09-17 12:24:53 +02:00
Roman Acevedo
11e5e14e4e fix Timeline flamegraph 2025-09-17 11:44:04 +02:00
Roman Acevedo
72ce317c3d Update workflow-backend-test.yml 2025-09-17 11:44:04 +02:00
Roman Acevedo
4da44013c1 add Timeline flamegraph to temp debug tests on this branch 2025-09-17 11:44:04 +02:00
nKwiatkowski
c9995c6f42 fix(tests): failing unit tests 2025-09-17 10:31:39 +02:00
nKwiatkowski
a409299dd8 feat(tests): play jdbc h2 tests in parallel 2025-09-16 19:23:07 +02:00
Roman Acevedo
34cf67b0a4 test: make AbstractExecutionRepositoryTest parallelizable 2025-09-16 19:23:07 +02:00
343 changed files with 14797 additions and 5460 deletions

View File

@@ -0,0 +1,32 @@
# CI workflow for the kestra-devtools package: runs on pull requests targeting
# `develop` that touch dev-tools/kestra-devtools, and verifies npm install,
# unit tests, and the production build.
name: kestra-devtools test
on:
  pull_request:
    branches:
      - develop
    paths:
      - 'dev-tools/kestra-devtools/**'
# Collapse redundant runs for the same workflow/ref — matches the concurrency
# convention used by the repository's other pull-request workflow.
concurrency:
  group: ${{ github.workflow }}-${{ github.ref_name }}-pr
env:
  # to save corepack from itself; quoted so the value is unambiguously a
  # string rather than the YAML integer 0
  COREPACK_INTEGRITY_KEYS: "0"
jobs:
  test:
    name: kestra-devtools tests
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v5
      - name: Npm - install
        working-directory: 'dev-tools/kestra-devtools'
        run: npm ci
      - name: Run tests
        working-directory: 'dev-tools/kestra-devtools'
        run: npm run test
      - name: Npm - Run build
        working-directory: 'dev-tools/kestra-devtools'
        run: npm run build

View File

@@ -4,7 +4,6 @@ on:
pull_request:
branches:
- develop
- releases/*
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-pr

View File

@@ -64,11 +64,104 @@ jobs:
if: ${{ !cancelled() && github.event_name == 'pull_request' }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_AUTH_TOKEN }}
run: npx --yes @kestra-io/kestra-devtools generateTestReportSummary --only-errors --ci $(pwd)
# Report Java
- name: Report - Java
uses: kestra-io/actions/composite/report-java@main
if: ${{ !cancelled() }}
run: |
export KESTRA_PWD=$(pwd) && sh -c 'cd dev-tools/kestra-devtools && npm ci && npm run build && node dist/kestra-devtools-cli.cjs generateTestReportSummary --only-errors --ci $KESTRA_PWD' > report.md
cat report.md
# Gradle check
# Generate the merged Chrome-trace test timeline and upload it as a build
# artifact (kept for 5 days). Both steps run even when earlier steps failed.
- name: 'generate Timeline flamegraph'
  if: always()
  env:
    GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
  shell: bash
  run: |
    # printf with a quoted expansion avoids echo's escape-sequence handling
    # and word-splitting of the base64-encoded credential.
    printf '%s' "$GOOGLE_SERVICE_ACCOUNT" | base64 -d > ~/.gcp-service-account.json
    export GOOGLE_APPLICATION_CREDENTIALS=$HOME/.gcp-service-account.json
    ./gradlew mergeTestTimeline
- name: 'Upload Timeline flamegraph'
  uses: actions/upload-artifact@v4
  if: always()
  with:
    # NOTE(review): removed the `secrets: toJSON(secrets)` entry that was
    # here — upload-artifact declares no such input, and serializing all
    # repository secrets into a step's `with:` block risks exposing them.
    name: all-test-timelines.json
    path: build/reports/test-timelines-report/all-test-timelines.json
    retention-days: 5
# report test
- name: Test - Publish Test Results
uses: dorny/test-reporter@v2
if: always()
with:
name: Java Tests Report
reporter: java-junit
path: '**/build/test-results/test/TEST-*.xml'
list-suites: 'failed'
list-tests: 'failed'
fail-on-error: 'false'
token: ${{ secrets.GITHUB_AUTH_TOKEN }}
# Sonar
- name: Test - Analyze with Sonar
if: env.SONAR_TOKEN != ''
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_AUTH_TOKEN }}
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
shell: bash
run: ./gradlew sonar --info
# GCP
- name: GCP - Auth with unit test account
id: auth
if: always() && env.GOOGLE_SERVICE_ACCOUNT != ''
continue-on-error: true
uses: "google-github-actions/auth@v3"
with:
credentials_json: "${{ secrets.GOOGLE_SERVICE_ACCOUNT }}"
- name: GCP - Setup Cloud SDK
if: env.GOOGLE_SERVICE_ACCOUNT != ''
uses: "google-github-actions/setup-gcloud@v3"
# Allure check
- uses: rlespinasse/github-slug-action@v5
name: Allure - Generate slug variables
- name: Allure - Publish report
uses: andrcuns/allure-publish-action@v2.9.0
if: always() && env.GOOGLE_SERVICE_ACCOUNT != ''
continue-on-error: true
env:
GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_AUTH_TOKEN }}
JAVA_HOME: /usr/lib/jvm/default-jvm/
with:
storageType: gcs
resultsGlob: "**/build/allure-results"
bucket: internal-kestra-host
baseUrl: "https://internal.dev.kestra.io"
prefix: ${{ format('{0}/{1}', github.repository, 'allure/java') }}
copyLatest: true
ignoreMissingResults: true
# Jacoco
- name: Jacoco - Copy reports
if: env.GOOGLE_SERVICE_ACCOUNT != ''
continue-on-error: true
shell: bash
run: |
mv build/reports/jacoco/testCodeCoverageReport build/reports/jacoco/test/
mv build/reports/jacoco/test/testCodeCoverageReport.xml build/reports/jacoco/test/jacocoTestReport.xml
gsutil -m rsync -d -r build/reports/jacoco/test/ gs://internal-kestra-host/${{ format('{0}/{1}', github.repository, 'jacoco') }}
# Codecov
- name: Codecov - Upload coverage reports
uses: codecov/codecov-action@v5
if: ${{ !cancelled() }}
continue-on-error: true
with:
token: ${{ secrets.CODECOV_TOKEN }}
flags: backend
- name: Codecov - Upload test results
uses: codecov/test-results-action@v1
if: ${{ !cancelled() }}
continue-on-error: true
with:
token: ${{ secrets.CODECOV_TOKEN }}
flags: backend

View File

@@ -33,10 +33,10 @@
<p align="center">
<a href="https://go.kestra.io/video/product-overview" target="_blank">
<img src="https://kestra.io/startvideo.png" alt="Get started in 3 minutes with Kestra" width="640px" />
<img src="https://kestra.io/startvideo.png" alt="Get started in 4 minutes with Kestra" width="640px" />
</a>
</p>
<p align="center" style="color:grey;"><i>Click on the image to learn how to get started with Kestra in 3 minutes.</i></p>
<p align="center" style="color:grey;"><i>Click on the image to learn how to get started with Kestra in 4 minutes.</i></p>
## 🌟 What is Kestra?

View File

@@ -37,7 +37,7 @@ plugins {
id "com.vanniktech.maven.publish" version "0.34.0"
// OWASP dependency check
id "org.owasp.dependencycheck" version "12.1.5" apply false
id "org.owasp.dependencycheck" version "12.1.3" apply false
}
idea {
@@ -231,8 +231,45 @@ subprojects {subProj ->
environment 'ENV_TEST1', "true"
environment 'ENV_TEST2', "Pass by env"
// === Test Timeline Trace (Chrome trace format) ===
// Produces per-JVM ndjson under build/test-timelines/*.jsonl and a merged array via :mergeTestTimeline
// Each event has: start time (ts, µs since epoch), end via dur, and absolute duration (dur, µs)
doFirst {
file("${buildDir}/test-results/test-timelines").mkdirs()
}
if (subProj.name == 'core' || subProj.name == 'jdbc-h2' || subProj.name == 'jdbc-mysql' || subProj.name == 'jdbc-postgres') {
def jvmName = java.lang.management.ManagementFactory.runtimeMXBean.name
def pid = jvmName.tokenize('@')[0]
def traceDir = file("${buildDir}/test-results/test-timelines")
def traceFile = new File(traceDir, "${project.name}-${name}-${pid}.jsonl")
def starts = new java.util.concurrent.ConcurrentHashMap<Object, Long>()
beforeTest { org.gradle.api.tasks.testing.TestDescriptor d ->
// epoch millis to allow cross-JVM merge
starts.put(d, System.currentTimeMillis())
}
afterTest { org.gradle.api.tasks.testing.TestDescriptor d, org.gradle.api.tasks.testing.TestResult r ->
def st = starts.remove(d)
if (st != null) {
def en = System.currentTimeMillis()
long tsMicros = st * 1000L // start time (µs since epoch)
long durMicros = (en - st) * 1000L // duration (µs)
def ev = [
name: (d.className ? d.className + '.' + d.name : d.name),
cat : 'test',
ph : 'X', // Complete event with duration
ts : tsMicros,
dur : durMicros,
pid : project.name, // group by project/module
tid : "${name}-worker-${pid}",
args: [result: r.resultType.toString()]
]
synchronized (traceFile.absolutePath.intern()) {
traceFile << (groovy.json.JsonOutput.toJson(ev) + System.lineSeparator())
}
}
}
if (subProj.name == 'core' || subProj.name == 'jdbc-h2') {
// JUnit 5 parallel settings
systemProperty 'junit.jupiter.execution.parallel.enabled', 'true'
systemProperty 'junit.jupiter.execution.parallel.mode.default', 'concurrent'
@@ -253,7 +290,53 @@ subprojects {subProj ->
}
}
}
// Root-level aggregator: merge timelines from ALL modules into one Chrome trace
if (project == rootProject) {
tasks.register('mergeTestTimeline') {
group = 'verification'
description = 'Merge per-worker test timeline ndjson from all modules into a single Chrome Trace JSON array.'
doLast {
def collectedFiles = [] as List<File>
// Collect *.jsonl files from every subproject
rootProject.subprojects.each { p ->
def dir = p.file("${p.buildDir}/test-results/test-timelines")
if (dir.exists()) {
collectedFiles.addAll(p.fileTree(dir: dir, include: '*.jsonl').files)
}
}
if (collectedFiles.isEmpty()) {
logger.lifecycle("No timeline files found in any subproject. Run tests first (e.g., './gradlew test --parallel').")
return
}
collectedFiles = collectedFiles.sort { it.name }
def outDir = rootProject.file("${rootProject.buildDir}/reports/test-timelines-report")
outDir.mkdirs()
def out = new File(outDir, "all-test-timelines.json")
out.withWriter('UTF-8') { w ->
w << '['
boolean first = true
collectedFiles.each { f ->
f.eachLine { line ->
def trimmed = line?.trim()
if (trimmed) {
if (!first) w << ','
w << trimmed
first = false
}
}
}
w << ']'
}
logger.lifecycle("Merged ${collectedFiles.size()} files into ${out} — open it in chrome://tracing or Perfetto UI.")
}
}
}
/**********************************************************************************************************************\
* End-to-End Tests
**********************************************************************************************************************/

View File

@@ -49,7 +49,7 @@ import java.util.concurrent.Callable;
@Introspected
public class App implements Callable<Integer> {
public static void main(String[] args) {
execute(App.class, new String [] { Environment.CLI }, args);
execute(App.class, args);
}
@Override
@@ -57,13 +57,13 @@ public class App implements Callable<Integer> {
return PicocliRunner.call(App.class, "--help");
}
protected static void execute(Class<?> cls, String[] environments, String... args) {
protected static void execute(Class<?> cls, String... args) {
// Log Bridge
SLF4JBridgeHandler.removeHandlersForRootLogger();
SLF4JBridgeHandler.install();
// Init ApplicationContext
ApplicationContext applicationContext = App.applicationContext(cls, environments, args);
ApplicationContext applicationContext = App.applicationContext(cls, args);
// Call Picocli command
int exitCode = 0;
@@ -80,6 +80,17 @@ public class App implements Callable<Integer> {
System.exit(Objects.requireNonNullElse(exitCode, 0));
}
/**
* Create an {@link ApplicationContext} with additional properties based on configuration files (--config) and
* forced Properties from current command.
*
* @param args args passed to java app
* @return the application context created
*/
protected static ApplicationContext applicationContext(Class<?> mainClass,
String[] args) {
return applicationContext(mainClass, new String [] { Environment.CLI }, args);
}
/**
* Create an {@link ApplicationContext} with additional properties based on configuration files (--config) and

View File

@@ -2,27 +2,19 @@ package io.kestra.cli.commands.servers;
import io.kestra.cli.AbstractCommand;
import io.kestra.core.contexts.KestraContext;
import lombok.extern.slf4j.Slf4j;
import jakarta.annotation.PostConstruct;
import picocli.CommandLine;
@Slf4j
public abstract class AbstractServerCommand extends AbstractCommand implements ServerCommandInterface {
abstract public class AbstractServerCommand extends AbstractCommand implements ServerCommandInterface {
@CommandLine.Option(names = {"--port"}, description = "The port to bind")
Integer serverPort;
@Override
public Integer call() throws Exception {
log.info("Machine information: {} available cpu(s), {}MB max memory, Java version {}", Runtime.getRuntime().availableProcessors(), maxMemoryInMB(), Runtime.version());
this.shutdownHook(true, () -> KestraContext.getContext().shutdown());
return super.call();
}
private long maxMemoryInMB() {
return Runtime.getRuntime().maxMemory() / 1024 / 1024;
}
protected static int defaultWorkerThread() {
return Runtime.getRuntime().availableProcessors() * 8;
}

View File

@@ -167,8 +167,6 @@ kestra:
open-urls:
- "/ping"
- "/api/v1/executions/webhook/"
- "/api/v1/main/executions/webhook/"
- "/api/v1/*/executions/webhook/"
preview:
initial-rows: 100

View File

@@ -37,7 +37,7 @@ class AppTest {
final String[] args = new String[]{"server", serverType, "--help"};
try (ApplicationContext ctx = App.applicationContext(App.class, new String [] { Environment.CLI }, args)) {
try (ApplicationContext ctx = App.applicationContext(App.class, args)) {
new CommandLine(App.class, new MicronautFactory(ctx)).execute(args);
assertTrue(ctx.getProperty("kestra.server-type", ServerType.class).isEmpty());
@@ -52,7 +52,7 @@ class AppTest {
final String[] argsWithMissingParams = new String[]{"flow", "namespace", "update"};
try (ApplicationContext ctx = App.applicationContext(App.class, new String [] { Environment.CLI }, argsWithMissingParams)) {
try (ApplicationContext ctx = App.applicationContext(App.class, argsWithMissingParams)) {
new CommandLine(App.class, new MicronautFactory(ctx)).execute(argsWithMissingParams);
assertThat(out.toString()).startsWith("Missing required parameters: ");

View File

@@ -18,7 +18,6 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import org.junitpioneer.jupiter.RetryingTest;
import static io.kestra.core.utils.Rethrow.throwRunnable;
import static org.assertj.core.api.Assertions.assertThat;
@@ -95,7 +94,7 @@ class FileChangedEventListenerTest {
);
}
@RetryingTest(2)
@Test
void testWithPluginDefault() throws IOException, TimeoutException {
var tenant = TestsUtils.randomTenant(FileChangedEventListenerTest.class.getName(), "testWithPluginDefault");
// remove the flow if it already exists

View File

@@ -84,7 +84,7 @@ dependencies {
testImplementation "org.testcontainers:testcontainers:1.21.3"
testImplementation "org.testcontainers:junit-jupiter:1.21.3"
testImplementation "org.bouncycastle:bcpkix-jdk18on"
testImplementation "org.bouncycastle:bcpkix-jdk18on:1.81"
testImplementation "org.wiremock:wiremock-jetty12"
}

View File

@@ -2,13 +2,12 @@ package io.kestra.core.models;
import io.kestra.core.utils.MapUtils;
import jakarta.annotation.Nullable;
import jakarta.validation.constraints.NotEmpty;
import jakarta.validation.constraints.NotNull;
import java.util.*;
import java.util.function.Predicate;
import java.util.stream.Collectors;
public record Label(@NotEmpty String key, @NotEmpty String value) {
public record Label(@NotNull String key, @NotNull String value) {
public static final String SYSTEM_PREFIX = "system.";
// system labels
@@ -42,7 +41,7 @@ public record Label(@NotEmpty String key, @NotEmpty String value) {
public static Map<String, String> toMap(@Nullable List<Label> labels) {
if (labels == null || labels.isEmpty()) return Collections.emptyMap();
return labels.stream()
.filter(label -> label.value() != null && !label.value().isEmpty() && label.key() != null && !label.key().isEmpty())
.filter(label -> label.value() != null && label.key() != null)
// using an accumulator in case labels with the same key exists: the second is kept
.collect(Collectors.toMap(Label::key, Label::value, (first, second) -> second, LinkedHashMap::new));
}
@@ -57,7 +56,6 @@ public record Label(@NotEmpty String key, @NotEmpty String value) {
public static List<Label> deduplicate(@Nullable List<Label> labels) {
if (labels == null || labels.isEmpty()) return Collections.emptyList();
return toMap(labels).entrySet().stream()
.filter(getEntryNotEmptyPredicate())
.map(entry -> new Label(entry.getKey(), entry.getValue()))
.collect(Collectors.toCollection(ArrayList::new));
}
@@ -72,7 +70,6 @@ public record Label(@NotEmpty String key, @NotEmpty String value) {
if (map == null || map.isEmpty()) return List.of();
return map.entrySet()
.stream()
.filter(getEntryNotEmptyPredicate())
.map(entry -> new Label(entry.getKey(), entry.getValue()))
.toList();
}
@@ -91,14 +88,4 @@ public record Label(@NotEmpty String key, @NotEmpty String value) {
}
return map;
}
/**
* Provides predicate for not empty entries.
*
* @return The non-empty filter
*/
public static Predicate<Map.Entry<String, String>> getEntryNotEmptyPredicate() {
return entry -> entry.getKey() != null && !entry.getKey().isEmpty() &&
entry.getValue() != null && !entry.getValue().isEmpty();
}
}

View File

@@ -254,7 +254,19 @@ public record QueryFilter(
*
* @return List of {@code ResourceField} with resource names, fields, and operations.
*/
public static List<ResourceField> asResourceList() {
return Arrays.stream(values())
.map(Resource::toResourceField)
.toList();
}
private static ResourceField toResourceField(Resource resource) {
List<FieldOp> fieldOps = resource.supportedField().stream()
.map(Resource::toFieldInfo)
.toList();
return new ResourceField(resource.name().toLowerCase(), fieldOps);
}
private static FieldOp toFieldInfo(Field field) {
List<Operation> operations = field.supportedOp().stream()
.map(Resource::toOperation)
@@ -267,6 +279,9 @@ public record QueryFilter(
}
}
public record ResourceField(String name, List<FieldOp> fields) {
}
public record FieldOp(String name, String value, List<Operation> operations) {
}

View File

@@ -17,12 +17,31 @@ import java.util.List;
@Introspected
public class ExecutionUsage {
private final List<DailyExecutionStatistics> dailyExecutionsCount;
private final List<DailyExecutionStatistics> dailyTaskRunsCount;
public static ExecutionUsage of(final String tenantId,
final ExecutionRepositoryInterface executionRepository,
final ZonedDateTime from,
final ZonedDateTime to) {
List<DailyExecutionStatistics> dailyTaskRunsCount = null;
try {
dailyTaskRunsCount = executionRepository.dailyStatistics(
null,
tenantId,
null,
null,
null,
from,
to,
DateUtils.GroupType.DAY,
null,
true);
} catch (UnsupportedOperationException ignored) {
}
return ExecutionUsage.builder()
.dailyExecutionsCount(executionRepository.dailyStatistics(
null,
@@ -33,13 +52,28 @@ public class ExecutionUsage {
from,
to,
DateUtils.GroupType.DAY,
null))
null,
false))
.dailyTaskRunsCount(dailyTaskRunsCount)
.build();
}
public static ExecutionUsage of(final ExecutionRepositoryInterface repository,
final ZonedDateTime from,
final ZonedDateTime to) {
List<DailyExecutionStatistics> dailyTaskRunsCount = null;
try {
dailyTaskRunsCount = repository.dailyStatisticsForAllTenants(
null,
null,
null,
from,
to,
DateUtils.GroupType.DAY,
true
);
} catch (UnsupportedOperationException ignored) {}
return ExecutionUsage.builder()
.dailyExecutionsCount(repository.dailyStatisticsForAllTenants(
null,
@@ -47,8 +81,10 @@ public class ExecutionUsage {
null,
from,
to,
DateUtils.GroupType.DAY
DateUtils.GroupType.DAY,
false
))
.dailyTaskRunsCount(dailyTaskRunsCount)
.build();
}
}

View File

@@ -865,18 +865,20 @@ public class Execution implements DeletedInterface, TenantInterface {
* @param e the exception raise
* @return new taskRun with updated attempt with logs
*/
private FailedTaskRunWithLog lastAttemptsTaskRunForFailedExecution(TaskRun taskRun, TaskRunAttempt lastAttempt, Exception e) {
TaskRun failed = taskRun
.withAttempts(
Stream
.concat(
taskRun.getAttempts().stream().limit(taskRun.getAttempts().size() - 1),
Stream.of(lastAttempt.getState().isFailed() ? lastAttempt : lastAttempt.withState(State.Type.FAILED))
)
.toList()
);
private FailedTaskRunWithLog lastAttemptsTaskRunForFailedExecution(TaskRun taskRun,
TaskRunAttempt lastAttempt, Exception e) {
return new FailedTaskRunWithLog(
failed.getState().isFailed() ? failed : failed.withState(State.Type.FAILED),
taskRun
.withAttempts(
Stream
.concat(
taskRun.getAttempts().stream().limit(taskRun.getAttempts().size() - 1),
Stream.of(lastAttempt
.withState(State.Type.FAILED))
)
.toList()
)
.withState(State.Type.FAILED),
RunContextLogger.logEntries(loggingEventFromException(e), LogEntry.of(taskRun, kind))
);
}

View File

@@ -62,7 +62,6 @@ public abstract class AbstractFlow implements FlowInterface {
@JsonSerialize(using = ListOrMapOfLabelSerializer.class)
@JsonDeserialize(using = ListOrMapOfLabelDeserializer.class)
@Schema(implementation = Object.class, oneOf = {List.class, Map.class})
@Valid
List<Label> labels;
@Schema(additionalProperties = Schema.AdditionalPropertiesValue.TRUE)

View File

@@ -185,6 +185,34 @@ public class Trigger extends TriggerContext implements HasUID {
.build();
}
public static Trigger update(Trigger currentTrigger, Trigger newTrigger, ZonedDateTime nextExecutionDate) throws Exception {
Trigger updated = currentTrigger;
// If a backfill is created, we update the currentTrigger
// and set the nextExecutionDate() as the previous one
if (newTrigger.getBackfill() != null) {
updated = currentTrigger.toBuilder()
.backfill(
newTrigger
.getBackfill()
.toBuilder()
.end(newTrigger.getBackfill().getEnd() != null ? newTrigger.getBackfill().getEnd() : ZonedDateTime.now())
.currentDate(
newTrigger.getBackfill().getStart()
)
.previousNextExecutionDate(
currentTrigger.getNextExecutionDate())
.build())
.build();
}
return updated.toBuilder()
.nextExecutionDate(newTrigger.getDisabled() ?
null : nextExecutionDate)
.disabled(newTrigger.getDisabled())
.build();
}
public Trigger resetExecution(Flow flow, Execution execution, ConditionContext conditionContext) {
boolean disabled = this.getStopAfter() != null ? this.getStopAfter().contains(execution.getState().getCurrent()) : this.getDisabled();
if (!disabled) {
@@ -248,22 +276,27 @@ public class Trigger extends TriggerContext implements HasUID {
.build();
}
public Trigger withBackfill(final Backfill backfill) {
Trigger updated = this;
// If a backfill is created, we update the trigger
public Trigger initBackfill(Trigger newTrigger) {
// If a backfill is created, we update the currentTrigger
// and set the nextExecutionDate() as the previous one
if (backfill != null) {
updated = this.toBuilder()
if (newTrigger.getBackfill() != null) {
return this.toBuilder()
.backfill(
backfill
newTrigger
.getBackfill()
.toBuilder()
.end(backfill.getEnd() != null ? backfill.getEnd() : ZonedDateTime.now())
.currentDate(backfill.getStart())
.previousNextExecutionDate(this.getNextExecutionDate())
.end(newTrigger.getBackfill().getEnd() != null ? newTrigger.getBackfill().getEnd() : ZonedDateTime.now())
.currentDate(
newTrigger.getBackfill().getStart()
)
.previousNextExecutionDate(
this.getNextExecutionDate())
.build())
.build();
}
return updated;
return this;
}
// if the next date is after the backfill end, we remove the backfill

View File

@@ -144,7 +144,7 @@ public final class PluginDeserializer<T extends Plugin> extends JsonDeserializer
static String extractPluginRawIdentifier(final JsonNode node, final boolean isVersioningSupported) {
String type = Optional.ofNullable(node.get(TYPE)).map(JsonNode::textValue).orElse(null);
String version = Optional.ofNullable(node.get(VERSION)).map(JsonNode::asText).orElse(null);
String version = Optional.ofNullable(node.get(VERSION)).map(JsonNode::textValue).orElse(null);
if (type == null || type.isEmpty()) {
return null;

View File

@@ -25,6 +25,8 @@ import java.util.Optional;
import java.util.function.Function;
public interface ExecutionRepositoryInterface extends SaveRepositoryInterface<Execution>, QueryBuilderInterface<Executions.Fields> {
Boolean isTaskRunEnabled();
default Optional<Execution> findById(String tenantId, String id) {
return findById(tenantId, id, false);
}
@@ -94,6 +96,12 @@ public interface ExecutionRepositoryInterface extends SaveRepositoryInterface<Ex
Flux<Execution> findAllAsync(@Nullable String tenantId);
ArrayListTotal<TaskRun> findTaskRun(
Pageable pageable,
@Nullable String tenantId,
List<QueryFilter> filters
);
Execution delete(Execution execution);
Integer purge(Execution execution);
@@ -104,7 +112,8 @@ public interface ExecutionRepositoryInterface extends SaveRepositoryInterface<Ex
@Nullable String flowId,
@Nullable ZonedDateTime startDate,
@Nullable ZonedDateTime endDate,
@Nullable DateUtils.GroupType groupBy
@Nullable DateUtils.GroupType groupBy,
boolean isTaskRun
);
List<DailyExecutionStatistics> dailyStatistics(
@@ -116,7 +125,8 @@ public interface ExecutionRepositoryInterface extends SaveRepositoryInterface<Ex
@Nullable ZonedDateTime startDate,
@Nullable ZonedDateTime endDate,
@Nullable DateUtils.GroupType groupBy,
List<State.Type> state
List<State.Type> state,
boolean isTaskRun
);
@Getter

View File

@@ -5,7 +5,10 @@ import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.exceptions.InternalException;
import io.kestra.core.models.Label;
import io.kestra.core.models.executions.*;
import io.kestra.core.models.flows.*;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowInterface;
import io.kestra.core.models.flows.FlowWithException;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.property.Property;
import io.kestra.core.models.tasks.ExecutableTask;
import io.kestra.core.models.tasks.Task;
@@ -26,7 +29,6 @@ import org.apache.commons.lang3.stream.Streams;
import java.time.Instant;
import java.time.ZonedDateTime;
import java.util.*;
import java.util.stream.Collectors;
import static io.kestra.core.trace.Tracer.throwCallable;
import static io.kestra.core.utils.Rethrow.throwConsumer;
@@ -151,24 +153,17 @@ public final class ExecutableUtils {
currentFlow.getNamespace(),
currentFlow.getId()
)
.orElseThrow(() -> {
String msg = "Unable to find flow '" + subflowNamespace + "'.'" + subflowId + "' with revision '" + subflowRevision.orElse(0) + "'";
runContext.logger().error(msg);
return new IllegalStateException(msg);
});
.orElseThrow(() -> new IllegalStateException("Unable to find flow '" + subflowNamespace + "'.'" + subflowId + "' with revision '" + subflowRevision.orElse(0) + "'"));
if (flow.isDisabled()) {
String msg = "Cannot execute a flow which is disabled";
runContext.logger().error(msg);
throw new IllegalStateException(msg);
throw new IllegalStateException("Cannot execute a flow which is disabled");
}
if (flow instanceof FlowWithException fwe) {
String msg = "Cannot execute an invalid flow: " + fwe.getException();
runContext.logger().error(msg);
throw new IllegalStateException(msg);
throw new IllegalStateException("Cannot execute an invalid flow: " + fwe.getException());
}
List<Label> newLabels = inheritLabels ? new ArrayList<>(filterLabels(currentExecution.getLabels(), flow)) : new ArrayList<>(systemLabels(currentExecution));
List<Label> newLabels = inheritLabels ? new ArrayList<>(filterLabels(currentExecution.getLabels(), flow)) : new ArrayList<>(systemLabels(currentExecution));
if (labels != null) {
labels.forEach(throwConsumer(label -> newLabels.add(new Label(runContext.render(label.key()), runContext.render(label.value())))));
}
@@ -206,20 +201,7 @@ public final class ExecutableUtils {
.build()
)
.withScheduleDate(scheduleOnDate);
if(execution.getInputs().size()<inputs.size()) {
Map<String,Object>resolvedInputs=execution.getInputs();
for (var inputKey : inputs.keySet()) {
if (!resolvedInputs.containsKey(inputKey)) {
runContext.logger().warn(
"Input {} was provided by parent execution {} for subflow {}.{} but isn't declared at the subflow inputs",
inputKey,
currentExecution.getId(),
currentTask.subflowId().namespace(),
currentTask.subflowId().flowId()
);
}
}
}
// inject the traceparent into the new execution
propagator.ifPresent(pg -> pg.inject(Context.current(), execution, ExecutionTextMapSetter.INSTANCE));

View File

@@ -49,7 +49,15 @@ import java.time.Duration;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalTime;
import java.util.*;
import java.util.AbstractMap;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.regex.Matcher;
@@ -223,19 +231,6 @@ public class FlowInputOutput {
return new AbstractMap.SimpleEntry<>(it.input().getId(), it.value());
})
.collect(HashMap::new, (m,v)-> m.put(v.getKey(), v.getValue()), HashMap::putAll);
if (resolved.size() < data.size()) {
RunContext runContext = runContextFactory.of(flow, execution);
for (var inputKey : data.keySet()) {
if (!resolved.containsKey(inputKey)) {
runContext.logger().warn(
"Input {} was provided for workflow {}.{} but isn't declared in the workflow inputs",
inputKey,
flow.getNamespace(),
flow.getId()
);
}
}
}
return MapUtils.flattenToNestedMap(resolved);
}
@@ -318,15 +313,15 @@ public class FlowInputOutput {
});
resolvable.setInput(input);
Object value = resolvable.get().value();
// resolve default if needed
if (value == null && input.getDefaults() != null) {
value = resolveDefaultValue(input, runContext);
resolvable.isDefault(true);
}
// validate and parse input value
if (value == null) {
if (input.getRequired()) {
@@ -355,7 +350,7 @@ public class FlowInputOutput {
return resolvable.get();
}
public static Object resolveDefaultValue(Input<?> input, PropertyContext renderer) throws IllegalVariableEvaluationException {
return switch (input.getType()) {
case STRING, ENUM, SELECT, SECRET, EMAIL -> resolveDefaultPropertyAs(input, renderer, String.class);
@@ -372,7 +367,7 @@ public class FlowInputOutput {
case MULTISELECT -> resolveDefaultPropertyAsList(input, renderer, String.class);
};
}
@SuppressWarnings("unchecked")
private static <T> Object resolveDefaultPropertyAs(Input<?> input, PropertyContext renderer, Class<T> clazz) throws IllegalVariableEvaluationException {
return Property.as((Property<T>) input.getDefaults(), renderer, clazz);
@@ -381,7 +376,7 @@ public class FlowInputOutput {
private static <T> Object resolveDefaultPropertyAsList(Input<?> input, PropertyContext renderer, Class<T> clazz) throws IllegalVariableEvaluationException {
return Property.asList((Property<List<T>>) input.getDefaults(), renderer, clazz);
}
private RunContext buildRunContextForExecutionAndInputs(final FlowInterface flow, final Execution execution, Map<String, InputAndValue> dependencies) {
Map<String, Object> flattenInputs = MapUtils.flattenToNestedMap(dependencies.entrySet()
.stream()
@@ -458,7 +453,7 @@ public class FlowInputOutput {
if (data.getType() == null) {
return Optional.of(new AbstractMap.SimpleEntry<>(data.getId(), current));
}
final Type elementType = data instanceof ItemTypeInterface itemTypeInterface ? itemTypeInterface.getItemType() : null;
return Optional.of(new AbstractMap.SimpleEntry<>(
@@ -535,17 +530,17 @@ public class FlowInputOutput {
throw new Exception("Expected `" + type + "` but received `" + current + "` with errors:\n```\n" + e.getMessage() + "\n```");
}
}
public static Map<String, Object> renderFlowOutputs(List<Output> outputs, RunContext runContext) throws IllegalVariableEvaluationException {
if (outputs == null) return Map.of();
// render required outputs
Map<String, Object> outputsById = outputs
.stream()
.filter(output -> output.getRequired() == null || output.getRequired())
.collect(HashMap::new, (map, entry) -> map.put(entry.getId(), entry.getValue()), Map::putAll);
outputsById = runContext.render(outputsById);
// render optional outputs one by one to catch, log, and skip any error.
for (io.kestra.core.models.flows.Output output : outputs) {
if (Boolean.FALSE.equals(output.getRequired())) {
@@ -588,9 +583,9 @@ public class FlowInputOutput {
}
public void isDefault(boolean isDefault) {
this.input = new InputAndValue(this.input.input(), this.input.value(), this.input.enabled(), isDefault, this.input.exception());
this.input = new InputAndValue(this.input.input(), this.input.value(), this.input.enabled(), isDefault, this.input.exception());
}
public void setInput(final Input<?> input) {
this.input = new InputAndValue(input, this.input.value(), this.input.enabled(), this.input.isDefault(), this.input.exception());
}

View File

@@ -500,7 +500,7 @@ public class FlowableUtils {
ArrayList<ResolvedTask> result = new ArrayList<>();
int iteration = 0;
int index = 0;
for (Object current : distinctValue) {
try {
String resolvedValue = current instanceof String stringValue ? stringValue : MAPPER.writeValueAsString(current);
@@ -508,7 +508,7 @@ public class FlowableUtils {
result.add(ResolvedTask.builder()
.task(task)
.value(resolvedValue)
.iteration(iteration)
.iteration(index++)
.parentId(parentTaskRun.getId())
.build()
);
@@ -516,7 +516,6 @@ public class FlowableUtils {
} catch (JsonProcessingException e) {
throw new IllegalVariableEvaluationException(e);
}
iteration++;
}
return result;

View File

@@ -45,7 +45,7 @@ final class Secret {
for (var entry: data.entrySet()) {
if (entry.getValue() instanceof Map map) {
// if some value are of type EncryptedString we decode them and replace the object
if (map.get("type") instanceof String typeStr && EncryptedString.TYPE.equalsIgnoreCase(typeStr)) {
if (EncryptedString.TYPE.equalsIgnoreCase((String)map.get("type"))) {
try {
String decoded = decrypt((String) map.get("value"));
decryptedMap.put(entry.getKey(), decoded);

View File

@@ -168,7 +168,6 @@ public class Extension extends AbstractExtension {
functions.put("randomPort", new RandomPortFunction());
functions.put("fileExists", fileExistsFunction);
functions.put("isFileEmpty", isFileEmptyFunction);
functions.put("nanoId", new NanoIDFunction());
functions.put("tasksWithState", new TasksWithStateFunction());
functions.put(HttpFunction.NAME, httpFunction);
return functions;

View File

@@ -30,6 +30,6 @@ public class TimestampMicroFilter extends AbstractDate implements Filter {
ZoneId zoneId = zoneId(timeZone);
ZonedDateTime date = convert(input, zoneId, existingFormat);
return String.valueOf(TimeUnit.SECONDS.toMicros(date.toEpochSecond()) + TimeUnit.NANOSECONDS.toMicros(date.getNano()));
return String.valueOf(TimeUnit.SECONDS.toNanos(date.toEpochSecond()) + TimeUnit.NANOSECONDS.toMicros(date.getNano()));
}
}

View File

@@ -1,66 +0,0 @@
package io.kestra.core.runners.pebble.functions;
import io.pebbletemplates.pebble.error.PebbleException;
import io.pebbletemplates.pebble.extension.Function;
import io.pebbletemplates.pebble.template.EvaluationContext;
import io.pebbletemplates.pebble.template.PebbleTemplate;
import java.security.SecureRandom;
import java.util.List;
import java.util.Map;
public class NanoIDFunction implements Function {
private static final int DEFAULT_LENGTH = 21;
private static final char[] DEFAULT_ALPHABET = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-_".toCharArray();
private static final SecureRandom secureRandom = new SecureRandom();
private static final String LENGTH = "length";
private static final String ALPHABET = "alphabet";
private static final int MAX_LENGTH = 1000;
@Override
public Object execute(
Map<String, Object> args, PebbleTemplate self, EvaluationContext context, int lineNumber) {
int length = DEFAULT_LENGTH;
if (args.containsKey(LENGTH) && (args.get(LENGTH) instanceof Long)) {
length = parseLength(args, self, lineNumber);
}
char[] alphabet = DEFAULT_ALPHABET;
if (args.containsKey(ALPHABET) && (args.get(ALPHABET) instanceof String)) {
alphabet = ((String) args.get(ALPHABET)).toCharArray();
}
return createNanoID(length, alphabet);
}
private static int parseLength(Map<String, Object> args, PebbleTemplate self, int lineNumber) {
var value = (Long) args.get(LENGTH);
if(value > MAX_LENGTH) {
throw new PebbleException(
null,
"The 'nanoId()' function field 'length' must be lower than: " + MAX_LENGTH,
lineNumber,
self.getName());
}
return Math.toIntExact(value);
}
@Override
public List<String> getArgumentNames() {
return List.of(LENGTH,ALPHABET);
}
String createNanoID(int length, char[] alphabet){
final char[] data = new char[length];
final byte[] bytes = new byte[length];
final int mask = alphabet.length-1;
secureRandom.nextBytes(bytes);
for (int i = 0; i < length; ++i) {
data[i] = alphabet[bytes[i] & mask];
}
return String.valueOf(data);
}
}

View File

@@ -163,21 +163,21 @@ public final class JacksonMapper {
.build();
}
public static Pair<JsonNode, JsonNode> getBiDirectionalDiffs(Object before, Object after) {
JsonNode beforeNode = MAPPER.valueToTree(before);
JsonNode afterNode = MAPPER.valueToTree(after);
public static Pair<JsonNode, JsonNode> getBiDirectionalDiffs(Object previous, Object current) {
JsonNode previousJson = MAPPER.valueToTree(previous);
JsonNode newJson = MAPPER.valueToTree(current);
JsonNode patch = JsonDiff.asJson(beforeNode, afterNode);
JsonNode revert = JsonDiff.asJson(afterNode, beforeNode);
JsonNode patchPrevToNew = JsonDiff.asJson(previousJson, newJson);
JsonNode patchNewToPrev = JsonDiff.asJson(newJson, previousJson);
return Pair.of(patch, revert);
return Pair.of(patchPrevToNew, patchNewToPrev);
}
public static JsonNode applyPatchesOnJsonNode(JsonNode jsonObject, List<JsonNode> patches) {
for (JsonNode patch : patches) {
try {
// Required for ES
if (patch.findValue("value") == null && !patch.isEmpty()) {
if (patch.findValue("value") == null) {
((ObjectNode) patch.get(0)).set("value", null);
}
jsonObject = JsonPatch.fromJson(patch).apply(jsonObject);

View File

@@ -1,6 +1,5 @@
package io.kestra.core.storages.kv;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import java.time.Duration;
@@ -10,7 +9,6 @@ import java.util.Map;
import java.util.Optional;
@Getter
@EqualsAndHashCode
public class KVMetadata {
private String description;
private Instant expirationDate;
@@ -19,18 +17,14 @@ public class KVMetadata {
if (ttl != null && ttl.isNegative()) {
throw new IllegalArgumentException("ttl cannot be negative");
}
this.description = description;
if (ttl != null) {
this.expirationDate = Instant.now().plus(ttl);
}
}
public KVMetadata(String description, Instant expirationDate) {
this.description = description;
this.expirationDate = expirationDate;
}
public KVMetadata(Map<String, String> metadata) {
if (metadata == null) {
return;
@@ -52,9 +46,4 @@ public class KVMetadata {
}
return map;
}
@Override
public String toString() {
return "[description=" + description + ", expirationDate=" + expirationDate + "]";
}
}

View File

@@ -4,9 +4,7 @@ import io.kestra.core.exceptions.ResourceExpiredException;
import io.kestra.core.storages.StorageContext;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.net.URI;
import java.time.Duration;
import java.util.List;
import java.util.Optional;
import java.util.regex.Pattern;
@@ -106,33 +104,8 @@ public interface KVStore {
default boolean exists(String key) throws IOException {
return list().stream().anyMatch(kvEntry -> kvEntry.key().equals(key));
}
/**
* Finds a KV entry with associated metadata for a given key.
*
* @param key the KV entry key.
* @return an optional of {@link KVValueAndMetadata}.
*
* @throws UncheckedIOException if an error occurred while executing the operation on the K/V store.
*/
default Optional<KVValueAndMetadata> findMetadataAndValue(final String key) throws UncheckedIOException {
try {
return get(key).flatMap(entry ->
{
try {
return getValue(entry.key()).map(current -> new KVValueAndMetadata(new KVMetadata(entry.description(), entry.expirationDate()), current.value()));
} catch (IOException e) {
throw new UncheckedIOException(e);
} catch (ResourceExpiredException e) {
return Optional.empty();
}
}
);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
Pattern KEY_VALIDATOR_PATTERN = Pattern.compile("[a-zA-Z0-9][a-zA-Z0-9._-]*");
/**

View File

@@ -51,8 +51,8 @@ public class Version implements Comparable<Version> {
strVersion.split("\\.");
try {
final int majorVersion = Integer.parseInt(versions[0]);
final Integer minorVersion = versions.length > 1 ? Integer.parseInt(versions[1]) : null;
final Integer incrementalVersion = versions.length > 2 ? Integer.parseInt(versions[2]) : null;
final int minorVersion = versions.length > 1 ? Integer.parseInt(versions[1]) : 0;
final int incrementalVersion = versions.length > 2 ? Integer.parseInt(versions[2]) : 0;
return new Version(
majorVersion,
@@ -67,48 +67,44 @@ public class Version implements Comparable<Version> {
}
/**
* Resolves the most appropriate stable version from a collection, based on a given input version.
* <p>
* The matching rules are:
* <ul>
* <li>If {@code from} specifies only a major version (e.g. {@code 1}), return the latest stable version
* with the same major (e.g. {@code 1.2.3}).</li>
* <li>If {@code from} specifies a major and minor version only (e.g. {@code 1.2}), return the latest
* stable version with the same major and minor (e.g. {@code 1.2.3}).</li>
* <li>If {@code from} specifies a full version with major, minor, and patch (e.g. {@code 1.2.2}),
* then only return it if it is exactly present (and stable) in {@code versions}.
* No "upgrade" is performed in this case.</li>
* <li>If no suitable version is found, returns {@code null}.</li>
* </ul>
* Returns the most recent stable version compatible with the given {@link Version}.
*
* @param from the reference version (may specify only major, or major+minor, or major+minor+patch).
* @param versions the collection of candidate versions to resolve against.
* @return the best matching stable version, or {@code null} if none match.
* <p>Resolution strategy:</p>
* <ol>
* <li>If {@code from} is present in {@code versions}, return it directly.</li>
* <li>Otherwise, return the latest version with the same major and minor.</li>
* <li>If none found and {@code from.majorVersion() > 0}, return the latest with the same major.</li>
* <li>Return {@code null} if no compatible version is found.</li>
* </ol>
*
* @param from the current version
* @param versions available versions
* @return the most recent compatible stable version, or {@code null} if none
*/
public static Version getStable(final Version from, final Collection<Version> versions) {
// Case 1: "from" is only a major (e.g. 1)
if (from.hasOnlyMajor()) {
List<Version> sameMajor = versions.stream()
.filter(v -> v.majorVersion() == from.majorVersion())
.toList();
return sameMajor.isEmpty() ? null : Version.getLatest(sameMajor);
}
// Case 2: "from" is major+minor only (e.g. 1.2)
if (from.hasMajorAndMinorOnly()) {
List<Version> sameMinor = versions.stream()
.filter(v -> v.majorVersion() == from.majorVersion()
&& v.minorVersion() == from.minorVersion())
.toList();
return sameMinor.isEmpty() ? null : Version.getLatest(sameMinor);
}
// Case 3: "from" is full version (major+minor+patch)
if (versions.contains(from)) {
return from;
}
// No match
// Prefer same major+minor stable versions
List<Version> sameMinorStable = versions.stream()
.filter(v -> v.majorVersion() == from.majorVersion() && v.minorVersion() == from.minorVersion())
.toList();
if (!sameMinorStable.isEmpty()) {
return Version.getLatest(sameMinorStable);
}
if (from.majorVersion() > 0) {
// Fallback: any stable version with the same major
List<Version> sameMajorStable = versions.stream()
.filter(v -> v.majorVersion() == from.majorVersion())
.toList();
if (!sameMajorStable.isEmpty()) {
return Version.getLatest(sameMajorStable);
}
}
return null;
}
@@ -159,8 +155,8 @@ public class Version implements Comparable<Version> {
}
private final int majorVersion;
private final Integer minorVersion;
private final Integer patchVersion;
private final int minorVersion;
private final int incrementalVersion;
private final Qualifier qualifier;
private final String originalVersion;
@@ -170,14 +166,14 @@ public class Version implements Comparable<Version> {
*
* @param majorVersion the major version (must be superior or equal to 0).
* @param minorVersion the minor version (must be superior or equal to 0).
* @param patchVersion the incremental version (must be superior or equal to 0).
* @param incrementalVersion the incremental version (must be superior or equal to 0).
* @param qualifier the qualifier.
*/
public Version(final int majorVersion,
final int minorVersion,
final int patchVersion,
final int incrementalVersion,
final String qualifier) {
this(majorVersion, minorVersion, patchVersion, qualifier, null);
this(majorVersion, minorVersion, incrementalVersion, qualifier, null);
}
/**
@@ -185,25 +181,25 @@ public class Version implements Comparable<Version> {
*
* @param majorVersion the major version (must be superior or equal to 0).
* @param minorVersion the minor version (must be superior or equal to 0).
* @param patchVersion the incremental version (must be superior or equal to 0).
* @param incrementalVersion the incremental version (must be superior or equal to 0).
* @param qualifier the qualifier.
* @param originalVersion the original string version.
*/
private Version(final Integer majorVersion,
final Integer minorVersion,
final Integer patchVersion,
private Version(final int majorVersion,
final int minorVersion,
final int incrementalVersion,
final String qualifier,
final String originalVersion) {
this.majorVersion = requirePositive(majorVersion, "major");
this.minorVersion = requirePositive(minorVersion, "minor");
this.patchVersion = requirePositive(patchVersion, "incremental");
this.incrementalVersion = requirePositive(incrementalVersion, "incremental");
this.qualifier = qualifier != null ? new Qualifier(qualifier) : null;
this.originalVersion = originalVersion;
}
private static Integer requirePositive(Integer version, final String message) {
if (version != null && version < 0) {
private static int requirePositive(int version, final String message) {
if (version < 0) {
throw new IllegalArgumentException(String.format("The '%s' version must super or equal to 0", message));
}
return version;
@@ -214,11 +210,11 @@ public class Version implements Comparable<Version> {
}
public int minorVersion() {
return minorVersion != null ? minorVersion : 0;
return minorVersion;
}
public int patchVersion() {
return patchVersion != null ? patchVersion : 0;
public int incrementalVersion() {
return incrementalVersion;
}
public Qualifier qualifier() {
@@ -233,9 +229,9 @@ public class Version implements Comparable<Version> {
if (this == o) return true;
if (!(o instanceof Version)) return false;
Version version = (Version) o;
return Objects.equals(majorVersion,version.majorVersion) &&
Objects.equals(minorVersion, version.minorVersion) &&
Objects.equals(patchVersion,version.patchVersion) &&
return majorVersion == version.majorVersion &&
minorVersion == version.minorVersion &&
incrementalVersion == version.incrementalVersion &&
Objects.equals(qualifier, version.qualifier);
}
@@ -244,7 +240,7 @@ public class Version implements Comparable<Version> {
*/
@Override
public int hashCode() {
return Objects.hash(majorVersion, minorVersion, patchVersion, qualifier);
return Objects.hash(majorVersion, minorVersion, incrementalVersion, qualifier);
}
/**
@@ -254,7 +250,7 @@ public class Version implements Comparable<Version> {
public String toString() {
if (originalVersion != null) return originalVersion;
String version = majorVersion + "." + minorVersion + "." + patchVersion;
String version = majorVersion + "." + minorVersion + "." + incrementalVersion;
return (qualifier != null) ? version +"-" + qualifier : version;
}
@@ -274,7 +270,7 @@ public class Version implements Comparable<Version> {
return compareMinor;
}
int compareIncremental = Integer.compare(that.patchVersion, this.patchVersion);
int compareIncremental = Integer.compare(that.incrementalVersion, this.incrementalVersion);
if (compareIncremental != 0) {
return compareIncremental;
}
@@ -289,21 +285,6 @@ public class Version implements Comparable<Version> {
return this.qualifier.compareTo(that.qualifier);
}
/**
* @return true if only major is specified (e.g. "1")
*/
private boolean hasOnlyMajor() {
return minorVersion == null && patchVersion == null;
}
/**
* @return true if major+minor are specified, but no patch (e.g. "1.2")
*/
private boolean hasMajorAndMinorOnly() {
return minorVersion != null && patchVersion == null;
}
/**
* Checks whether this version is before the given one.

View File

@@ -46,19 +46,16 @@ public class VersionProvider {
this.date = loadTime(gitProperties);
this.version = loadVersion(buildProperties, gitProperties);
// check the version in the settings and update if needed, we didn't use it would allow us to detect incompatible update later if needed
settingRepository.ifPresent(
settingRepositoryInterface -> persistVersion(settingRepositoryInterface, version));
}
private static synchronized void persistVersion(SettingRepositoryInterface settingRepositoryInterface, String version) {
Optional<Setting> versionSetting = settingRepositoryInterface.findByKey(Setting.INSTANCE_VERSION);
if (versionSetting.isEmpty() || !versionSetting.get().getValue().equals(version)) {
settingRepositoryInterface.save(Setting.builder()
.key(Setting.INSTANCE_VERSION)
.value(version)
.build()
);
// check the version in the settings and update if needed, we did't use it would allow us to detect incompatible update later if needed
if (settingRepository.isPresent()) {
Optional<Setting> versionSetting = settingRepository.get().findByKey(Setting.INSTANCE_VERSION);
if (versionSetting.isEmpty() || !versionSetting.get().getValue().equals(this.version)) {
settingRepository.get().save(Setting.builder()
.key(Setting.INSTANCE_VERSION)
.value(this.version)
.build()
);
}
}
}

View File

@@ -21,7 +21,6 @@ import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.constraints.NotNull;
import lombok.*;
import lombok.experimental.SuperBuilder;
import lombok.extern.slf4j.Slf4j;
import java.util.Optional;
@@ -69,7 +68,6 @@ import java.util.Optional;
)
}
)
@Slf4j
public class Exit extends Task implements ExecutionUpdatableTask {
@NotNull
@Schema(
@@ -106,13 +104,12 @@ public class Exit extends Task implements ExecutionUpdatableTask {
// ends all parents
while (newTaskRun.getParentTaskRunId() != null) {
newTaskRun = newExecution.findTaskRunByTaskRunId(newTaskRun.getParentTaskRunId()).withState(exitState);
newExecution = newExecution.withTaskRun(newTaskRun);
newExecution = execution.withTaskRun(newTaskRun);
}
return newExecution;
} catch (InternalException e) {
// in case we cannot update the last not terminated task run, we ignore it
log.warn("Unable to update the taskrun state", e);
return execution.withState(exitState);
return execution;
}
})
.orElse(execution)

View File

@@ -155,7 +155,6 @@ public class Labels extends Task implements ExecutionUpdatableTask {
newLabels.putAll(labelsAsMap);
return execution.withLabels(newLabels.entrySet().stream()
.filter(Label.getEntryNotEmptyPredicate())
.map(entry -> new Label(
entry.getKey(),
entry.getValue()

View File

@@ -216,46 +216,49 @@ public class Subflow extends Task implements ExecutableTask<Subflow.Output>, Chi
VariablesService variablesService = ((DefaultRunContext) runContext).getApplicationContext().getBean(VariablesService.class);
if (this.wait) { // we only compute outputs if we wait for the subflow
List<io.kestra.core.models.flows.Output> subflowOutputs = flow.getOutputs();
boolean isOutputsAllowed = runContext
.<Boolean>pluginConfiguration(PLUGIN_FLOW_OUTPUTS_ENABLED)
.orElse(true);
List<io.kestra.core.models.flows.Output> subflowOutputs = flow.getOutputs();
// region [deprecated] Subflow outputs feature
if (subflowOutputs == null && this.getOutputs() != null) {
boolean isOutputsAllowed = runContext
.<Boolean>pluginConfiguration(PLUGIN_FLOW_OUTPUTS_ENABLED)
.orElse(true);
if (isOutputsAllowed) {
try {
subflowOutputs = this.getOutputs().entrySet().stream()
.<io.kestra.core.models.flows.Output>map(entry -> io.kestra.core.models.flows.Output
.builder()
.id(entry.getKey())
.value(entry.getValue())
.required(true)
.build()
)
.toList();
} catch (Exception e) {
Variables variables = variablesService.of(StorageContext.forTask(taskRun), builder.build());
return failSubflowDueToOutput(runContext, taskRun, execution, e, variables);
}
} else {
runContext.logger().warn("Defining outputs inside the Subflow task is not allowed.");
}
if (subflowOutputs == null && isOutputsAllowed && this.getOutputs() != null) {
subflowOutputs = this.getOutputs().entrySet().stream()
.<io.kestra.core.models.flows.Output>map(entry -> io.kestra.core.models.flows.Output
.builder()
.id(entry.getKey())
.value(entry.getValue())
.required(true)
.build()
)
.toList();
}
//endregion
if (subflowOutputs != null && !subflowOutputs.isEmpty()) {
try {
Map<String, Object> rOutputs = FlowInputOutput.renderFlowOutputs(subflowOutputs, runContext);
Map<String, Object> outputs = FlowInputOutput.renderFlowOutputs(subflowOutputs, runContext);
FlowInputOutput flowInputOutput = ((DefaultRunContext)runContext).getApplicationContext().getBean(FlowInputOutput.class); // this is hacking
if (flow.getOutputs() != null && flowInputOutput != null) {
rOutputs = flowInputOutput.typedOutputs(flow, execution, rOutputs);
outputs = flowInputOutput.typedOutputs(flow, execution, outputs);
}
builder.outputs(rOutputs);
builder.outputs(outputs);
} catch (Exception e) {
runContext.logger().warn("Failed to extract outputs with the error: '{}'", e.getLocalizedMessage(), e);
var state = State.Type.fail(this);
Variables variables = variablesService.of(StorageContext.forTask(taskRun), builder.build());
return failSubflowDueToOutput(runContext, taskRun, execution, e, variables);
taskRun = taskRun
.withState(state)
.withAttempts(Collections.singletonList(TaskRunAttempt.builder().state(new State().withState(state)).build()))
.withOutputs(variables);
return Optional.of(SubflowExecutionResult.builder()
.executionId(execution.getId())
.state(State.Type.FAILED)
.parentTaskRun(taskRun)
.build());
}
}
}
@@ -279,21 +282,6 @@ public class Subflow extends Task implements ExecutableTask<Subflow.Output>, Chi
return Optional.of(ExecutableUtils.subflowExecutionResult(taskRun, execution));
}
private Optional<SubflowExecutionResult> failSubflowDueToOutput(RunContext runContext, TaskRun taskRun, Execution execution, Exception e, Variables outputs) {
runContext.logger().error("Failed to extract outputs with the error: '{}'", e.getLocalizedMessage(), e);
var state = State.Type.fail(this);
taskRun = taskRun
.withState(state)
.withAttempts(Collections.singletonList(TaskRunAttempt.builder().state(new State().withState(state)).build()))
.withOutputs(outputs);
return Optional.of(SubflowExecutionResult.builder()
.executionId(execution.getId())
.state(State.Type.FAILED)
.parentTaskRun(taskRun)
.build());
}
@Override
public boolean waitForExecution() {
return this.wait;

View File

@@ -1,32 +1,19 @@
package io.kestra.core.models;
import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.models.validations.ModelValidator;
import jakarta.inject.Inject;
import jakarta.validation.ConstraintViolationException;
import org.junit.jupiter.api.Test;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import static org.assertj.core.api.Assertions.assertThat;
@KestraTest
class LabelTest {
@Inject
private ModelValidator modelValidator;
@Test
void shouldGetNestedMapGivenDistinctLabels() {
Map<String, Object> result = Label.toNestedMap(List.of(
new Label(Label.USERNAME, "test"),
new Label(Label.CORRELATION_ID, "id"),
new Label("", "bar"),
new Label(null, "bar"),
new Label("foo", ""),
new Label("baz", null)
)
new Label(Label.CORRELATION_ID, "id"))
);
assertThat(result).isEqualTo(
@@ -47,18 +34,6 @@ class LabelTest {
);
}
@Test
void toNestedMapShouldIgnoreEmptyOrNull() {
Map<String, Object> result = Label.toNestedMap(List.of(
new Label("", "bar"),
new Label(null, "bar"),
new Label("foo", ""),
new Label("baz", null))
);
assertThat(result).isEmpty();
}
@Test
void shouldGetMapGivenDistinctLabels() {
Map<String, String> result = Label.toMap(List.of(
@@ -84,18 +59,6 @@ class LabelTest {
);
}
@Test
void toMapShouldIgnoreEmptyOrNull() {
Map<String, String> result = Label.toMap(List.of(
new Label("", "bar"),
new Label(null, "bar"),
new Label("foo", ""),
new Label("baz", null))
);
assertThat(result).isEmpty();
}
@Test
void shouldDuplicateLabelsWithKeyOrderKept() {
List<Label> result = Label.deduplicate(List.of(
@@ -110,28 +73,4 @@ class LabelTest {
new Label(Label.CORRELATION_ID, "id")
);
}
@Test
void deduplicateShouldIgnoreEmptyAndNull() {
List<Label> result = Label.deduplicate(List.of(
new Label("", "bar"),
new Label(null, "bar"),
new Label("foo", ""),
new Label("baz", null))
);
assertThat(result).isEmpty();
}
@Test
void shouldValidateEmpty() {
Optional<ConstraintViolationException> validLabelResult = modelValidator.isValid(new Label("foo", "bar"));
assertThat(validLabelResult.isPresent()).isFalse();
Optional<ConstraintViolationException> emptyValueLabelResult = modelValidator.isValid(new Label("foo", ""));
assertThat(emptyValueLabelResult.isPresent()).isTrue();
Optional<ConstraintViolationException> emptyKeyLabelResult = modelValidator.isValid(new Label("", "bar"));
assertThat(emptyKeyLabelResult.isPresent()).isTrue();
}
}

View File

@@ -7,11 +7,12 @@ import io.kestra.core.junit.annotations.ExecuteFlow;
import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.junit.annotations.LoadFlows;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.triggers.Trigger;
import io.kestra.core.queues.QueueException;
import io.kestra.core.repositories.TriggerRepositoryInterface;
import io.kestra.core.runners.TestRunnerUtils;
import io.kestra.core.runners.RunnerUtils;
import io.kestra.core.serializers.YamlParser;
import io.kestra.core.services.GraphService;
import io.kestra.core.utils.GraphUtils;
@@ -44,7 +45,7 @@ class FlowGraphTest {
private TriggerRepositoryInterface triggerRepositoryInterface;
@Inject
private TestRunnerUtils runnerUtils;
private RunnerUtils runnerUtils;
@Test
void simple() throws IllegalVariableEvaluationException, IOException {

View File

@@ -13,20 +13,21 @@ import static org.assertj.core.api.Assertions.assertThat;
@KestraTest
public abstract class AbstractFeatureUsageReportTest {
@Inject
FeatureUsageReport featureUsageReport;
@Test
public void shouldGetReport() {
// When
Instant now = Instant.now();
FeatureUsageReport.UsageEvent event = featureUsageReport.report(
now,
now,
Reportable.TimeInterval.of(now.minus(Duration.ofDays(1)).atZone(ZoneId.systemDefault()), now.atZone(ZoneId.systemDefault()))
);
// Then
assertThat(event.getExecutions().getDailyExecutionsCount().size()).isGreaterThan(0);
assertThat(event.getExecutions().getDailyTaskRunsCount()).isNull();
}
}

View File

@@ -344,6 +344,27 @@ public abstract class AbstractExecutionRepositoryTest {
assertThat(executions.getTotal()).isEqualTo(8L);
}
@Test
protected void findTaskRun() {
var tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
inject(tenant);
ArrayListTotal<TaskRun> taskRuns = executionRepository.findTaskRun(Pageable.from(1, 10), tenant, null);
assertThat(taskRuns.getTotal()).isEqualTo(74L);
assertThat(taskRuns.size()).isEqualTo(10);
var filters = List.of(QueryFilter.builder()
.field(QueryFilter.Field.LABELS)
.operation(QueryFilter.Op.EQUALS)
.value(Map.of("key", "value"))
.build());
taskRuns = executionRepository.findTaskRun(Pageable.from(1, 10), tenant, filters);
assertThat(taskRuns.getTotal()).isEqualTo(1L);
assertThat(taskRuns.size()).isEqualTo(1);
}
@Test
protected void findById() {
var tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
@@ -442,7 +463,8 @@ public abstract class AbstractExecutionRepositoryTest {
ZonedDateTime.now().minusDays(10),
ZonedDateTime.now(),
null,
null);
null,
false);
assertThat(result.size()).isEqualTo(11);
assertThat(result.get(10).getExecutionCounts().size()).isEqualTo(11);
@@ -461,7 +483,8 @@ public abstract class AbstractExecutionRepositoryTest {
ZonedDateTime.now().minusDays(10),
ZonedDateTime.now(),
null,
null);
null,
false);
assertThat(result.size()).isEqualTo(11);
assertThat(result.get(10).getExecutionCounts().get(State.Type.SUCCESS)).isEqualTo(21L);
@@ -475,7 +498,8 @@ public abstract class AbstractExecutionRepositoryTest {
ZonedDateTime.now().minusDays(10),
ZonedDateTime.now(),
null,
null);
null,
false);
assertThat(result.size()).isEqualTo(11);
assertThat(result.get(10).getExecutionCounts().get(State.Type.SUCCESS)).isEqualTo(20L);
@@ -488,11 +512,93 @@ public abstract class AbstractExecutionRepositoryTest {
ZonedDateTime.now().minusDays(10),
ZonedDateTime.now(),
null,
null);
null,
false);
assertThat(result.size()).isEqualTo(11);
assertThat(result.get(10).getExecutionCounts().get(State.Type.SUCCESS)).isEqualTo(1L);
}
@Test
protected void taskRunsDailyStatistics() {
var tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
for (int i = 0; i < 28; i++) {
executionRepository.save(builder(
tenant,
i < 5 ? State.Type.RUNNING : (i < 8 ? State.Type.FAILED : State.Type.SUCCESS),
i < 15 ? null : "second"
).build());
}
executionRepository.save(builder(
tenant,
State.Type.SUCCESS,
"second"
).namespace(NamespaceUtils.SYSTEM_FLOWS_DEFAULT_NAMESPACE).build());
List<DailyExecutionStatistics> result = executionRepository.dailyStatistics(
null,
tenant,
null,
null,
null,
ZonedDateTime.now().minusDays(10),
ZonedDateTime.now(),
null,
null,
true);
assertThat(result.size()).isEqualTo(11);
assertThat(result.get(10).getExecutionCounts().size()).isEqualTo(11);
assertThat(result.get(10).getDuration().getAvg().toMillis()).isGreaterThan(0L);
assertThat(result.get(10).getExecutionCounts().get(State.Type.FAILED)).isEqualTo(3L * 2);
assertThat(result.get(10).getExecutionCounts().get(State.Type.RUNNING)).isEqualTo(5L * 2);
assertThat(result.get(10).getExecutionCounts().get(State.Type.SUCCESS)).isEqualTo(57L);
result = executionRepository.dailyStatistics(
null,
tenant,
List.of(FlowScope.USER, FlowScope.SYSTEM),
null,
null,
ZonedDateTime.now().minusDays(10),
ZonedDateTime.now(),
null,
null,
true);
assertThat(result.size()).isEqualTo(11);
assertThat(result.get(10).getExecutionCounts().get(State.Type.SUCCESS)).isEqualTo(57L);
result = executionRepository.dailyStatistics(
null,
tenant,
List.of(FlowScope.USER),
null,
null,
ZonedDateTime.now().minusDays(10),
ZonedDateTime.now(),
null,
null,
true);
assertThat(result.size()).isEqualTo(11);
assertThat(result.get(10).getExecutionCounts().get(State.Type.SUCCESS)).isEqualTo(55L);
result = executionRepository.dailyStatistics(
null,
tenant,
List.of(FlowScope.SYSTEM),
null,
null,
ZonedDateTime.now().minusDays(10),
ZonedDateTime.now(),
null,
null,
true);
assertThat(result.size()).isEqualTo(11);
assertThat(result.get(10).getExecutionCounts().get(State.Type.SUCCESS)).isEqualTo(2L);
}
@SuppressWarnings("OptionalGetWithoutIsPresent")
@Test
protected void executionsCount() throws InterruptedException {

View File

@@ -0,0 +1,281 @@
package io.kestra.core.repositories;
import static org.assertj.core.api.Assertions.assertThat;
import io.kestra.core.Helpers;
import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.models.QueryFilter;
import io.kestra.core.models.SearchResult;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.utils.TestsUtils;
import io.micronaut.data.model.Pageable;
import io.micronaut.data.model.Sort;
import jakarta.inject.Inject;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/**
 * Base contract tests for {@link FlowRepositoryInterface} implementations that run against a
 * shared, pre-loaded set of fixture flows.
 *
 * <p>The fixtures are loaded exactly once per JVM (guarded by {@link #IS_INIT}) into a tenant
 * whose name is randomized per class, so backend-specific subclasses can run in parallel
 * without sharing tenant data.
 */
@KestraTest
public abstract class AbstractLoadedFlowRepositoryTest {
    @Inject
    protected FlowRepositoryInterface flowRepository;
    @Inject
    protected ExecutionRepositoryInterface executionRepository;
    @Inject
    private LocalFlowRepositoryLoader repositoryLoader;
    // Randomized per class name so parallel repository test classes do not collide on tenant data.
    protected static final String TENANT = TestsUtils.randomTenant(AbstractLoadedFlowRepositoryTest.class.getSimpleName());
    // One-time-load guard shared by every test in the JVM.
    private static final AtomicBoolean IS_INIT = new AtomicBoolean();
    /**
     * Ensures the shared fixture flows are present before each test; the underlying load
     * happens only on the first call (see {@link #initFlows}).
     */
    @BeforeEach
    protected synchronized void init() throws IOException, URISyntaxException {
        initFlows(repositoryLoader);
    }
    /**
     * Loads the bundled fixture flows into {@link #TENANT} exactly once per JVM.
     * The method-level synchronization together with {@link #IS_INIT} prevents
     * concurrent subclasses from double-loading.
     */
    protected static synchronized void initFlows(LocalFlowRepositoryLoader repo) throws IOException, URISyntaxException {
        if (!IS_INIT.get()){
            TestsUtils.loads(TENANT, repo);
            IS_INIT.set(true);
        }
    }
    // All loaded fixtures are visible for the test tenant.
    @Test
    void findAll() {
        List<Flow> save = flowRepository.findAll(TENANT);
        assertThat((long) save.size()).isEqualTo(Helpers.FLOWS_COUNT);
    }
    // Same count when the source code is returned alongside each flow.
    @Test
    void findAllWithSource() {
        List<FlowWithSource> save = flowRepository.findAllWithSource(TENANT);
        assertThat((long) save.size()).isEqualTo(Helpers.FLOWS_COUNT);
    }
    // Cross-tenant listing still sees every loaded fixture.
    // NOTE(review): assumes no other tenant holds flows at this point — may be sensitive to parallel suites.
    @Test
    void findAllForAllTenants() {
        List<Flow> save = flowRepository.findAllForAllTenants();
        assertThat((long) save.size()).isEqualTo(Helpers.FLOWS_COUNT);
    }
    @Test
    void findAllWithSourceForAllTenants() {
        List<FlowWithSource> save = flowRepository.findAllWithSourceForAllTenants();
        assertThat((long) save.size()).isEqualTo(Helpers.FLOWS_COUNT);
    }
    // Exact-namespace lookup: counts are tied to the fixture layout
    // (24 fixture flows live outside io.kestra.tests — keep in sync with the fixtures).
    @Test
    void findByNamespace() {
        List<Flow> save = flowRepository.findByNamespace(TENANT, "io.kestra.tests");
        assertThat((long) save.size()).isEqualTo(Helpers.FLOWS_COUNT - 24);
        save = flowRepository.findByNamespace(TENANT, "io.kestra.tests2");
        assertThat((long) save.size()).isEqualTo(1L);
        save = flowRepository.findByNamespace(TENANT, "io.kestra.tests.minimal.bis");
        assertThat((long) save.size()).isEqualTo(1L);
    }
    // Prefix lookup matches sub-namespaces too; only one fixture flow is outside the
    // "io.kestra.tests" prefix, hence FLOWS_COUNT - 1.
    @Test
    void findByNamespacePrefix() {
        List<Flow> save = flowRepository.findByNamespacePrefix(TENANT, "io.kestra.tests");
        assertThat((long) save.size()).isEqualTo(Helpers.FLOWS_COUNT - 1);
        save = flowRepository.findByNamespace(TENANT, "io.kestra.tests2");
        assertThat((long) save.size()).isEqualTo(1L);
        save = flowRepository.findByNamespace(TENANT, "io.kestra.tests.minimal.bis");
        assertThat((long) save.size()).isEqualTo(1L);
    }
    @Test
    void findByNamespacePrefixWithSource() {
        List<FlowWithSource> save = flowRepository.findByNamespacePrefixWithSource(TENANT, "io.kestra.tests");
        assertThat((long) save.size()).isEqualTo(Helpers.FLOWS_COUNT - 1);
    }
    // Requesting one fewer than the total returns exactly that page size.
    @Test
    void find_paginationPartial() {
        assertThat(flowRepository.find(Pageable.from(1, (int) Helpers.FLOWS_COUNT - 1, Sort.UNSORTED), TENANT, null)
            .size())
            .describedAs("When paginating at MAX-1, it should return MAX-1")
            .isEqualTo(Helpers.FLOWS_COUNT - 1);
        assertThat(flowRepository.findWithSource(Pageable.from(1, (int) Helpers.FLOWS_COUNT - 1, Sort.UNSORTED), TENANT, null)
            .size())
            .describedAs("When paginating at MAX-1, it should return MAX-1")
            .isEqualTo(Helpers.FLOWS_COUNT - 1);
    }
    // A page size larger than the data set is capped at what exists.
    @Test
    void find_paginationGreaterThanExisting() {
        assertThat(flowRepository.find(Pageable.from(1, (int) Helpers.FLOWS_COUNT + 1, Sort.UNSORTED), TENANT, null)
            .size())
            .describedAs("When paginating requesting a larger amount than existing, it should return existing MAX")
            .isEqualTo(Helpers.FLOWS_COUNT);
        assertThat(flowRepository.findWithSource(Pageable.from(1, (int) Helpers.FLOWS_COUNT + 1, Sort.UNSORTED), TENANT, null)
            .size())
            .describedAs("When paginating requesting a larger amount than existing, it should return existing MAX")
            .isEqualTo(Helpers.FLOWS_COUNT);
    }
    // STARTS_WITH "io.kestra.tests" matches every fixture namespace.
    @Test
    void find_prefixMatchingAllNamespaces() {
        assertThat(flowRepository.find(
            Pageable.UNPAGED,
            TENANT,
            List.of(
                QueryFilter.builder().field(QueryFilter.Field.NAMESPACE).operation(QueryFilter.Op.STARTS_WITH).value("io.kestra.tests").build()
            )
        ).size())
            .describedAs("When filtering on NAMESPACE START_WITH a pattern that match all, it should return all")
            .isEqualTo(Helpers.FLOWS_COUNT);
        assertThat(flowRepository.findWithSource(
            Pageable.UNPAGED,
            TENANT,
            List.of(
                QueryFilter.builder().field(QueryFilter.Field.NAMESPACE).operation(QueryFilter.Op.STARTS_WITH).value("io.kestra.tests").build()
            )
        ).size())
            .describedAs("When filtering on NAMESPACE START_WITH a pattern that match all, it should return all")
            .isEqualTo(Helpers.FLOWS_COUNT);
    }
    // EQUALS filter on a namespace holding a single fixture flow.
    @Test
    void find_aSpecifiedNamespace() {
        assertThat(flowRepository.find(
            Pageable.UNPAGED,
            TENANT,
            List.of(
                QueryFilter.builder().field(QueryFilter.Field.NAMESPACE).operation(QueryFilter.Op.EQUALS).value("io.kestra.tests2").build()
            )
        ).size()).isEqualTo(1L);
        assertThat(flowRepository.findWithSource(
            Pageable.UNPAGED,
            TENANT,
            List.of(
                QueryFilter.builder().field(QueryFilter.Field.NAMESPACE).operation(QueryFilter.Op.EQUALS).value("io.kestra.tests2").build()
            )
        ).size()).isEqualTo(1L);
    }
    // EQUALS must not behave as a prefix: only the exact sub-namespace matches.
    @Test
    void find_aSpecificSubNamespace() {
        assertThat(flowRepository.find(
            Pageable.UNPAGED,
            TENANT,
            List.of(
                QueryFilter.builder().field(QueryFilter.Field.NAMESPACE).operation(QueryFilter.Op.EQUALS).value("io.kestra.tests.minimal.bis").build()
            )
        ).size())
            .isEqualTo(1L);
        assertThat(flowRepository.findWithSource(
            Pageable.UNPAGED,
            TENANT,
            List.of(
                QueryFilter.builder().field(QueryFilter.Field.NAMESPACE).operation(QueryFilter.Op.EQUALS).value("io.kestra.tests.minimal.bis").build()
            )
        ).size())
            .isEqualTo(1L);
    }
    // Label EQUALS filter: exactly one fixture flow carries country=FR.
    @Test
    void find_aSpecificLabel() {
        assertThat(
            flowRepository.find(Pageable.UNPAGED, TENANT,
                List.of(
                    QueryFilter.builder().field(QueryFilter.Field.LABELS).operation(QueryFilter.Op.EQUALS).value(
                        Map.of("country", "FR")).build()
                )
            ).size())
            .isEqualTo(1);
        assertThat(
            flowRepository.findWithSource(Pageable.UNPAGED, TENANT,
                List.of(
                    QueryFilter.builder().field(QueryFilter.Field.LABELS).operation(QueryFilter.Op.EQUALS).value(Map.of("country", "FR")).build()
                )
            ).size())
            .isEqualTo(1);
    }
    // Namespace and label filters combine with AND semantics.
    @Test
    void find_aSpecificFlowByNamespaceAndLabel() {
        assertThat(
            flowRepository.find(Pageable.UNPAGED, TENANT,
                List.of(
                    QueryFilter.builder().field(QueryFilter.Field.NAMESPACE).operation(QueryFilter.Op.EQUALS).value("io.kestra.tests").build(),
                    QueryFilter.builder().field(QueryFilter.Field.LABELS).operation(QueryFilter.Op.EQUALS).value(Map.of("key2", "value2")).build()
                )
            ).size())
            .isEqualTo(1);
        assertThat(
            flowRepository.findWithSource(Pageable.UNPAGED, TENANT,
                List.of(
                    QueryFilter.builder().field(QueryFilter.Field.NAMESPACE).operation(QueryFilter.Op.EQUALS).value("io.kestra.tests").build(),
                    QueryFilter.builder().field(QueryFilter.Field.LABELS).operation(QueryFilter.Op.EQUALS).value(Map.of("key2", "value2")).build()
                )
            ).size())
            .isEqualTo(1);
    }
    // A label value that matches no fixture yields an empty result
    // (test name says "unknown namespace" but it is the label key1=value2 that has no match).
    @Test
    void find_noResult_forAnUnknownNamespace() {
        assertThat(
            flowRepository.find(Pageable.UNPAGED, TENANT,
                List.of(
                    QueryFilter.builder().field(QueryFilter.Field.NAMESPACE).operation(QueryFilter.Op.EQUALS).value("io.kestra.tests").build(),
                    QueryFilter.builder().field(QueryFilter.Field.LABELS).operation(QueryFilter.Op.EQUALS).value(Map.of("key1", "value2")).build()
                )
            ).size())
            .isEqualTo(0);
        assertThat(
            flowRepository.findWithSource(Pageable.UNPAGED, TENANT,
                List.of(
                    QueryFilter.builder().field(QueryFilter.Field.NAMESPACE).operation(QueryFilter.Op.EQUALS).value("io.kestra.tests").build(),
                    QueryFilter.builder().field(QueryFilter.Field.LABELS).operation(QueryFilter.Op.EQUALS).value(Map.of("key1", "value2")).build()
                )
            ).size())
            .isEqualTo(0);
    }
    // Full-text source search must cope with special characters (:, /, .) in the query string.
    @Test
    protected void findSpecialChars() {
        ArrayListTotal<SearchResult<Flow>> save = flowRepository.findSourceCode(Pageable.unpaged(), "https://api.chucknorris.io", TENANT, null);
        assertThat((long) save.size()).isEqualTo(2L);
    }
    // Fixture set spans 9 distinct namespaces — keep in sync with the loaded flows.
    @Test
    void findDistinctNamespace() {
        List<String> distinctNamespace = flowRepository.findDistinctNamespace(TENANT);
        assertThat((long) distinctNamespace.size()).isEqualTo(9L);
    }
    // A "*" wildcard QUERY filter matches everything; the page holds 10 items
    // while the total reflects the full fixture count.
    @Test
    void shouldReturnForGivenQueryWildCardFilters() {
        List<QueryFilter> filters = List.of(
            QueryFilter.builder().field(QueryFilter.Field.QUERY).operation(QueryFilter.Op.EQUALS).value("*").build()
        );
        ArrayListTotal<Flow> flows = flowRepository.find(Pageable.from(1, 10), TENANT, filters);
        assertThat(flows.size()).isEqualTo(10);
        assertThat(flows.getTotal()).isEqualTo(Helpers.FLOWS_COUNT);
    }
}

View File

@@ -1,6 +1,7 @@
package io.kestra.core.runners;
import io.kestra.core.junit.annotations.ExecuteFlow;
import io.kestra.core.junit.annotations.FlakyTest;
import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.junit.annotations.LoadFlows;
import io.kestra.core.models.executions.Execution;
@@ -28,17 +29,15 @@ import static org.assertj.core.api.Assertions.assertThat;
// must be per-class to allow calling once init() which took a lot of time
public abstract class AbstractRunnerTest {
public static final String TENANT_1 = "tenant1";
public static final String TENANT_2 = "tenant2";
@Inject
protected TestRunnerUtils runnerUtils;
protected RunnerUtils runnerUtils;
@Inject
@Named(QueueFactoryInterface.WORKERTASKLOG_NAMED)
protected QueueInterface<LogEntry> logsQueue;
@Inject
protected RestartCaseTest restartCaseTest;
private RestartCaseTest restartCaseTest;
@Inject
protected FlowTriggerCaseTest flowTriggerCaseTest;
@@ -50,13 +49,13 @@ public abstract class AbstractRunnerTest {
private PluginDefaultsCaseTest pluginDefaultsCaseTest;
@Inject
protected FlowCaseTest flowCaseTest;
private FlowCaseTest flowCaseTest;
@Inject
private WorkingDirectoryTest.Suite workingDirectoryTest;
@Inject
protected PauseTest.Suite pauseTest;
private PauseTest.Suite pauseTest;
@Inject
private SkipExecutionCaseTest skipExecutionCaseTest;
@@ -68,10 +67,10 @@ public abstract class AbstractRunnerTest {
protected LoopUntilCaseTest loopUntilTestCaseTest;
@Inject
protected FlowConcurrencyCaseTest flowConcurrencyCaseTest;
private FlowConcurrencyCaseTest flowConcurrencyCaseTest;
@Inject
protected ScheduleDateCaseTest scheduleDateCaseTest;
private ScheduleDateCaseTest scheduleDateCaseTest;
@Inject
protected FlowInputOutput flowIO;
@@ -80,7 +79,7 @@ public abstract class AbstractRunnerTest {
private SLATestCase slaTestCase;
@Inject
protected ChangeStateTestCase changeStateTestCase;
private ChangeStateTestCase changeStateTestCase;
@Inject
private AfterExecutionTestCase afterExecutionTestCase;
@@ -173,7 +172,7 @@ public abstract class AbstractRunnerTest {
@Test
@LoadFlows({"flows/valids/restart_local_errors.yaml"})
protected void restartFailedThenFailureWithLocalErrors() throws Exception {
void restartFailedThenFailureWithLocalErrors() throws Exception {
restartCaseTest.restartFailedThenFailureWithLocalErrors();
}
@@ -196,12 +195,12 @@ public abstract class AbstractRunnerTest {
}
@Test
@LoadFlows(value = {"flows/valids/trigger-flow-listener-no-inputs.yaml",
@LoadFlows({"flows/valids/trigger-flow-listener-no-inputs.yaml",
"flows/valids/trigger-flow-listener.yaml",
"flows/valids/trigger-flow-listener-namespace-condition.yaml",
"flows/valids/trigger-flow.yaml"}, tenantId = "listener-tenant")
"flows/valids/trigger-flow.yaml"})
void flowTrigger() throws Exception {
flowTriggerCaseTest.trigger("listener-tenant");
flowTriggerCaseTest.trigger();
}
@Test // flaky on CI but never fail locally
@@ -211,11 +210,13 @@ public abstract class AbstractRunnerTest {
flowTriggerCaseTest.triggerWithPause();
}
@FlakyTest
@Disabled
@Test
@LoadFlows(value = {"flows/valids/trigger-flow-listener-with-concurrency-limit.yaml",
"flows/valids/trigger-flow-with-concurrency-limit.yaml"}, tenantId = "trigger-tenant")
@LoadFlows({"flows/valids/trigger-flow-listener-with-concurrency-limit.yaml",
"flows/valids/trigger-flow-with-concurrency-limit.yaml"})
void flowTriggerWithConcurrencyLimit() throws Exception {
flowTriggerCaseTest.triggerWithConcurrencyLimit("trigger-tenant");
flowTriggerCaseTest.triggerWithConcurrencyLimit();
}
@Test
@@ -227,11 +228,11 @@ public abstract class AbstractRunnerTest {
}
@Test // Flaky on CI but never locally even with 100 repetitions
@LoadFlows(value = {"flows/valids/trigger-flow-listener-namespace-condition.yaml",
@LoadFlows({"flows/valids/trigger-flow-listener-namespace-condition.yaml",
"flows/valids/trigger-multiplecondition-flow-c.yaml",
"flows/valids/trigger-multiplecondition-flow-d.yaml"}, tenantId = "condition-tenant")
"flows/valids/trigger-multiplecondition-flow-d.yaml"})
void multipleConditionTriggerFailed() throws Exception {
multipleConditionTriggerCaseTest.failed("condition-tenant");
multipleConditionTriggerCaseTest.failed();
}
@Test
@@ -244,11 +245,11 @@ public abstract class AbstractRunnerTest {
@Disabled
@Test
@LoadFlows(value = {"flows/valids/flow-trigger-preconditions-flow-listen.yaml",
@LoadFlows({"flows/valids/flow-trigger-preconditions-flow-listen.yaml",
"flows/valids/flow-trigger-preconditions-flow-a.yaml",
"flows/valids/flow-trigger-preconditions-flow-b.yaml"}, tenantId = TENANT_1)
"flows/valids/flow-trigger-preconditions-flow-b.yaml"})
void flowTriggerPreconditionsMergeOutputs() throws Exception {
multipleConditionTriggerCaseTest.flowTriggerPreconditionsMergeOutputs(TENANT_1);
multipleConditionTriggerCaseTest.flowTriggerPreconditionsMergeOutputs();
}
@Test
@@ -278,19 +279,19 @@ public abstract class AbstractRunnerTest {
}
@Test
@LoadFlows(value = {"flows/valids/switch.yaml",
@LoadFlows({"flows/valids/switch.yaml",
"flows/valids/task-flow.yaml",
"flows/valids/task-flow-inherited-labels.yaml"}, tenantId = TENANT_1)
"flows/valids/task-flow-inherited-labels.yaml"})
void flowWaitFailed() throws Exception {
flowCaseTest.waitFailed(TENANT_1);
flowCaseTest.waitFailed();
}
@Test
@LoadFlows(value = {"flows/valids/switch.yaml",
@LoadFlows({"flows/valids/switch.yaml",
"flows/valids/task-flow.yaml",
"flows/valids/task-flow-inherited-labels.yaml"}, tenantId = TENANT_2)
"flows/valids/task-flow-inherited-labels.yaml"})
public void invalidOutputs() throws Exception {
flowCaseTest.invalidOutputs(TENANT_2);
flowCaseTest.invalidOutputs();
}
@Test
@@ -300,9 +301,9 @@ public abstract class AbstractRunnerTest {
}
@Test
@LoadFlows(value = {"flows/valids/working-directory.yaml"}, tenantId = TENANT_1)
@LoadFlows(value = {"flows/valids/working-directory.yaml"}, tenantId = "tenant1")
public void workerFailed() throws Exception {
workingDirectoryTest.failed(TENANT_1, runnerUtils);
workingDirectoryTest.failed("tenant1", runnerUtils);
}
@Test
@@ -353,6 +354,7 @@ public abstract class AbstractRunnerTest {
skipExecutionCaseTest.skipExecution();
}
@Disabled
@Test
@LoadFlows({"flows/valids/for-each-item-subflow.yaml",
"flows/valids/for-each-item.yaml"})
@@ -361,11 +363,12 @@ public abstract class AbstractRunnerTest {
}
@Test
@LoadFlows(value = {"flows/valids/for-each-item.yaml"}, tenantId = TENANT_1)
@LoadFlows({"flows/valids/for-each-item.yaml"})
protected void forEachItemEmptyItems() throws Exception {
forEachItemCaseTest.forEachItemEmptyItems(TENANT_1);
forEachItemCaseTest.forEachItemEmptyItems();
}
@Disabled
@Test
@LoadFlows({"flows/valids/for-each-item-subflow-failed.yaml",
"flows/valids/for-each-item-failed.yaml"})
@@ -381,16 +384,16 @@ public abstract class AbstractRunnerTest {
}
@Test // flaky on CI but always pass locally even with 100 iterations
@LoadFlows(value = {"flows/valids/restart-for-each-item.yaml", "flows/valids/restart-child.yaml"}, tenantId = TENANT_1)
@LoadFlows({"flows/valids/restart-for-each-item.yaml", "flows/valids/restart-child.yaml"})
void restartForEachItem() throws Exception {
forEachItemCaseTest.restartForEachItem(TENANT_1);
forEachItemCaseTest.restartForEachItem();
}
@Test
@LoadFlows(value = {"flows/valids/for-each-item-subflow.yaml",
"flows/valids/for-each-item-in-if.yaml"}, tenantId = TENANT_1)
@LoadFlows({"flows/valids/for-each-item-subflow.yaml",
"flows/valids/for-each-item-in-if.yaml"})
protected void forEachItemInIf() throws Exception {
forEachItemCaseTest.forEachItemInIf(TENANT_1);
forEachItemCaseTest.forEachItemInIf();
}
@Test
@@ -431,11 +434,12 @@ public abstract class AbstractRunnerTest {
}
@Test
@LoadFlows(value = {"flows/valids/flow-concurrency-for-each-item.yaml", "flows/valids/flow-concurrency-queue.yml"}, tenantId = TENANT_1)
@LoadFlows({"flows/valids/flow-concurrency-for-each-item.yaml", "flows/valids/flow-concurrency-queue.yml"})
protected void flowConcurrencyWithForEachItem() throws Exception {
flowConcurrencyCaseTest.flowConcurrencyWithForEachItem(TENANT_1);
flowConcurrencyCaseTest.flowConcurrencyWithForEachItem();
}
@Disabled
@Test
@LoadFlows({"flows/valids/flow-concurrency-queue-fail.yml"})
protected void concurrencyQueueRestarted() throws Exception {
@@ -448,12 +452,6 @@ public abstract class AbstractRunnerTest {
flowConcurrencyCaseTest.flowConcurrencyQueueAfterExecution();
}
@Test
@LoadFlows(value = {"flows/valids/flow-concurrency-subflow.yml", "flows/valids/flow-concurrency-cancel.yml"}, tenantId = TENANT_1)
void flowConcurrencySubflow() throws Exception {
flowConcurrencyCaseTest.flowConcurrencySubflow(TENANT_1);
}
@Test
@ExecuteFlow("flows/valids/executable-fail.yml")
void badExecutable(Execution execution) {
@@ -506,9 +504,9 @@ public abstract class AbstractRunnerTest {
}
@Test
@LoadFlows(value = {"flows/valids/minimal.yaml"}, tenantId = TENANT_1)
@LoadFlows({"flows/valids/minimal.yaml"})
void shouldScheduleOnDate() throws Exception {
scheduleDateCaseTest.shouldScheduleOnDate(TENANT_1);
scheduleDateCaseTest.shouldScheduleOnDate();
}
@Test
@@ -530,15 +528,15 @@ public abstract class AbstractRunnerTest {
}
@Test
@LoadFlows(value = {"flows/valids/sla-execution-condition.yaml"}, tenantId = TENANT_1)
@LoadFlows({"flows/valids/sla-execution-condition.yaml"})
void executionConditionSLAShouldCancel() throws Exception {
slaTestCase.executionConditionSLAShouldCancel(TENANT_1);
slaTestCase.executionConditionSLAShouldCancel();
}
@Test
@LoadFlows(value = {"flows/valids/sla-execution-condition.yaml"}, tenantId = TENANT_2)
@LoadFlows({"flows/valids/sla-execution-condition.yaml"})
void executionConditionSLAShouldLabel() throws Exception {
slaTestCase.executionConditionSLAShouldLabel(TENANT_2);
slaTestCase.executionConditionSLAShouldLabel();
}
@Test
@@ -558,15 +556,15 @@ public abstract class AbstractRunnerTest {
}
@Test
@ExecuteFlow(value = "flows/valids/failed-first.yaml", tenantId = TENANT_1)
@ExecuteFlow("flows/valids/failed-first.yaml")
public void changeStateShouldEndsInSuccess(Execution execution) throws Exception {
changeStateTestCase.changeStateShouldEndsInSuccess(execution);
}
@Test
@LoadFlows(value = {"flows/valids/failed-first.yaml", "flows/valids/subflow-parent-of-failed.yaml"}, tenantId = TENANT_2)
@LoadFlows({"flows/valids/failed-first.yaml", "flows/valids/subflow-parent-of-failed.yaml"})
public void changeStateInSubflowShouldEndsParentFlowInSuccess() throws Exception {
changeStateTestCase.changeStateInSubflowShouldEndsParentFlowInSuccess(TENANT_2);
changeStateTestCase.changeStateInSubflowShouldEndsParentFlowInSuccess();
}
@Test

View File

@@ -3,18 +3,25 @@ package io.kestra.core.runners;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.flows.State.Type;
import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.services.ExecutionService;
import io.kestra.core.utils.TestsUtils;
import jakarta.inject.Inject;
import jakarta.inject.Named;
import jakarta.inject.Singleton;
import reactor.core.publisher.Flux;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import static org.assertj.core.api.Assertions.assertThat;
@Singleton
public class ChangeStateTestCase {
public static final String NAMESPACE = "io.kestra.tests";
@Inject
private FlowRepositoryInterface flowRepository;
@@ -22,7 +29,11 @@ public class ChangeStateTestCase {
private ExecutionService executionService;
@Inject
private TestRunnerUtils runnerUtils;
@Named(QueueFactoryInterface.EXECUTION_NAMED)
private QueueInterface<Execution> executionQueue;
@Inject
private RunnerUtils runnerUtils;
public void changeStateShouldEndsInSuccess(Execution execution) throws Exception {
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.FAILED);
@@ -30,40 +41,73 @@ public class ChangeStateTestCase {
assertThat(execution.getTaskRunList().getFirst().getState().getCurrent()).isEqualTo(State.Type.FAILED);
// await for the last execution
CountDownLatch latch = new CountDownLatch(1);
AtomicReference<Execution> lastExecution = new AtomicReference<>();
Flux<Execution> receivedExecutions = TestsUtils.receive(executionQueue, either -> {
Execution exec = either.getLeft();
if (execution.getId().equals(exec.getId()) && exec.getState().getCurrent() == State.Type.SUCCESS) {
lastExecution.set(exec);
latch.countDown();
}
});
Flow flow = flowRepository.findByExecution(execution);
Execution markedAs = executionService.markAs(execution, flow, execution.getTaskRunList().getFirst().getId(), State.Type.SUCCESS);
Execution lastExecution = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), markedAs);
executionQueue.emit(markedAs);
assertThat(lastExecution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(lastExecution.getTaskRunList()).hasSize(2);
assertThat(lastExecution.getTaskRunList().getFirst().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(latch.await(10, TimeUnit.SECONDS)).isTrue();
receivedExecutions.blockLast();
assertThat(lastExecution.get().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(lastExecution.get().getTaskRunList()).hasSize(2);
assertThat(lastExecution.get().getTaskRunList().getFirst().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
}
public void changeStateInSubflowShouldEndsParentFlowInSuccess(String tenantId) throws Exception {
public void changeStateInSubflowShouldEndsParentFlowInSuccess() throws Exception {
// await for the subflow execution
CountDownLatch latch = new CountDownLatch(1);
AtomicReference<Execution> lastExecution = new AtomicReference<>();
Flux<Execution> receivedExecutions = TestsUtils.receive(executionQueue, either -> {
Execution exec = either.getLeft();
if ("failed-first".equals(exec.getFlowId()) && exec.getState().getCurrent() == State.Type.FAILED) {
lastExecution.set(exec);
latch.countDown();
}
});
// run the parent flow
Execution execution = runnerUtils.runOne(tenantId, NAMESPACE, "subflow-parent-of-failed");
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "subflow-parent-of-failed");
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.FAILED);
assertThat(execution.getTaskRunList()).hasSize(1);
assertThat(execution.getTaskRunList().getFirst().getState().getCurrent()).isEqualTo(State.Type.FAILED);
// assert on the subflow
Execution lastExecution = runnerUtils.awaitFlowExecution(e -> e.getState().getCurrent().equals(Type.FAILED), tenantId, NAMESPACE, "failed-first");
assertThat(lastExecution.getState().getCurrent()).isEqualTo(State.Type.FAILED);
assertThat(lastExecution.getTaskRunList()).hasSize(1);
assertThat(lastExecution.getTaskRunList().getFirst().getState().getCurrent()).isEqualTo(State.Type.FAILED);
assertThat(latch.await(10, TimeUnit.SECONDS)).isTrue();
receivedExecutions.blockLast();
assertThat(lastExecution.get().getState().getCurrent()).isEqualTo(State.Type.FAILED);
assertThat(lastExecution.get().getTaskRunList()).hasSize(1);
assertThat(lastExecution.get().getTaskRunList().getFirst().getState().getCurrent()).isEqualTo(State.Type.FAILED);
// await for the parent execution
CountDownLatch parentLatch = new CountDownLatch(1);
AtomicReference<Execution> lastParentExecution = new AtomicReference<>();
receivedExecutions = TestsUtils.receive(executionQueue, either -> {
Execution exec = either.getLeft();
if (execution.getId().equals(exec.getId()) && exec.getState().isTerminated()) {
lastParentExecution.set(exec);
parentLatch.countDown();
}
});
// restart the subflow
Flow flow = flowRepository.findByExecution(lastExecution);
Execution markedAs = executionService.markAs(lastExecution, flow, lastExecution.getTaskRunList().getFirst().getId(), State.Type.SUCCESS);
runnerUtils.emitAndAwaitExecution(e -> e.getState().isTerminated(), markedAs);
//We wait for the subflow execution to pass from failed to success
Execution lastParentExecution = runnerUtils.awaitFlowExecution(e ->
e.getTaskRunList().getFirst().getState().getCurrent().equals(Type.SUCCESS), tenantId, NAMESPACE, "subflow-parent-of-failed");
Flow flow = flowRepository.findByExecution(lastExecution.get());
Execution markedAs = executionService.markAs(lastExecution.get(), flow, lastExecution.get().getTaskRunList().getFirst().getId(), State.Type.SUCCESS);
executionQueue.emit(markedAs);
// assert for the parent flow
assertThat(lastParentExecution.getState().getCurrent()).isEqualTo(State.Type.FAILED); // FIXME should be success but it's FAILED on unit tests
assertThat(lastParentExecution.getTaskRunList()).hasSize(1);
assertThat(lastParentExecution.getTaskRunList().getFirst().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(parentLatch.await(10, TimeUnit.SECONDS)).isTrue();
receivedExecutions.blockLast();
assertThat(lastParentExecution.get().getState().getCurrent()).isEqualTo(State.Type.FAILED); // FIXME should be success but it's FAILED on unit tests
assertThat(lastParentExecution.get().getTaskRunList()).hasSize(1);
assertThat(lastParentExecution.get().getTaskRunList().getFirst().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
}
}

View File

@@ -18,7 +18,7 @@ import static org.assertj.core.api.Assertions.assertThat;
public class EmptyVariablesTest {
@Inject
private TestRunnerUtils runnerUtils;
private RunnerUtils runnerUtils;
@Inject
private FlowInputOutput flowIO;

View File

@@ -58,7 +58,7 @@ class ExecutionServiceTest {
LogRepositoryInterface logRepository;
@Inject
TestRunnerUtils runnerUtils;
RunnerUtils runnerUtils;
@Test
@LoadFlows({"flows/valids/restart_last_failed.yaml"})

View File

@@ -3,15 +3,19 @@ package io.kestra.core.runners;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.flows.State.History;
import io.kestra.core.models.flows.State.Type;
import io.kestra.core.queues.QueueException;
import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.services.ExecutionService;
import io.kestra.core.storages.StorageInterface;
import io.kestra.core.utils.TestsUtils;
import jakarta.inject.Inject;
import jakarta.inject.Named;
import jakarta.inject.Singleton;
import org.apache.commons.lang3.StringUtils;
import reactor.core.publisher.Flux;
import java.io.File;
import java.io.FileInputStream;
@@ -21,21 +25,24 @@ import java.net.URISyntaxException;
import java.nio.file.Files;
import java.time.Duration;
import java.util.*;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.IntStream;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertTrue;
@Singleton
public class FlowConcurrencyCaseTest {
public static final String NAMESPACE = "io.kestra.tests";
@Inject
private StorageInterface storageInterface;
@Inject
protected TestRunnerUtils runnerUtils;
protected RunnerUtils runnerUtils;
@Inject
private FlowInputOutput flowIO;
@@ -43,168 +50,354 @@ public class FlowConcurrencyCaseTest {
@Inject
private FlowRepositoryInterface flowRepository;
@Inject
@Named(QueueFactoryInterface.EXECUTION_NAMED)
protected QueueInterface<Execution> executionQueue;
@Inject
private ExecutionService executionService;
public void flowConcurrencyCancel() throws TimeoutException, QueueException {
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, NAMESPACE, "flow-concurrency-cancel", null, null, Duration.ofSeconds(30));
Execution execution2 = runnerUtils.runOne(MAIN_TENANT, NAMESPACE, "flow-concurrency-cancel");
public void flowConcurrencyCancel() throws TimeoutException, QueueException, InterruptedException {
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, "io.kestra.tests", "flow-concurrency-cancel", null, null, Duration.ofSeconds(30));
Execution execution2 = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "flow-concurrency-cancel");
assertThat(execution1.getState().isRunning()).isTrue();
assertThat(execution2.getState().getCurrent()).isEqualTo(State.Type.CANCELLED);
runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), execution1);
CountDownLatch latch1 = new CountDownLatch(1);
Flux<Execution> receive = TestsUtils.receive(executionQueue, e -> {
if (e.getLeft().getId().equals(execution1.getId())) {
if (e.getLeft().getState().getCurrent() == State.Type.SUCCESS) {
latch1.countDown();
}
}
// FIXME we should fail if we receive the cancel execution again but on Kafka it happens
});
assertTrue(latch1.await(1, TimeUnit.MINUTES));
receive.blockLast();
}
public void flowConcurrencyFail() throws TimeoutException, QueueException {
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, NAMESPACE, "flow-concurrency-fail", null, null, Duration.ofSeconds(30));
Execution execution2 = runnerUtils.runOne(MAIN_TENANT, NAMESPACE, "flow-concurrency-fail");
public void flowConcurrencyFail() throws TimeoutException, QueueException, InterruptedException {
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, "io.kestra.tests", "flow-concurrency-fail", null, null, Duration.ofSeconds(30));
Execution execution2 = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "flow-concurrency-fail");
assertThat(execution1.getState().isRunning()).isTrue();
assertThat(execution2.getState().getCurrent()).isEqualTo(State.Type.FAILED);
runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), execution1);
CountDownLatch latch1 = new CountDownLatch(1);
Flux<Execution> receive = TestsUtils.receive(executionQueue, e -> {
if (e.getLeft().getId().equals(execution1.getId())) {
if (e.getLeft().getState().getCurrent() == State.Type.SUCCESS) {
latch1.countDown();
}
}
// FIXME we should fail if we receive the cancel execution again but on Kafka it happens
});
assertTrue(latch1.await(1, TimeUnit.MINUTES));
receive.blockLast();
}
public void flowConcurrencyQueue() throws QueueException {
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue", null, null, Duration.ofSeconds(30));
public void flowConcurrencyQueue() throws TimeoutException, QueueException, InterruptedException {
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, "io.kestra.tests", "flow-concurrency-queue", null, null, Duration.ofSeconds(30));
Flow flow = flowRepository
.findById(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue", Optional.empty())
.findById(MAIN_TENANT, "io.kestra.tests", "flow-concurrency-queue", Optional.empty())
.orElseThrow();
Execution execution2 = Execution.newExecution(flow, null, null, Optional.empty());
Execution executionResult2 = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), execution2);
Execution executionResult1 = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), execution1);
executionQueue.emit(execution2);
assertThat(execution1.getState().isRunning()).isTrue();
assertThat(execution2.getState().getCurrent()).isEqualTo(State.Type.CREATED);
assertThat(executionResult1.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(executionResult2.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(executionResult2.getState().getHistories().getFirst().getState()).isEqualTo(State.Type.CREATED);
assertThat(executionResult2.getState().getHistories().get(1).getState()).isEqualTo(State.Type.QUEUED);
assertThat(executionResult2.getState().getHistories().get(2).getState()).isEqualTo(State.Type.RUNNING);
var executionResult1 = new AtomicReference<Execution>();
var executionResult2 = new AtomicReference<Execution>();
CountDownLatch latch1 = new CountDownLatch(1);
CountDownLatch latch2 = new CountDownLatch(1);
CountDownLatch latch3 = new CountDownLatch(1);
Flux<Execution> receive = TestsUtils.receive(executionQueue, e -> {
if (e.getLeft().getId().equals(execution1.getId())) {
executionResult1.set(e.getLeft());
if (e.getLeft().getState().getCurrent() == State.Type.SUCCESS) {
latch1.countDown();
}
}
if (e.getLeft().getId().equals(execution2.getId())) {
executionResult2.set(e.getLeft());
if (e.getLeft().getState().getCurrent() == State.Type.RUNNING) {
latch2.countDown();
}
if (e.getLeft().getState().getCurrent() == State.Type.SUCCESS) {
latch3.countDown();
}
}
});
assertTrue(latch1.await(1, TimeUnit.MINUTES));
assertTrue(latch2.await(1, TimeUnit.MINUTES));
assertTrue(latch3.await(1, TimeUnit.MINUTES));
receive.blockLast();
assertThat(executionResult1.get().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(executionResult2.get().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(executionResult2.get().getState().getHistories().getFirst().getState()).isEqualTo(State.Type.CREATED);
assertThat(executionResult2.get().getState().getHistories().get(1).getState()).isEqualTo(State.Type.QUEUED);
assertThat(executionResult2.get().getState().getHistories().get(2).getState()).isEqualTo(State.Type.RUNNING);
}
public void flowConcurrencyQueuePause() throws QueueException {
Execution execution1 = runnerUtils.runOneUntilPaused(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-pause");
public void flowConcurrencyQueuePause() throws TimeoutException, QueueException, InterruptedException {
AtomicReference<String> firstExecutionId = new AtomicReference<>();
var firstExecutionResult = new AtomicReference<Execution>();
var secondExecutionResult = new AtomicReference<Execution>();
CountDownLatch firstExecutionLatch = new CountDownLatch(1);
CountDownLatch secondExecutionLatch = new CountDownLatch(1);
Flux<Execution> receive = TestsUtils.receive(executionQueue, e -> {
if (!"flow-concurrency-queue-pause".equals(e.getLeft().getFlowId())){
return;
}
String currentId = e.getLeft().getId();
Type currentState = e.getLeft().getState().getCurrent();
if (firstExecutionId.get() == null) {
firstExecutionId.set(currentId);
}
if (currentId.equals(firstExecutionId.get())) {
if (currentState == State.Type.SUCCESS) {
firstExecutionResult.set(e.getLeft());
firstExecutionLatch.countDown();
}
} else {
if (currentState == State.Type.SUCCESS) {
secondExecutionResult.set(e.getLeft());
secondExecutionLatch.countDown();
}
}
});
Execution execution1 = runnerUtils.runOneUntilPaused(MAIN_TENANT, "io.kestra.tests", "flow-concurrency-queue-pause");
Flow flow = flowRepository
.findById(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-pause", Optional.empty())
.findById(MAIN_TENANT, "io.kestra.tests", "flow-concurrency-queue-pause", Optional.empty())
.orElseThrow();
Execution execution2 = Execution.newExecution(flow, null, null, Optional.empty());
Execution secondExecutionResult = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), execution2);
Execution firstExecutionResult = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), execution1);
executionQueue.emit(execution2);
assertThat(execution1.getState().isPaused()).isTrue();
assertThat(execution2.getState().getCurrent()).isEqualTo(State.Type.CREATED);
assertThat(firstExecutionResult.getId()).isEqualTo(execution1.getId());
assertThat(firstExecutionResult.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(secondExecutionResult.getId()).isEqualTo(execution2.getId());
assertThat(secondExecutionResult.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(secondExecutionResult.getState().getHistories().getFirst().getState()).isEqualTo(State.Type.CREATED);
assertThat(secondExecutionResult.getState().getHistories().get(1).getState()).isEqualTo(State.Type.QUEUED);
assertThat(secondExecutionResult.getState().getHistories().get(2).getState()).isEqualTo(State.Type.RUNNING);
assertTrue(firstExecutionLatch.await(10, TimeUnit.SECONDS));
assertTrue(secondExecutionLatch.await(10, TimeUnit.SECONDS));
receive.blockLast();
assertThat(firstExecutionResult.get().getId()).isEqualTo(execution1.getId());
assertThat(firstExecutionResult.get().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(secondExecutionResult.get().getId()).isEqualTo(execution2.getId());
assertThat(secondExecutionResult.get().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(secondExecutionResult.get().getState().getHistories().getFirst().getState()).isEqualTo(State.Type.CREATED);
assertThat(secondExecutionResult.get().getState().getHistories().get(1).getState()).isEqualTo(State.Type.QUEUED);
assertThat(secondExecutionResult.get().getState().getHistories().get(2).getState()).isEqualTo(State.Type.RUNNING);
}
public void flowConcurrencyCancelPause() throws QueueException {
Execution execution1 = runnerUtils.runOneUntilPaused(MAIN_TENANT, NAMESPACE, "flow-concurrency-cancel-pause");
public void flowConcurrencyCancelPause() throws TimeoutException, QueueException, InterruptedException {
AtomicReference<String> firstExecutionId = new AtomicReference<>();
var firstExecutionResult = new AtomicReference<Execution>();
var secondExecutionResult = new AtomicReference<Execution>();
CountDownLatch firstExecLatch = new CountDownLatch(1);
CountDownLatch secondExecLatch = new CountDownLatch(1);
Flux<Execution> receive = TestsUtils.receive(executionQueue, e -> {
if (!"flow-concurrency-cancel-pause".equals(e.getLeft().getFlowId())){
return;
}
String currentId = e.getLeft().getId();
Type currentState = e.getLeft().getState().getCurrent();
if (firstExecutionId.get() == null) {
firstExecutionId.set(currentId);
}
if (currentId.equals(firstExecutionId.get())) {
if (currentState == State.Type.SUCCESS) {
firstExecutionResult.set(e.getLeft());
firstExecLatch.countDown();
}
} else {
if (currentState == State.Type.CANCELLED) {
secondExecutionResult.set(e.getLeft());
secondExecLatch.countDown();
}
}
});
Execution execution1 = runnerUtils.runOneUntilPaused(MAIN_TENANT, "io.kestra.tests", "flow-concurrency-cancel-pause");
Flow flow = flowRepository
.findById(MAIN_TENANT, NAMESPACE, "flow-concurrency-cancel-pause", Optional.empty())
.findById(MAIN_TENANT, "io.kestra.tests", "flow-concurrency-cancel-pause", Optional.empty())
.orElseThrow();
Execution execution2 = Execution.newExecution(flow, null, null, Optional.empty());
Execution secondExecutionResult = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.CANCELLED), execution2);
Execution firstExecutionResult = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), execution1);
executionQueue.emit(execution2);
assertThat(execution1.getState().isPaused()).isTrue();
assertThat(execution2.getState().getCurrent()).isEqualTo(State.Type.CREATED);
assertThat(firstExecutionResult.getId()).isEqualTo(execution1.getId());
assertThat(firstExecutionResult.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(secondExecutionResult.getId()).isEqualTo(execution2.getId());
assertThat(secondExecutionResult.getState().getCurrent()).isEqualTo(State.Type.CANCELLED);
assertThat(secondExecutionResult.getState().getHistories().getFirst().getState()).isEqualTo(State.Type.CREATED);
assertThat(secondExecutionResult.getState().getHistories().get(1).getState()).isEqualTo(State.Type.CANCELLED);
assertTrue(firstExecLatch.await(10, TimeUnit.SECONDS));
assertTrue(secondExecLatch.await(10, TimeUnit.SECONDS));
receive.blockLast();
assertThat(firstExecutionResult.get().getId()).isEqualTo(execution1.getId());
assertThat(firstExecutionResult.get().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(secondExecutionResult.get().getId()).isEqualTo(execution2.getId());
assertThat(secondExecutionResult.get().getState().getCurrent()).isEqualTo(State.Type.CANCELLED);
assertThat(secondExecutionResult.get().getState().getHistories().getFirst().getState()).isEqualTo(State.Type.CREATED);
assertThat(secondExecutionResult.get().getState().getHistories().get(1).getState()).isEqualTo(State.Type.CANCELLED);
}
public void flowConcurrencyWithForEachItem(String tenantId) throws QueueException, URISyntaxException, IOException {
URI file = storageUpload(tenantId);
public void flowConcurrencyWithForEachItem() throws TimeoutException, QueueException, InterruptedException, URISyntaxException, IOException {
URI file = storageUpload();
Map<String, Object> inputs = Map.of("file", file.toString(), "batch", 4);
Execution forEachItem = runnerUtils.runOneUntilRunning(tenantId, NAMESPACE, "flow-concurrency-for-each-item", null,
Execution forEachItem = runnerUtils.runOneUntilRunning(MAIN_TENANT, "io.kestra.tests", "flow-concurrency-for-each-item", null,
(flow, execution1) -> flowIO.readExecutionInputs(flow, execution1, inputs), Duration.ofSeconds(5));
assertThat(forEachItem.getState().getCurrent()).isEqualTo(Type.RUNNING);
Set<String> executionIds = new HashSet<>();
Flux<Execution> receive = TestsUtils.receive(executionQueue, e -> {
if ("flow-concurrency-queue".equals(e.getLeft().getFlowId()) && e.getLeft().getState().isRunning()) {
executionIds.add(e.getLeft().getId());
}
});
Execution terminated = runnerUtils.awaitExecution(e -> e.getState().isTerminated(),forEachItem);
// wait a little to be sure there are not too many executions started
Thread.sleep(500);
assertThat(executionIds).hasSize(1);
receive.blockLast();
Execution terminated = runnerUtils.awaitExecution(e -> e.getId().equals(forEachItem.getId()) && e.getState().isTerminated(), () -> {}, Duration.ofSeconds(10));
assertThat(terminated.getState().getCurrent()).isEqualTo(Type.SUCCESS);
List<Execution> executions = runnerUtils.awaitFlowExecutionNumber(2, tenantId, NAMESPACE, "flow-concurrency-queue");
assertThat(executions).extracting(e -> e.getState().getCurrent()).containsOnly(Type.SUCCESS);
assertThat(executions.stream()
.map(e -> e.getState().getHistories())
.flatMap(List::stream)
.map(History::getState)
.toList()).contains(Type.QUEUED);
}
public void flowConcurrencyQueueRestarted() throws Exception {
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, NAMESPACE,
"flow-concurrency-queue-fail", null, null, Duration.ofSeconds(30));
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, "io.kestra.tests", "flow-concurrency-queue-fail", null, null, Duration.ofSeconds(30));
Flow flow = flowRepository
.findById(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-fail", Optional.empty())
.findById(MAIN_TENANT, "io.kestra.tests", "flow-concurrency-queue-fail", Optional.empty())
.orElseThrow();
Execution execution2 = Execution.newExecution(flow, null, null, Optional.empty());
runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.RUNNING), execution2);
executionQueue.emit(execution2);
assertThat(execution1.getState().isRunning()).isTrue();
assertThat(execution2.getState().getCurrent()).isEqualTo(State.Type.CREATED);
var executionResult1 = new AtomicReference<Execution>();
var executionResult2 = new AtomicReference<Execution>();
CountDownLatch latch1 = new CountDownLatch(2);
AtomicReference<Execution> failedExecution = new AtomicReference<>();
CountDownLatch latch2 = new CountDownLatch(1);
CountDownLatch latch3 = new CountDownLatch(1);
Flux<Execution> receive = TestsUtils.receive(executionQueue, e -> {
if (e.getLeft().getId().equals(execution1.getId())) {
executionResult1.set(e.getLeft());
if (e.getLeft().getState().getCurrent() == Type.FAILED) {
failedExecution.set(e.getLeft());
latch1.countDown();
}
}
if (e.getLeft().getId().equals(execution2.getId())) {
executionResult2.set(e.getLeft());
if (e.getLeft().getState().getCurrent() == State.Type.RUNNING) {
latch2.countDown();
}
if (e.getLeft().getState().getCurrent() == Type.FAILED) {
latch3.countDown();
}
}
});
assertTrue(latch2.await(1, TimeUnit.MINUTES));
assertThat(failedExecution.get()).isNotNull();
// here the first fail and the second is now running.
// we restart the first one, it should be queued then fail again.
Execution failedExecution = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.FAILED), execution1);
Execution restarted = executionService.restart(failedExecution, null);
Execution executionResult1 = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.FAILED), restarted);
Execution executionResult2 = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.FAILED), execution2);
Execution restarted = executionService.restart(failedExecution.get(), null);
executionQueue.emit(restarted);
assertThat(executionResult1.getState().getCurrent()).isEqualTo(Type.FAILED);
assertTrue(latch3.await(1, TimeUnit.MINUTES));
assertTrue(latch1.await(1, TimeUnit.MINUTES));
receive.blockLast();
assertThat(executionResult1.get().getState().getCurrent()).isEqualTo(Type.FAILED);
// it should have been queued after restarted
assertThat(executionResult1.getState().getHistories().stream().anyMatch(history -> history.getState() == Type.RESTARTED)).isTrue();
assertThat(executionResult1.getState().getHistories().stream().anyMatch(history -> history.getState() == Type.QUEUED)).isTrue();
assertThat(executionResult2.getState().getCurrent()).isEqualTo(Type.FAILED);
assertThat(executionResult2.getState().getHistories().getFirst().getState()).isEqualTo(State.Type.CREATED);
assertThat(executionResult2.getState().getHistories().get(1).getState()).isEqualTo(State.Type.QUEUED);
assertThat(executionResult2.getState().getHistories().get(2).getState()).isEqualTo(State.Type.RUNNING);
assertThat(executionResult1.get().getState().getHistories().stream().anyMatch(history -> history.getState() == Type.RESTARTED)).isTrue();
assertThat(executionResult1.get().getState().getHistories().stream().anyMatch(history -> history.getState() == Type.QUEUED)).isTrue();
assertThat(executionResult2.get().getState().getCurrent()).isEqualTo(Type.FAILED);
assertThat(executionResult2.get().getState().getHistories().getFirst().getState()).isEqualTo(State.Type.CREATED);
assertThat(executionResult2.get().getState().getHistories().get(1).getState()).isEqualTo(State.Type.QUEUED);
assertThat(executionResult2.get().getState().getHistories().get(2).getState()).isEqualTo(State.Type.RUNNING);
}
public void flowConcurrencyQueueAfterExecution() throws QueueException {
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-after-execution", null, null, Duration.ofSeconds(30));
public void flowConcurrencyQueueAfterExecution() throws TimeoutException, QueueException, InterruptedException {
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, "io.kestra.tests", "flow-concurrency-queue-after-execution", null, null, Duration.ofSeconds(30));
Flow flow = flowRepository
.findById(MAIN_TENANT, NAMESPACE, "flow-concurrency-queue-after-execution", Optional.empty())
.findById(MAIN_TENANT, "io.kestra.tests", "flow-concurrency-queue-after-execution", Optional.empty())
.orElseThrow();
Execution execution2 = Execution.newExecution(flow, null, null, Optional.empty());
Execution executionResult2 = runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), execution2);
Execution executionResult1 = runnerUtils.awaitExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), execution1);
executionQueue.emit(execution2);
assertThat(executionResult1.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(executionResult2.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(executionResult2.getState().getHistories().getFirst().getState()).isEqualTo(State.Type.CREATED);
assertThat(executionResult2.getState().getHistories().get(1).getState()).isEqualTo(State.Type.QUEUED);
assertThat(executionResult2.getState().getHistories().get(2).getState()).isEqualTo(State.Type.RUNNING);
assertThat(execution1.getState().isRunning()).isTrue();
assertThat(execution2.getState().getCurrent()).isEqualTo(State.Type.CREATED);
var executionResult1 = new AtomicReference<Execution>();
var executionResult2 = new AtomicReference<Execution>();
CountDownLatch latch1 = new CountDownLatch(1);
CountDownLatch latch2 = new CountDownLatch(1);
CountDownLatch latch3 = new CountDownLatch(1);
Flux<Execution> receive = TestsUtils.receive(executionQueue, e -> {
if (e.getLeft().getId().equals(execution1.getId())) {
executionResult1.set(e.getLeft());
if (e.getLeft().getState().getCurrent() == State.Type.SUCCESS) {
latch1.countDown();
}
}
if (e.getLeft().getId().equals(execution2.getId())) {
executionResult2.set(e.getLeft());
if (e.getLeft().getState().getCurrent() == State.Type.RUNNING) {
latch2.countDown();
}
if (e.getLeft().getState().getCurrent() == State.Type.SUCCESS) {
latch3.countDown();
}
}
});
assertTrue(latch1.await(1, TimeUnit.MINUTES));
assertTrue(latch2.await(1, TimeUnit.MINUTES));
assertTrue(latch3.await(1, TimeUnit.MINUTES));
receive.blockLast();
assertThat(executionResult1.get().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(executionResult2.get().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(executionResult2.get().getState().getHistories().getFirst().getState()).isEqualTo(State.Type.CREATED);
assertThat(executionResult2.get().getState().getHistories().get(1).getState()).isEqualTo(State.Type.QUEUED);
assertThat(executionResult2.get().getState().getHistories().get(2).getState()).isEqualTo(State.Type.RUNNING);
}
public void flowConcurrencySubflow(String tenantId) throws TimeoutException, QueueException {
runnerUtils.runOneUntilRunning(tenantId, NAMESPACE, "flow-concurrency-subflow", null, null, Duration.ofSeconds(30));
runnerUtils.runOne(tenantId, NAMESPACE, "flow-concurrency-subflow");
List<Execution> subFlowExecs = runnerUtils.awaitFlowExecutionNumber(2, tenantId, NAMESPACE, "flow-concurrency-cancel");
assertThat(subFlowExecs).extracting(e -> e.getState().getCurrent()).containsExactlyInAnyOrder(Type.SUCCESS, Type.CANCELLED);
// run another execution to be sure that everything work (purge is correctly done)
Execution execution3 = runnerUtils.runOne(tenantId, NAMESPACE, "flow-concurrency-subflow");
assertThat(execution3.getState().getCurrent()).isEqualTo(Type.SUCCESS);
runnerUtils.awaitFlowExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), tenantId, NAMESPACE, "flow-concurrency-cancel");
}
private URI storageUpload(String tenantId) throws URISyntaxException, IOException {
private URI storageUpload() throws URISyntaxException, IOException {
File tempFile = File.createTempFile("file", ".txt");
Files.write(tempFile.toPath(), content());
return storageInterface.put(
tenantId,
MAIN_TENANT,
null,
new URI("/file/storage/file.txt"),
new FileInputStream(tempFile)

View File

@@ -4,22 +4,19 @@ import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.flows.GenericFlow;
import io.kestra.core.models.property.Property;
import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.utils.Await;
import io.kestra.core.utils.TestsUtils;
import java.time.Duration;
import java.util.List;
import java.util.concurrent.TimeoutException;
import lombok.SneakyThrows;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.services.FlowListenersInterface;
import io.kestra.plugin.core.debug.Return;
import io.kestra.core.utils.IdUtils;
import java.util.Collections;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import jakarta.inject.Inject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import static org.assertj.core.api.Assertions.assertThat;
@KestraTest
@@ -27,11 +24,11 @@ abstract public class FlowListenersTest {
@Inject
protected FlowRepositoryInterface flowRepository;
protected static FlowWithSource create(String tenantId, String flowId, String taskId) {
protected static FlowWithSource create(String flowId, String taskId) {
FlowWithSource flow = FlowWithSource.builder()
.id(flowId)
.namespace("io.kestra.unittest")
.tenantId(tenantId)
.tenantId(MAIN_TENANT)
.revision(1)
.tasks(Collections.singletonList(Return.builder()
.id(taskId)
@@ -42,65 +39,88 @@ abstract public class FlowListenersTest {
return flow.toBuilder().source(flow.sourceOrGenerateIfNull()).build();
}
private static final Logger LOG = LoggerFactory.getLogger(FlowListenersTest.class);
public void suite(FlowListenersInterface flowListenersService) throws TimeoutException {
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
public void suite(FlowListenersInterface flowListenersService) {
flowListenersService.run();
AtomicInteger count = new AtomicInteger();
var ref = new Ref();
flowListenersService.listen(flows -> count.set(getFlowsForTenant(flowListenersService, tenant).size()));
flowListenersService.listen(flows -> {
count.set(flows.size());
ref.countDownLatch.countDown();
});
// initial state
LOG.info("-----------> wait for zero");
Await.until(() -> count.get() == 0, Duration.ofMillis(10), Duration.ofSeconds(5));
assertThat(getFlowsForTenant(flowListenersService, tenant).size()).isZero();
wait(ref, () -> {
assertThat(count.get()).isZero();
assertThat(flowListenersService.flows().size()).isZero();
});
// resend on startup done for kafka
LOG.info("-----------> wait for zero kafka");
if (flowListenersService.getClass().getName().equals("io.kestra.ee.runner.kafka.KafkaFlowListeners")) {
Await.until(() -> count.get() == 0, Duration.ofMillis(10), Duration.ofSeconds(5));
assertThat(getFlowsForTenant(flowListenersService, tenant).size()).isZero();
wait(ref, () -> {
assertThat(count.get()).isZero();
assertThat(flowListenersService.flows().size()).isZero();
});
}
// create first
LOG.info("-----------> create fist flow");
FlowWithSource first = create(tenant, "first_" + IdUtils.create(), "test");
FlowWithSource firstUpdated = create(tenant, first.getId(), "test2");
FlowWithSource first = create("first_" + IdUtils.create(), "test");
FlowWithSource firstUpdated = create(first.getId(), "test2");
flowRepository.create(GenericFlow.of(first));
Await.until(() -> count.get() == 1, Duration.ofMillis(10), Duration.ofSeconds(5));
assertThat(getFlowsForTenant(flowListenersService, tenant).size()).isEqualTo(1);
wait(ref, () -> {
assertThat(count.get()).isEqualTo(1);
assertThat(flowListenersService.flows().size()).isEqualTo(1);
});
// create the same id than first, no additional flows
first = flowRepository.update(GenericFlow.of(firstUpdated), first);
Await.until(() -> count.get() == 1, Duration.ofMillis(10), Duration.ofSeconds(5));
assertThat(getFlowsForTenant(flowListenersService, tenant).size()).isEqualTo(1);
wait(ref, () -> {
assertThat(count.get()).isEqualTo(1);
assertThat(flowListenersService.flows().size()).isEqualTo(1);
//assertThat(flowListenersService.flows().getFirst().getFirst().getId(), is("test2"));
});
FlowWithSource second = create(tenant, "second_" + IdUtils.create(), "test");
FlowWithSource second = create("second_" + IdUtils.create(), "test");
// create a new one
flowRepository.create(GenericFlow.of(second));
Await.until(() -> count.get() == 2, Duration.ofMillis(10), Duration.ofSeconds(5));
assertThat(getFlowsForTenant(flowListenersService, tenant).size()).isEqualTo(2);
wait(ref, () -> {
assertThat(count.get()).isEqualTo(2);
assertThat(flowListenersService.flows().size()).isEqualTo(2);
});
// delete first
FlowWithSource deleted = flowRepository.delete(first);
Await.until(() -> count.get() == 1, Duration.ofMillis(10), Duration.ofSeconds(5));
assertThat(getFlowsForTenant(flowListenersService, tenant).size()).isEqualTo(1);
wait(ref, () -> {
assertThat(count.get()).isEqualTo(1);
assertThat(flowListenersService.flows().size()).isEqualTo(1);
});
// restore must works
flowRepository.create(GenericFlow.of(first));
Await.until(() -> count.get() == 2, Duration.ofMillis(10), Duration.ofSeconds(5));
assertThat(getFlowsForTenant(flowListenersService, tenant).size()).isEqualTo(2);
wait(ref, () -> {
assertThat(count.get()).isEqualTo(2);
assertThat(flowListenersService.flows().size()).isEqualTo(2);
});
FlowWithSource withTenant = first.toBuilder().tenantId("some-tenant").build();
flowRepository.create(GenericFlow.of(withTenant));
wait(ref, () -> {
assertThat(count.get()).isEqualTo(3);
assertThat(flowListenersService.flows().size()).isEqualTo(3);
});
}
public List<FlowWithSource> getFlowsForTenant(FlowListenersInterface flowListenersService, String tenantId){
return flowListenersService.flows().stream()
.filter(f -> tenantId.equals(f.getTenantId()))
.toList();
public static class Ref {
CountDownLatch countDownLatch = new CountDownLatch(1);
}
@SneakyThrows
private void wait(Ref ref, Runnable run) {
ref.countDownLatch.await(60, TimeUnit.SECONDS);
run.run();
ref.countDownLatch = new CountDownLatch(1);
}
}

View File

@@ -2,61 +2,82 @@ package io.kestra.core.runners;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.flows.State.Type;
import io.kestra.core.queues.QueueException;
import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface;
import io.kestra.core.utils.TestsUtils;
import jakarta.inject.Inject;
import jakarta.inject.Named;
import jakarta.inject.Singleton;
import reactor.core.publisher.Flux;
import java.time.Instant;
import java.util.Comparator;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicReference;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertTrue;
@Singleton
public class FlowTriggerCaseTest {
public static final String NAMESPACE = "io.kestra.tests.trigger";
@Inject
@Named(QueueFactoryInterface.EXECUTION_NAMED)
protected QueueInterface<Execution> executionQueue;
@Inject
protected TestRunnerUtils runnerUtils;
protected RunnerUtils runnerUtils;
public void trigger(String tenantId) throws InterruptedException, TimeoutException, QueueException {
Execution execution = runnerUtils.runOne(tenantId, NAMESPACE, "trigger-flow");
public void trigger() throws InterruptedException, TimeoutException, QueueException {
CountDownLatch countDownLatch = new CountDownLatch(3);
AtomicReference<Execution> flowListener = new AtomicReference<>();
AtomicReference<Execution> flowListenerNoInput = new AtomicReference<>();
AtomicReference<Execution> flowListenerNamespace = new AtomicReference<>();
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
Execution execution = either.getLeft();
if (execution.getState().getCurrent() == State.Type.SUCCESS) {
if (flowListenerNoInput.get() == null && execution.getFlowId().equals("trigger-flow-listener-no-inputs")) {
flowListenerNoInput.set(execution);
countDownLatch.countDown();
} else if (flowListener.get() == null && execution.getFlowId().equals("trigger-flow-listener")) {
flowListener.set(execution);
countDownLatch.countDown();
} else if (flowListenerNamespace.get() == null && execution.getFlowId().equals("trigger-flow-listener-namespace-condition")) {
flowListenerNamespace.set(execution);
countDownLatch.countDown();
}
}
});
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests.trigger", "trigger-flow");
assertThat(execution.getTaskRunList().size()).isEqualTo(1);
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
Execution flowListenerNoInput = runnerUtils.awaitFlowExecution(
e -> e.getState().getCurrent().equals(Type.SUCCESS), tenantId, NAMESPACE,
"trigger-flow-listener-no-inputs");
Execution flowListener = runnerUtils.awaitFlowExecution(
e -> e.getState().getCurrent().equals(Type.SUCCESS), tenantId, NAMESPACE,
"trigger-flow-listener");
Execution flowListenerNamespace = runnerUtils.awaitFlowExecution(
e -> e.getState().getCurrent().equals(Type.SUCCESS), tenantId, NAMESPACE,
"trigger-flow-listener-namespace-condition");
assertTrue(countDownLatch.await(15, TimeUnit.SECONDS));
receive.blockLast();
assertThat(flowListener.get().getTaskRunList().size()).isEqualTo(1);
assertThat(flowListener.get().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(flowListener.get().getTaskRunList().getFirst().getOutputs().get("value")).isEqualTo("childs: from parents: " + execution.getId());
assertThat(flowListener.get().getTrigger().getVariables().get("executionId")).isEqualTo(execution.getId());
assertThat(flowListener.get().getTrigger().getVariables().get("namespace")).isEqualTo("io.kestra.tests.trigger");
assertThat(flowListener.get().getTrigger().getVariables().get("flowId")).isEqualTo("trigger-flow");
assertThat(flowListener.getTaskRunList().size()).isEqualTo(1);
assertThat(flowListener.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(flowListener.getTaskRunList().getFirst().getOutputs().get("value")).isEqualTo("childs: from parents: " + execution.getId());
assertThat(flowListener.getTrigger().getVariables().get("executionId")).isEqualTo(execution.getId());
assertThat(flowListener.getTrigger().getVariables().get("namespace")).isEqualTo(NAMESPACE);
assertThat(flowListener.getTrigger().getVariables().get("flowId")).isEqualTo("trigger-flow");
assertThat(flowListenerNoInput.get().getTaskRunList().size()).isEqualTo(1);
assertThat(flowListenerNoInput.get().getTrigger().getVariables().get("executionId")).isEqualTo(execution.getId());
assertThat(flowListenerNoInput.get().getTrigger().getVariables().get("namespace")).isEqualTo("io.kestra.tests.trigger");
assertThat(flowListenerNoInput.get().getTrigger().getVariables().get("flowId")).isEqualTo("trigger-flow");
assertThat(flowListenerNoInput.get().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(flowListenerNoInput.getTaskRunList().size()).isEqualTo(1);
assertThat(flowListenerNoInput.getTrigger().getVariables().get("executionId")).isEqualTo(execution.getId());
assertThat(flowListenerNoInput.getTrigger().getVariables().get("namespace")).isEqualTo(NAMESPACE);
assertThat(flowListenerNoInput.getTrigger().getVariables().get("flowId")).isEqualTo("trigger-flow");
assertThat(flowListenerNoInput.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(flowListenerNamespace.getTaskRunList().size()).isEqualTo(1);
assertThat(flowListenerNamespace.getTrigger().getVariables().get("namespace")).isEqualTo(NAMESPACE);
assertThat(flowListenerNamespace.get().getTaskRunList().size()).isEqualTo(1);
assertThat(flowListenerNamespace.get().getTrigger().getVariables().get("namespace")).isEqualTo("io.kestra.tests.trigger");
// it will be triggered for 'trigger-flow' or any of the 'trigger-flow-listener*', so we only assert that it's one of them
assertThat(flowListenerNamespace.getTrigger().getVariables().get("flowId"))
assertThat(flowListenerNamespace.get().getTrigger().getVariables().get("flowId"))
.satisfiesAnyOf(
arg -> assertThat(arg).isEqualTo("trigger-flow"),
arg -> assertThat(arg).isEqualTo("trigger-flow-listener-no-inputs"),
@@ -64,43 +85,56 @@ public class FlowTriggerCaseTest {
);
}
public void triggerWithPause() throws TimeoutException, QueueException {
public void triggerWithPause() throws InterruptedException, TimeoutException, QueueException {
CountDownLatch countDownLatch = new CountDownLatch(4);
List<Execution> flowListeners = new ArrayList<>();
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
Execution execution = either.getLeft();
if (execution.getState().getCurrent() == State.Type.SUCCESS && execution.getFlowId().equals("trigger-flow-listener-with-pause")) {
flowListeners.add(execution);
countDownLatch.countDown();
}
});
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests.trigger.pause", "trigger-flow-with-pause");
assertThat(execution.getTaskRunList().size()).isEqualTo(3);
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
List<Execution> triggeredExec = runnerUtils.awaitFlowExecutionNumber(
4,
MAIN_TENANT,
"io.kestra.tests.trigger.pause",
"trigger-flow-listener-with-pause");
assertTrue(countDownLatch.await(15, TimeUnit.SECONDS));
receive.blockLast();
assertThat(triggeredExec.size()).isEqualTo(4);
List<Execution> sortedExecs = triggeredExec.stream()
.sorted(Comparator.comparing(e -> e.getState().getEndDate().orElse(Instant.now())))
.toList();
assertThat(sortedExecs.get(0).getOutputs().get("status")).isEqualTo("RUNNING");
assertThat(sortedExecs.get(1).getOutputs().get("status")).isEqualTo("PAUSED");
assertThat(sortedExecs.get(2).getOutputs().get("status")).isEqualTo("RUNNING");
assertThat(sortedExecs.get(3).getOutputs().get("status")).isEqualTo("SUCCESS");
assertThat(flowListeners.size()).isEqualTo(4);
assertThat(flowListeners.get(0).getOutputs().get("status")).isEqualTo("RUNNING");
assertThat(flowListeners.get(1).getOutputs().get("status")).isEqualTo("PAUSED");
assertThat(flowListeners.get(2).getOutputs().get("status")).isEqualTo("RUNNING");
assertThat(flowListeners.get(3).getOutputs().get("status")).isEqualTo("SUCCESS");
}
public void triggerWithConcurrencyLimit(String tenantId) throws QueueException, TimeoutException {
Execution execution1 = runnerUtils.runOneUntilRunning(tenantId, "io.kestra.tests.trigger.concurrency", "trigger-flow-with-concurrency-limit");
Execution execution2 = runnerUtils.runOne(tenantId, "io.kestra.tests.trigger.concurrency", "trigger-flow-with-concurrency-limit");
public void triggerWithConcurrencyLimit() throws QueueException, TimeoutException, InterruptedException {
CountDownLatch countDownLatch = new CountDownLatch(5);
List<Execution> flowListeners = new ArrayList<>();
List<Execution> triggeredExec = runnerUtils.awaitFlowExecutionNumber(
5,
tenantId,
"io.kestra.tests.trigger.concurrency",
"trigger-flow-listener-with-concurrency-limit");
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
Execution execution = either.getLeft();
if (execution.getState().getCurrent() == State.Type.SUCCESS && execution.getFlowId().equals("trigger-flow-listener-with-concurrency-limit")) {
flowListeners.add(execution);
countDownLatch.countDown();
}
});
assertThat(triggeredExec.size()).isEqualTo(5);
assertThat(triggeredExec.stream().anyMatch(e -> e.getOutputs().get("status").equals("RUNNING") && e.getOutputs().get("executionId").equals(execution1.getId()))).isTrue();
assertThat(triggeredExec.stream().anyMatch(e -> e.getOutputs().get("status").equals("SUCCESS") && e.getOutputs().get("executionId").equals(execution1.getId()))).isTrue();
assertThat(triggeredExec.stream().anyMatch(e -> e.getOutputs().get("status").equals("QUEUED") && e.getOutputs().get("executionId").equals(execution2.getId()))).isTrue();
assertThat(triggeredExec.stream().anyMatch(e -> e.getOutputs().get("status").equals("RUNNING") && e.getOutputs().get("executionId").equals(execution2.getId()))).isTrue();
assertThat(triggeredExec.stream().anyMatch(e -> e.getOutputs().get("status").equals("SUCCESS") && e.getOutputs().get("executionId").equals(execution2.getId()))).isTrue();
Execution execution1 = runnerUtils.runOneUntilRunning(MAIN_TENANT, "io.kestra.tests.trigger.concurrency", "trigger-flow-with-concurrency-limit");
Execution execution2 = runnerUtils.runOneUntilRunning(MAIN_TENANT, "io.kestra.tests.trigger.concurrency", "trigger-flow-with-concurrency-limit");
assertTrue(countDownLatch.await(15, TimeUnit.SECONDS));
receive.blockLast();
assertThat(flowListeners.size()).isEqualTo(5);
assertThat(flowListeners.stream().anyMatch(e -> e.getOutputs().get("status").equals("RUNNING") && e.getOutputs().get("executionId").equals(execution1.getId()))).isTrue();
assertThat(flowListeners.stream().anyMatch(e -> e.getOutputs().get("status").equals("SUCCESS") && e.getOutputs().get("executionId").equals(execution1.getId()))).isTrue();
assertThat(flowListeners.stream().anyMatch(e -> e.getOutputs().get("status").equals("QUEUED") && e.getOutputs().get("executionId").equals(execution2.getId()))).isTrue();
assertThat(flowListeners.stream().anyMatch(e -> e.getOutputs().get("status").equals("RUNNING") && e.getOutputs().get("executionId").equals(execution2.getId()))).isTrue();
assertThat(flowListeners.stream().anyMatch(e -> e.getOutputs().get("status").equals("SUCCESS") && e.getOutputs().get("executionId").equals(execution2.getId()))).isTrue();
}
}

View File

@@ -48,7 +48,7 @@ public class InputsTest {
private QueueInterface<LogEntry> logQueue;
@Inject
private TestRunnerUtils runnerUtils;
private RunnerUtils runnerUtils;
public static Map<String, Object> inputs = ImmutableMap.<String, Object>builder()
.put("string", "myString")

View File

@@ -24,7 +24,7 @@ import static org.assertj.core.api.Assertions.assertThat;
class ListenersTest {
@Inject
private TestRunnerUtils runnerUtils;
private RunnerUtils runnerUtils;
@Inject
private LocalFlowRepositoryLoader repositoryLoader;

View File

@@ -1,168 +1,243 @@
package io.kestra.core.runners;
import io.kestra.core.models.flows.State.Type;
import io.kestra.core.queues.QueueException;
import io.kestra.core.repositories.ArrayListTotal;
import io.kestra.core.repositories.ExecutionRepositoryInterface;
import io.kestra.core.utils.TestsUtils;
import io.micronaut.context.ApplicationContext;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.State;
import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.micronaut.data.model.Pageable;
import java.time.Duration;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicReference;
import jakarta.inject.Inject;
import jakarta.inject.Named;
import jakarta.inject.Singleton;
import reactor.core.publisher.Flux;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertTrue;
@Singleton
public class MultipleConditionTriggerCaseTest {
public static final String NAMESPACE = "io.kestra.tests.trigger";
@Inject
@Named(QueueFactoryInterface.EXECUTION_NAMED)
protected QueueInterface<Execution> executionQueue;
@Inject
protected TestRunnerUtils runnerUtils;
protected RunnerUtils runnerUtils;
@Inject
protected FlowRepositoryInterface flowRepository;
@Inject
protected ExecutionRepositoryInterface executionRepository;
@Inject
protected ApplicationContext applicationContext;
public void trigger() throws InterruptedException, TimeoutException, QueueException {
CountDownLatch countDownLatch = new CountDownLatch(3);
ConcurrentHashMap<String, Execution> ended = new ConcurrentHashMap<>();
List<String> watchedExecutions = List.of("trigger-multiplecondition-flow-a",
"trigger-multiplecondition-flow-b",
"trigger-multiplecondition-listener"
);
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
Execution execution = either.getLeft();
if (watchedExecutions.contains(execution.getFlowId()) && execution.getState().getCurrent() == State.Type.SUCCESS) {
ended.put(execution.getId(), execution);
countDownLatch.countDown();
}
});
// first one
Execution execution = runnerUtils.runOne(MAIN_TENANT, NAMESPACE, "trigger-multiplecondition-flow-a");
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests.trigger",
"trigger-multiplecondition-flow-a", Duration.ofSeconds(60));
assertThat(execution.getTaskRunList().size()).isEqualTo(1);
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
// wait a little to be sure that the trigger is not launching execution
Thread.sleep(1000);
ArrayListTotal<Execution> flowBExecutions = executionRepository.findByFlowId(MAIN_TENANT,
NAMESPACE, "trigger-multiplecondition-flow-b", Pageable.UNPAGED);
ArrayListTotal<Execution> listenerExecutions = executionRepository.findByFlowId(MAIN_TENANT,
NAMESPACE, "trigger-multiplecondition-listener", Pageable.UNPAGED);
assertThat(flowBExecutions).isEmpty();
assertThat(listenerExecutions).isEmpty();
assertThat(ended.size()).isEqualTo(1);
// second one
execution = runnerUtils.runOne(MAIN_TENANT, NAMESPACE, "trigger-multiplecondition-flow-b");
execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests.trigger",
"trigger-multiplecondition-flow-b", Duration.ofSeconds(60));
assertThat(execution.getTaskRunList().size()).isEqualTo(1);
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
// trigger is done
Execution triggerExecution = runnerUtils.awaitFlowExecution(
e -> e.getState().getCurrent().equals(Type.SUCCESS),
MAIN_TENANT, NAMESPACE, "trigger-multiplecondition-listener");
assertTrue(countDownLatch.await(10, TimeUnit.SECONDS));
receive.blockLast();
assertThat(ended.size()).isEqualTo(3);
Flow flow = flowRepository.findById(MAIN_TENANT, "io.kestra.tests.trigger",
"trigger-multiplecondition-listener").orElseThrow();
Execution triggerExecution = ended.entrySet()
.stream()
.filter(e -> e.getValue().getFlowId().equals(flow.getId()))
.findFirst()
.map(Map.Entry::getValue)
.orElseThrow();
assertThat(triggerExecution.getTaskRunList().size()).isEqualTo(1);
assertThat(triggerExecution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(triggerExecution.getTrigger().getVariables().get("executionId")).isEqualTo(execution.getId());
assertThat(triggerExecution.getTrigger().getVariables().get("namespace")).isEqualTo(
NAMESPACE);
assertThat(triggerExecution.getTrigger().getVariables().get("namespace")).isEqualTo("io.kestra.tests.trigger");
assertThat(triggerExecution.getTrigger().getVariables().get("flowId")).isEqualTo("trigger-multiplecondition-flow-b");
}
public void failed(String tenantId) throws InterruptedException, TimeoutException, QueueException {
public void failed() throws InterruptedException, TimeoutException, QueueException {
CountDownLatch countDownLatch = new CountDownLatch(1);
AtomicReference<Execution> listener = new AtomicReference<>();
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
Execution execution = either.getLeft();
if (execution.getFlowId().equals("trigger-flow-listener-namespace-condition")
&& execution.getState().getCurrent().isTerminated()) {
listener.set(execution);
countDownLatch.countDown();
}
});
// first one
Execution execution = runnerUtils.runOne(tenantId, NAMESPACE,
"trigger-multiplecondition-flow-c");
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests.trigger",
"trigger-multiplecondition-flow-c", Duration.ofSeconds(60));
assertThat(execution.getTaskRunList().size()).isEqualTo(1);
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.FAILED);
// wait a little to be sure that the trigger is not launching execution
Thread.sleep(1000);
ArrayListTotal<Execution> byFlowId = executionRepository.findByFlowId(tenantId, NAMESPACE,
"trigger-multiplecondition-flow-d", Pageable.UNPAGED);
assertThat(byFlowId).isEmpty();
assertThat(listener.get()).isNull();
// second one
execution = runnerUtils.runOne(tenantId, NAMESPACE,
"trigger-multiplecondition-flow-d");
execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests.trigger",
"trigger-multiplecondition-flow-d", Duration.ofSeconds(60));
assertThat(execution.getTaskRunList().size()).isEqualTo(1);
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
Execution triggerExecution = runnerUtils.awaitFlowExecution(
e -> e.getState().getCurrent().equals(Type.SUCCESS),
tenantId, NAMESPACE, "trigger-flow-listener-namespace-condition");
// trigger was not done
assertThat(triggerExecution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertTrue(countDownLatch.await(10, TimeUnit.SECONDS));
receive.blockLast();
assertThat(listener.get()).isNotNull();
assertThat(listener.get().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
}
public void flowTriggerPreconditions() throws TimeoutException, QueueException {
public void flowTriggerPreconditions()
throws InterruptedException, TimeoutException, QueueException {
CountDownLatch countDownLatch = new CountDownLatch(1);
AtomicReference<Execution> flowTrigger = new AtomicReference<>();
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
Execution execution = either.getLeft();
if (execution.getState().getCurrent() == State.Type.SUCCESS && execution.getFlowId()
.equals("flow-trigger-preconditions-flow-listen")) {
flowTrigger.set(execution);
countDownLatch.countDown();
}
});
// flowA
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests.trigger.preconditions",
"flow-trigger-preconditions-flow-a");
"flow-trigger-preconditions-flow-a", Duration.ofSeconds(60));
assertThat(execution.getTaskRunList().size()).isEqualTo(1);
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
// flowB: we trigger it two times, as flow-trigger-flow-preconditions-flow-listen is configured with resetOnSuccess: false it should be triggered two times
execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests.trigger.preconditions",
"flow-trigger-preconditions-flow-a");
"flow-trigger-preconditions-flow-a", Duration.ofSeconds(60));
assertThat(execution.getTaskRunList().size()).isEqualTo(1);
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests.trigger.preconditions",
"flow-trigger-preconditions-flow-b");
"flow-trigger-preconditions-flow-b", Duration.ofSeconds(60));
assertThat(execution.getTaskRunList().size()).isEqualTo(1);
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
// trigger is done
Execution triggerExecution = runnerUtils.awaitFlowExecution(
e -> e.getState().getCurrent().equals(Type.SUCCESS),
MAIN_TENANT, "io.kestra.tests.trigger.preconditions", "flow-trigger-preconditions-flow-listen");
assertTrue(countDownLatch.await(1, TimeUnit.SECONDS));
receive.blockLast();
assertThat(flowTrigger.get()).isNotNull();
Execution triggerExecution = flowTrigger.get();
assertThat(triggerExecution.getTaskRunList().size()).isEqualTo(1);
assertThat(triggerExecution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(triggerExecution.getTrigger().getVariables().get("outputs")).isNotNull();
assertThat((Map<String, Object>) triggerExecution.getTrigger().getVariables().get("outputs")).containsEntry("some", "value");
}
public void flowTriggerPreconditionsMergeOutputs(String tenantId) throws QueueException, TimeoutException {
public void flowTriggerPreconditionsMergeOutputs() throws QueueException, TimeoutException, InterruptedException {
// we do the same as in flowTriggerPreconditions() but we trigger flows in the opposite order to be sure that outputs are merged
CountDownLatch countDownLatch = new CountDownLatch(1);
AtomicReference<Execution> flowTrigger = new AtomicReference<>();
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
Execution execution = either.getLeft();
if (execution.getState().getCurrent() == State.Type.SUCCESS && execution.getFlowId()
.equals("flow-trigger-preconditions-flow-listen")) {
flowTrigger.set(execution);
countDownLatch.countDown();
}
});
// flowB
Execution execution = runnerUtils.runOne(tenantId, "io.kestra.tests.trigger.preconditions",
"flow-trigger-preconditions-flow-b");
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests.trigger.preconditions",
"flow-trigger-preconditions-flow-b", Duration.ofSeconds(60));
assertThat(execution.getTaskRunList().size()).isEqualTo(1);
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
// flowA
execution = runnerUtils.runOne(tenantId, "io.kestra.tests.trigger.preconditions",
"flow-trigger-preconditions-flow-a");
execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests.trigger.preconditions",
"flow-trigger-preconditions-flow-a", Duration.ofSeconds(60));
assertThat(execution.getTaskRunList().size()).isEqualTo(1);
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
// trigger is done
Execution triggerExecution = runnerUtils.awaitFlowExecution(
e -> e.getState().getCurrent().equals(Type.SUCCESS),
tenantId, "io.kestra.tests.trigger.preconditions", "flow-trigger-preconditions-flow-listen");
assertTrue(countDownLatch.await(1, TimeUnit.SECONDS));
receive.blockLast();
assertThat(flowTrigger.get()).isNotNull();
Execution triggerExecution = flowTrigger.get();
assertThat(triggerExecution.getTaskRunList().size()).isEqualTo(1);
assertThat(triggerExecution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(triggerExecution.getTrigger().getVariables().get("outputs")).isNotNull();
assertThat((Map<String, Object>) triggerExecution.getTrigger().getVariables().get("outputs")).containsEntry("some", "value");
}
public void flowTriggerOnPaused() throws TimeoutException, QueueException {
public void flowTriggerOnPaused()
throws InterruptedException, TimeoutException, QueueException {
CountDownLatch countDownLatch = new CountDownLatch(1);
AtomicReference<Execution> flowTrigger = new AtomicReference<>();
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
Execution execution = either.getLeft();
if (execution.getState().getCurrent() == State.Type.SUCCESS && execution.getFlowId()
.equals("flow-trigger-paused-listen")) {
flowTrigger.set(execution);
countDownLatch.countDown();
}
});
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests.trigger.paused",
"flow-trigger-paused-flow");
"flow-trigger-paused-flow", Duration.ofSeconds(60));
assertThat(execution.getTaskRunList().size()).isEqualTo(2);
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
// trigger is done
Execution triggerExecution = runnerUtils.awaitFlowExecution(
e -> e.getState().getCurrent().equals(Type.SUCCESS),
MAIN_TENANT, "io.kestra.tests.trigger.paused", "flow-trigger-paused-listen");
assertTrue(countDownLatch.await(1, TimeUnit.SECONDS));
receive.blockLast();
assertThat(flowTrigger.get()).isNotNull();
Execution triggerExecution = flowTrigger.get();
assertThat(triggerExecution.getTaskRunList().size()).isEqualTo(1);
assertThat(triggerExecution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
}

View File

@@ -30,7 +30,7 @@ import static org.assertj.core.api.Assertions.assertThat;
@Singleton
public class PluginDefaultsCaseTest {
@Inject
private TestRunnerUtils runnerUtils;
private RunnerUtils runnerUtils;
public void taskDefaults() throws TimeoutException, QueueException {
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "plugin-defaults", Duration.ofSeconds(60));

View File

@@ -4,19 +4,29 @@ import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.flows.State.Type;
import io.kestra.core.queues.QueueException;
import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.services.ExecutionService;
import java.time.Duration;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicReference;
import io.kestra.core.utils.TestsUtils;
import jakarta.inject.Inject;
import jakarta.inject.Named;
import jakarta.inject.Singleton;
import reactor.core.publisher.Flux;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import static org.assertj.core.api.Assertions.assertThat;
import static io.kestra.core.utils.Rethrow.throwRunnable;
import static org.junit.jupiter.api.Assertions.assertTrue;
@Singleton
@@ -25,30 +35,38 @@ public class RestartCaseTest {
private FlowRepositoryInterface flowRepository;
@Inject
private TestRunnerUtils runnerUtils;
private RunnerUtils runnerUtils;
@Inject
private ExecutionService executionService;
@Inject
@Named(QueueFactoryInterface.EXECUTION_NAMED)
private QueueInterface<Execution> executionQueue;
public void restartFailedThenSuccess() throws Exception {
Flow flow = flowRepository.findById(MAIN_TENANT, "io.kestra.tests", "restart_last_failed").orElseThrow();
Execution firstExecution = runnerUtils.runOne(MAIN_TENANT, flow.getNamespace(), flow.getId());
Execution firstExecution = runnerUtils.runOne(MAIN_TENANT, flow.getNamespace(), flow.getId(), Duration.ofSeconds(60));
assertThat(firstExecution.getState().getCurrent()).isEqualTo(State.Type.FAILED);
assertThat(firstExecution.getTaskRunList()).hasSize(3);
assertThat(firstExecution.getTaskRunList().get(2).getState().getCurrent()).isEqualTo(State.Type.FAILED);
// wait
Execution restartedExec = executionService.restart(firstExecution, null);
assertThat(restartedExec).isNotNull();
assertThat(restartedExec.getId()).isEqualTo(firstExecution.getId());
assertThat(restartedExec.getParentId()).isNull();
assertThat(restartedExec.getTaskRunList().size()).isEqualTo(3);
assertThat(restartedExec.getState().getCurrent()).isEqualTo(State.Type.RESTARTED);
Execution finishedRestartedExecution = runnerUtils.emitAndAwaitExecution(
Execution finishedRestartedExecution = runnerUtils.awaitExecution(
execution -> execution.getState().getCurrent() == State.Type.SUCCESS && execution.getId().equals(firstExecution.getId()),
restartedExec
throwRunnable(() -> {
Execution restartedExec = executionService.restart(firstExecution, null);
assertThat(restartedExec).isNotNull();
assertThat(restartedExec.getId()).isEqualTo(firstExecution.getId());
assertThat(restartedExec.getParentId()).isNull();
assertThat(restartedExec.getTaskRunList().size()).isEqualTo(3);
assertThat(restartedExec.getState().getCurrent()).isEqualTo(State.Type.RESTARTED);
executionQueue.emit(restartedExec);
}),
Duration.ofSeconds(60)
);
assertThat(finishedRestartedExecution).isNotNull();
@@ -75,16 +93,19 @@ public class RestartCaseTest {
assertThat(firstExecution.getTaskRunList().getFirst().getState().getCurrent()).isEqualTo(State.Type.FAILED);
// wait
Execution restartedExec = executionService.restart(firstExecution, null);
assertThat(restartedExec).isNotNull();
assertThat(restartedExec.getId()).isEqualTo(firstExecution.getId());
assertThat(restartedExec.getParentId()).isNull();
assertThat(restartedExec.getTaskRunList().size()).isEqualTo(1);
assertThat(restartedExec.getState().getCurrent()).isEqualTo(State.Type.RESTARTED);
Execution finishedRestartedExecution = runnerUtils.emitAndAwaitExecution(
Execution finishedRestartedExecution = runnerUtils.awaitExecution(
execution -> execution.getState().getCurrent() == State.Type.FAILED && execution.getTaskRunList().getFirst().getAttempts().size() == 2,
restartedExec
throwRunnable(() -> {
Execution restartedExec = executionService.restart(firstExecution, null);
executionQueue.emit(restartedExec);
assertThat(restartedExec).isNotNull();
assertThat(restartedExec.getId()).isEqualTo(firstExecution.getId());
assertThat(restartedExec.getParentId()).isNull();
assertThat(restartedExec.getTaskRunList().size()).isEqualTo(1);
assertThat(restartedExec.getState().getCurrent()).isEqualTo(State.Type.RESTARTED);
}),
Duration.ofSeconds(60)
);
assertThat(finishedRestartedExecution).isNotNull();
@@ -107,16 +128,19 @@ public class RestartCaseTest {
assertThat(firstExecution.getTaskRunList().get(3).getState().getCurrent()).isEqualTo(State.Type.FAILED);
// wait
Execution restartedExec = executionService.restart(firstExecution, null);
assertThat(restartedExec).isNotNull();
assertThat(restartedExec.getId()).isEqualTo(firstExecution.getId());
assertThat(restartedExec.getParentId()).isNull();
assertThat(restartedExec.getTaskRunList().size()).isEqualTo(4);
assertThat(restartedExec.getState().getCurrent()).isEqualTo(State.Type.RESTARTED);
Execution finishedRestartedExecution = runnerUtils.emitAndAwaitExecution(
Execution finishedRestartedExecution = runnerUtils.awaitExecution(
execution -> execution.getState().getCurrent() == State.Type.FAILED && execution.findTaskRunsByTaskId("failStep").stream().findFirst().get().getAttempts().size() == 2,
restartedExec
throwRunnable(() -> {
Execution restartedExec = executionService.restart(firstExecution, null);
executionQueue.emit(restartedExec);
assertThat(restartedExec).isNotNull();
assertThat(restartedExec.getId()).isEqualTo(firstExecution.getId());
assertThat(restartedExec.getParentId()).isNull();
assertThat(restartedExec.getTaskRunList().size()).isEqualTo(4);
assertThat(restartedExec.getState().getCurrent()).isEqualTo(State.Type.RESTARTED);
}),
Duration.ofSeconds(60)
);
assertThat(finishedRestartedExecution).isNotNull();
@@ -139,19 +163,21 @@ public class RestartCaseTest {
assertThat(firstExecution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
// wait
Execution restartedExec = executionService.replay(firstExecution, firstExecution.findTaskRunByTaskIdAndValue("2_end", List.of()).getId(), null);
assertThat(restartedExec.getState().getCurrent()).isEqualTo(State.Type.RESTARTED);
assertThat(restartedExec.getState().getHistories()).hasSize(4);
assertThat(restartedExec.getTaskRunList()).hasSize(20);
assertThat(restartedExec.getTaskRunList().get(19).getState().getCurrent()).isEqualTo(State.Type.RESTARTED);
assertThat(restartedExec.getId()).isNotEqualTo(firstExecution.getId());
assertThat(restartedExec.getTaskRunList().get(1).getId()).isNotEqualTo(firstExecution.getTaskRunList().get(1).getId());
Execution finishedRestartedExecution = runnerUtils.awaitChildExecution(
flow,
firstExecution,
restartedExec,
throwRunnable(() -> {
Execution restartedExec = executionService.replay(firstExecution, firstExecution.findTaskRunByTaskIdAndValue("2_end", List.of()).getId(), null);
executionQueue.emit(restartedExec);
assertThat(restartedExec.getState().getCurrent()).isEqualTo(State.Type.RESTARTED);
assertThat(restartedExec.getState().getHistories()).hasSize(4);
assertThat(restartedExec.getTaskRunList()).hasSize(20);
assertThat(restartedExec.getTaskRunList().get(19).getState().getCurrent()).isEqualTo(State.Type.RESTARTED);
assertThat(restartedExec.getId()).isNotEqualTo(firstExecution.getId());
assertThat(restartedExec.getTaskRunList().get(1).getId()).isNotEqualTo(firstExecution.getTaskRunList().get(1).getId());
}),
Duration.ofSeconds(60)
);
@@ -169,58 +195,71 @@ public class RestartCaseTest {
Execution restart = executionService.restart(execution, null);
assertThat(restart.getState().getCurrent()).isEqualTo(State.Type.RESTARTED);
Execution restartEnded = runnerUtils.emitAndAwaitExecution(
Execution restartEnded = runnerUtils.awaitExecution(
e -> e.getState().getCurrent() == State.Type.FAILED,
restart,
Duration.ofSeconds(60)
throwRunnable(() -> executionQueue.emit(restart)),
Duration.ofSeconds(120)
);
assertThat(restartEnded.getState().getCurrent()).isEqualTo(State.Type.FAILED);
Execution newRestart = executionService.restart(restartEnded, null);
restartEnded = runnerUtils.emitAndAwaitExecution(
restartEnded = runnerUtils.awaitExecution(
e -> e.getState().getCurrent() == State.Type.FAILED,
newRestart,
Duration.ofSeconds(60)
throwRunnable(() -> executionQueue.emit(newRestart)),
Duration.ofSeconds(120)
);
assertThat(restartEnded.getState().getCurrent()).isEqualTo(State.Type.FAILED);
}
public void restartSubflow() throws Exception {
CountDownLatch countDownLatch = new CountDownLatch(1);
Flux<Execution> receiveSubflows = TestsUtils.receive(executionQueue, either -> {
Execution subflowExecution = either.getLeft();
if (subflowExecution.getFlowId().equals("restart-child") && subflowExecution.getState().getCurrent().isFailed()) {
countDownLatch.countDown();
}
});
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "restart-parent");
assertThat(execution.getTaskRunList()).hasSize(3);
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.FAILED);
// here we must have 1 failed subflows
runnerUtils.awaitFlowExecution(e -> e.getState().getCurrent().isFailed(), MAIN_TENANT, "io.kestra.tests", "restart-child");
assertTrue(countDownLatch.await(1, TimeUnit.MINUTES));
receiveSubflows.blockLast();
// there is 3 values so we must restart it 3 times to end the 3 subflows
CountDownLatch successLatch = new CountDownLatch(3);
receiveSubflows = TestsUtils.receive(executionQueue, either -> {
Execution subflowExecution = either.getLeft();
if (subflowExecution.getFlowId().equals("restart-child") && subflowExecution.getState().getCurrent().isSuccess()) {
successLatch.countDown();
}
});
Execution restarted1 = executionService.restart(execution, null);
execution = runnerUtils.emitAndAwaitExecution(
execution = runnerUtils.awaitExecution(
e -> e.getState().getCurrent() == State.Type.FAILED && e.getFlowId().equals("restart-parent"),
restarted1,
throwRunnable(() -> executionQueue.emit(restarted1)),
Duration.ofSeconds(10)
);
Execution restarted2 = executionService.restart(execution, null);
execution = runnerUtils.emitAndAwaitExecution(
execution = runnerUtils.awaitExecution(
e -> e.getState().getCurrent() == State.Type.FAILED && e.getFlowId().equals("restart-parent"),
restarted2,
throwRunnable(() -> executionQueue.emit(restarted2)),
Duration.ofSeconds(10)
);
Execution restarted3 = executionService.restart(execution, null);
execution = runnerUtils.emitAndAwaitExecution(
execution = runnerUtils.awaitExecution(
e -> e.getState().getCurrent() == State.Type.SUCCESS && e.getFlowId().equals("restart-parent"),
restarted3,
throwRunnable(() -> executionQueue.emit(restarted3)),
Duration.ofSeconds(10)
);
assertThat(execution.getTaskRunList()).hasSize(6);
List<Execution> childExecutions = runnerUtils.awaitFlowExecutionNumber(3, MAIN_TENANT, "io.kestra.tests", "restart-child");
List<Execution> successfulRestart = childExecutions.stream()
.filter(e -> e.getState().getCurrent().equals(Type.SUCCESS)).toList();
assertThat(successfulRestart).hasSize(3);
assertTrue(successLatch.await(1, TimeUnit.MINUTES));
receiveSubflows.blockLast();
}
public void restartFailedWithFinally() throws Exception {
@@ -233,15 +272,18 @@ public class RestartCaseTest {
assertThat(firstExecution.getTaskRunList().get(1).getState().getCurrent()).isEqualTo(State.Type.FAILED);
// wait
Execution restartedExec = executionService.restart(firstExecution, null);
assertThat(restartedExec).isNotNull();
assertThat(restartedExec.getId()).isEqualTo(firstExecution.getId());
assertThat(restartedExec.getParentId()).isNull();
assertThat(restartedExec.getTaskRunList().size()).isEqualTo(2);
assertThat(restartedExec.getState().getCurrent()).isEqualTo(State.Type.RESTARTED);
Execution finishedRestartedExecution = runnerUtils.emitAndAwaitExecution(
execution -> executionService.isTerminated(flow, execution) && execution.getState().isSuccess(),
restartedExec,
Execution finishedRestartedExecution = runnerUtils.awaitExecution(
execution -> executionService.isTerminated(flow, execution) && execution.getState().isSuccess() && execution.getId().equals(firstExecution.getId()),
throwRunnable(() -> {
Execution restartedExec = executionService.restart(firstExecution, null);
assertThat(restartedExec).isNotNull();
assertThat(restartedExec.getId()).isEqualTo(firstExecution.getId());
assertThat(restartedExec.getParentId()).isNull();
assertThat(restartedExec.getTaskRunList().size()).isEqualTo(2);
assertThat(restartedExec.getState().getCurrent()).isEqualTo(State.Type.RESTARTED);
executionQueue.emit(restartedExec);
}),
Duration.ofSeconds(60)
);
@@ -267,18 +309,21 @@ public class RestartCaseTest {
assertThat(firstExecution.getTaskRunList().get(1).getState().getCurrent()).isEqualTo(State.Type.FAILED);
// wait
Execution restartedExec = executionService.restart(firstExecution, null);
assertThat(restartedExec).isNotNull();
assertThat(restartedExec.getId()).isEqualTo(firstExecution.getId());
assertThat(restartedExec.getParentId()).isNull();
assertThat(restartedExec.getTaskRunList().size()).isEqualTo(2);
assertThat(restartedExec.getState().getCurrent()).isEqualTo(State.Type.RESTARTED);
Execution finishedRestartedExecution = runnerUtils.awaitExecution(
execution -> executionService.isTerminated(flow, execution) && execution.getState().isSuccess() && execution.getId().equals(firstExecution.getId()),
throwRunnable(() -> {
Execution restartedExec = executionService.restart(firstExecution, null);
assertThat(restartedExec).isNotNull();
assertThat(restartedExec.getId()).isEqualTo(firstExecution.getId());
assertThat(restartedExec.getParentId()).isNull();
assertThat(restartedExec.getTaskRunList().size()).isEqualTo(2);
assertThat(restartedExec.getState().getCurrent()).isEqualTo(State.Type.RESTARTED);
Execution finishedRestartedExecution = runnerUtils.emitAndAwaitExecution(
execution -> executionService.isTerminated(flow, execution) && execution.getState().isSuccess(),
restartedExec,
executionQueue.emit(restartedExec);
}),
Duration.ofSeconds(60)
);
assertThat(finishedRestartedExecution).isNotNull();
assertThat(finishedRestartedExecution.getId()).isEqualTo(firstExecution.getId());
assertThat(finishedRestartedExecution.getParentId()).isNull();

View File

@@ -98,7 +98,7 @@ class RunContextTest {
private FlowInputOutput flowIO;
@Inject
private TestRunnerUtils runnerUtils;
private RunnerUtils runnerUtils;
@Inject
protected LocalFlowRepositoryLoader repositoryLoader;

View File

@@ -16,7 +16,7 @@ import static org.assertj.core.api.Assertions.assertThat;
@Singleton
public class SLATestCase {
@Inject
private TestRunnerUtils runnerUtils;
private RunnerUtils runnerUtils;
public void maxDurationSLAShouldFail() throws QueueException, TimeoutException {
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "sla-max-duration-fail");
@@ -36,14 +36,14 @@ public class SLATestCase {
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
}
public void executionConditionSLAShouldCancel(String tenantId) throws QueueException, TimeoutException {
Execution execution = runnerUtils.runOne(tenantId, "io.kestra.tests", "sla-execution-condition", null, (f, e) -> Map.of("string", "CANCEL"));
public void executionConditionSLAShouldCancel() throws QueueException, TimeoutException {
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "sla-execution-condition", null, (f, e) -> Map.of("string", "CANCEL"));
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.CANCELLED);
}
public void executionConditionSLAShouldLabel(String tenantId) throws QueueException, TimeoutException {
Execution execution = runnerUtils.runOne(tenantId, "io.kestra.tests", "sla-execution-condition", null, (f, e) -> Map.of("string", "LABEL"));
public void executionConditionSLAShouldLabel() throws QueueException, TimeoutException {
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "sla-execution-condition", null, (f, e) -> Map.of("string", "LABEL"));
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(execution.getLabels()).contains(new Label("sla", "violated"));

View File

@@ -3,31 +3,54 @@ package io.kestra.core.runners;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.State;
import io.kestra.core.models.flows.State.Type;
import io.kestra.core.queues.QueueException;
import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.utils.TestsUtils;
import jakarta.inject.Inject;
import jakarta.inject.Named;
import jakarta.inject.Singleton;
import reactor.core.publisher.Flux;
import java.time.ZonedDateTime;
import java.util.Optional;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertTrue;
@Singleton
public class ScheduleDateCaseTest {
@Inject
private FlowRepositoryInterface flowRepository;
@Inject
private TestRunnerUtils runnerUtils;
@Named(QueueFactoryInterface.EXECUTION_NAMED)
protected QueueInterface<Execution> executionQueue;
public void shouldScheduleOnDate(String tenantId) throws QueueException {
public void shouldScheduleOnDate() throws QueueException, InterruptedException {
ZonedDateTime scheduleOn = ZonedDateTime.now().plusSeconds(1);
Flow flow = flowRepository.findById(tenantId, "io.kestra.tests", "minimal").orElseThrow();
Flow flow = flowRepository.findById(MAIN_TENANT, "io.kestra.tests", "minimal").orElseThrow();
Execution execution = Execution.newExecution(flow, null, null, Optional.of(scheduleOn));
assertThat(execution.getScheduleDate()).isEqualTo(scheduleOn.toInstant());
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.CREATED);
this.executionQueue.emit(execution);
runnerUtils.emitAndAwaitExecution(e -> e.getState().getCurrent().equals(Type.SUCCESS), execution);
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.CREATED);
assertThat(execution.getScheduleDate()).isEqualTo(scheduleOn.toInstant());
CountDownLatch latch1 = new CountDownLatch(1);
Flux<Execution> receive = TestsUtils.receive(executionQueue, e -> {
if (e.getLeft().getId().equals(execution.getId())) {
if (e.getLeft().getState().getCurrent() == State.Type.SUCCESS) {
latch1.countDown();
}
}
});
assertTrue(latch1.await(1, TimeUnit.MINUTES));
receive.blockLast();
}
}

View File

@@ -32,7 +32,7 @@ public class SkipExecutionCaseTest {
protected QueueInterface<Execution> executionQueue;
@Inject
protected TestRunnerUtils runnerUtils;
protected RunnerUtils runnerUtils;
@Inject
private ExecutionRepositoryInterface executionRepository;

View File

@@ -30,7 +30,7 @@ public class TaskCacheTest {
static final AtomicInteger COUNTER = new AtomicInteger(0);
@Inject
private TestRunnerUtils runnerUtils;
private RunnerUtils runnerUtils;
@BeforeEach
void resetCounter() {

View File

@@ -33,7 +33,7 @@ public class TaskWithAllowFailureTest {
private FlowInputOutput flowIO;
@Inject
private TestRunnerUtils runnerUtils;
private RunnerUtils runnerUtils;
@Test
@ExecuteFlow("flows/valids/task-allow-failure-runnable.yml")

View File

@@ -9,7 +9,6 @@ import io.kestra.core.queues.QueueException;
import io.kestra.core.storages.StorageInterface;
import jakarta.inject.Inject;
import org.apache.commons.lang3.StringUtils;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import java.io.File;
@@ -35,7 +34,7 @@ public class TaskWithAllowWarningTest {
private FlowInputOutput flowIO;
@Inject
private TestRunnerUtils runnerUtils;
private RunnerUtils runnerUtils;
@Test
@ExecuteFlow("flows/valids/task-allow-warning-runnable.yml")
@@ -55,7 +54,6 @@ public class TaskWithAllowWarningTest {
}
@Test
@Disabled("This test does not test failing in subflow foreach as the subflow is not called, needs to be rework before reactivation")
@LoadFlows({"flows/valids/task-allow-warning-executable-foreachitem.yml"})
void executableTask_ForEachItem() throws TimeoutException, QueueException, URISyntaxException, IOException {
URI file = storageUpload();

View File

@@ -6,8 +6,6 @@ import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.State;
import org.junit.jupiter.api.Test;
import java.util.List;
import static org.assertj.core.api.Assertions.assertThat;
@KestraTest(startRunner = true)
@@ -33,15 +31,4 @@ class TaskWithRunIfTest {
assertThat(execution.findTaskRunsByTaskId("log_test").getFirst().getState().getCurrent()).isEqualTo(State.Type.SKIPPED);
}
@Test
@ExecuteFlow("flows/valids/task-runif-executionupdating.yml")
void executionUpdatingTask(Execution execution) {
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(execution.getTaskRunList()).hasSize(5);
assertThat(execution.findTaskRunsByTaskId("skipSetVariables").getFirst().getState().getCurrent()).isEqualTo(State.Type.SKIPPED);
assertThat(execution.findTaskRunsByTaskId("skipUnsetVariables").getFirst().getState().getCurrent()).isEqualTo(State.Type.SKIPPED);
assertThat(execution.findTaskRunsByTaskId("unsetVariables").getFirst().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(execution.findTaskRunsByTaskId("setVariables").getFirst().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(execution.getVariables()).containsEntry("list", List.of(42));
}
}

View File

@@ -18,7 +18,6 @@ import org.assertj.core.api.AbstractObjectAssert;
import org.assertj.core.api.ObjectAssert;
import org.junit.jupiter.api.Test;
import java.time.Duration;
import java.time.ZonedDateTime;
import java.time.temporal.ChronoUnit;
import java.util.List;
@@ -38,7 +37,7 @@ class TestSuiteTest {
protected QueueInterface<Execution> executionQueue;
@Inject
protected TestRunnerUtils runnerUtils;
protected RunnerUtils runnerUtils;
@Inject
protected FlowRepositoryInterface flowRepository;
@@ -147,7 +146,7 @@ class TestSuiteTest {
.state(new State())
.build();
return runnerUtils.runOne(execution, flow, Duration.ofSeconds(10));
return runnerUtils.runOne(execution, flow, null);
}
private static AbstractObjectAssert<?, Object> assertOutputForTask(Execution executionResult, String taskId) {

View File

@@ -14,13 +14,10 @@ import java.util.Date;
import java.util.Map;
import jakarta.inject.Inject;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.api.TestInstance.Lifecycle;
import static org.assertj.core.api.Assertions.assertThat;
@KestraTest
@TestInstance(Lifecycle.PER_CLASS)
class DateFilterTest {
public static final ZonedDateTime NOW = ZonedDateTime.parse("2013-09-08T16:19:12.123456+01");
@@ -147,7 +144,7 @@ class DateFilterTest {
)
);
assertThat(render).isEqualTo("1378653552123456");
assertThat(render).isEqualTo("1378653552000123456");
}
@Test

View File

@@ -14,14 +14,10 @@ public class FunctionTestUtils {
}
public static Map<String, Object> getVariables(String namespace) {
return getVariables(MAIN_TENANT, namespace);
}
public static Map<String, Object> getVariables(String tenantId, String namespace) {
return Map.of(
"flow", Map.of(
"id", "kv",
"tenantId", tenantId,
"tenantId", MAIN_TENANT,
"namespace", namespace)
);
}

View File

@@ -12,7 +12,6 @@ import io.kestra.core.storages.StorageInterface;
import io.kestra.core.storages.kv.InternalKVStore;
import io.kestra.core.storages.kv.KVStore;
import io.kestra.core.storages.kv.KVValueAndMetadata;
import io.kestra.core.utils.TestsUtils;
import jakarta.inject.Inject;
import java.io.IOException;
import java.net.URI;
@@ -30,14 +29,18 @@ public class KvFunctionTest {
@Inject
VariableRenderer variableRenderer;
@BeforeEach
void reset() throws IOException {
storageInterface.deleteByPrefix(MAIN_TENANT, null, URI.create(StorageContext.kvPrefix("io.kestra.tests")));
}
@Test
void shouldGetValueFromKVGivenExistingKey() throws IllegalVariableEvaluationException, IOException {
// Given
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
KVStore kv = new InternalKVStore(tenant, "io.kestra.tests", storageInterface);
KVStore kv = new InternalKVStore(MAIN_TENANT, "io.kestra.tests", storageInterface);
kv.put("my-key", new KVValueAndMetadata(null, Map.of("field", "value")));
Map<String, Object> variables = getVariables(tenant, "io.kestra.tests");
Map<String, Object> variables = getVariables("io.kestra.tests");
// When
String rendered = variableRenderer.render("{{ kv('my-key') }}", variables);
@@ -49,14 +52,13 @@ public class KvFunctionTest {
@Test
void shouldGetValueFromKVGivenExistingKeyWithInheritance() throws IllegalVariableEvaluationException, IOException {
// Given
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
KVStore kv = new InternalKVStore(tenant, "my.company", storageInterface);
KVStore kv = new InternalKVStore(MAIN_TENANT, "my.company", storageInterface);
kv.put("my-key", new KVValueAndMetadata(null, Map.of("field", "value")));
KVStore firstKv = new InternalKVStore(tenant, "my", storageInterface);
KVStore firstKv = new InternalKVStore(MAIN_TENANT, "my", storageInterface);
firstKv.put("my-key", new KVValueAndMetadata(null, Map.of("field", "firstValue")));
Map<String, Object> variables = getVariables(tenant, "my.company.team");
Map<String, Object> variables = getVariables("my.company.team");
// When
String rendered = variableRenderer.render("{{ kv('my-key') }}", variables);
@@ -68,11 +70,10 @@ public class KvFunctionTest {
@Test
void shouldNotGetValueFromKVWithGivenNamespaceAndInheritance() throws IOException {
// Given
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
KVStore kv = new InternalKVStore(tenant, "kv", storageInterface);
KVStore kv = new InternalKVStore(MAIN_TENANT, "kv", storageInterface);
kv.put("my-key", new KVValueAndMetadata(null, Map.of("field", "value")));
Map<String, Object> variables = getVariables(tenant, "my.company.team");
Map<String, Object> variables = getVariables("my.company.team");
// When
Assertions.assertThrows(IllegalVariableEvaluationException.class, () ->
@@ -82,11 +83,10 @@ public class KvFunctionTest {
@Test
void shouldGetValueFromKVGivenExistingAndNamespace() throws IllegalVariableEvaluationException, IOException {
// Given
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
KVStore kv = new InternalKVStore(tenant, "kv", storageInterface);
KVStore kv = new InternalKVStore(MAIN_TENANT, "kv", storageInterface);
kv.put("my-key", new KVValueAndMetadata(null, Map.of("field", "value")));
Map<String, Object> variables = getVariables(tenant, "io.kestra.tests");
Map<String, Object> variables = getVariables("io.kestra.tests");
// When
String rendered = variableRenderer.render("{{ kv('my-key', namespace='kv') }}", variables);
@@ -98,8 +98,7 @@ public class KvFunctionTest {
@Test
void shouldGetEmptyGivenNonExistingKeyAndErrorOnMissingFalse() throws IllegalVariableEvaluationException {
// Given
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
Map<String, Object> variables = getVariables(tenant, "io.kestra.tests");
Map<String, Object> variables = getVariables("io.kestra.tests");
// When
String rendered = variableRenderer.render("{{ kv('my-key', errorOnMissing=false) }}", variables);
@@ -111,8 +110,7 @@ public class KvFunctionTest {
@Test
void shouldFailGivenNonExistingKeyAndErrorOnMissingTrue() {
// Given
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
Map<String, Object> variables = getVariables(tenant, "io.kestra.tests");
Map<String, Object> variables = getVariables("io.kestra.tests");
// When
IllegalVariableEvaluationException exception = Assertions.assertThrows(IllegalVariableEvaluationException.class, () -> {
@@ -126,8 +124,7 @@ public class KvFunctionTest {
@Test
void shouldFailGivenNonExistingKeyUsingDefaults() {
// Given
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
Map<String, Object> variables = getVariables(tenant, "io.kestra.tests");
Map<String, Object> variables = getVariables("io.kestra.tests");
// When
IllegalVariableEvaluationException exception = Assertions.assertThrows(IllegalVariableEvaluationException.class, () -> {
variableRenderer.render("{{ kv('my-key') }}", variables);

View File

@@ -1,52 +0,0 @@
package io.kestra.core.runners.pebble.functions;
import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.runners.VariableRenderer;
import jakarta.inject.Inject;
import org.junit.jupiter.api.Test;
import java.util.Collections;
import java.util.Map;
import static org.assertj.core.api.Assertions.assertThat;
@KestraTest
public class NanoIDFuntionTest {
@Inject
VariableRenderer variableRenderer;
@Test
void checkStandardNanoId() throws Exception {
String rendered =
variableRenderer.render(
"{{ nanoId() }}", Collections.emptyMap());
assertThat(!rendered.isEmpty()).as(rendered).isTrue();
assertThat(rendered.length()).isEqualTo(21L);
}
@Test
void checkDifferentLength() throws Exception {
String rendered =
variableRenderer.render(
"{{ nanoId(length) }}", Map.of("length", 8L));
assertThat(!rendered.isEmpty()).as(rendered).isTrue();
assertThat(rendered.length()).isEqualTo(8L);
}
@Test
void checkDifferentAlphabet() throws Exception {
String rendered =
variableRenderer.render(
"{{ nanoId(length,alphabet) }}", Map.of("length", 21L, "alphabet", ":;<=>?@"));
assertThat(!rendered.isEmpty()).as(rendered).isTrue();
assertThat(rendered.length()).isEqualTo(21L);
for (char c : rendered.toCharArray()) {
assertThat(c).isGreaterThanOrEqualTo(':');
assertThat(c).isLessThanOrEqualTo('@');
}
}
}

View File

@@ -12,7 +12,7 @@ import io.kestra.core.models.flows.State;
import io.kestra.core.queues.QueueException;
import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface;
import io.kestra.core.runners.TestRunnerUtils;
import io.kestra.core.runners.RunnerUtils;
import io.kestra.core.runners.VariableRenderer;
import io.kestra.core.utils.TestsUtils;
import io.micronaut.test.annotation.MockBean;
@@ -41,7 +41,7 @@ public class SecretFunctionTest {
QueueInterface<LogEntry> logQueue;
@Inject
private TestRunnerUtils runnerUtils;
private RunnerUtils runnerUtils;
@Inject
private SecretService secretService;

View File

@@ -21,11 +21,9 @@ import java.nio.file.Path;
import java.time.Duration;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.time.temporal.TemporalUnit;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import java.util.stream.Collectors;
@@ -93,7 +91,7 @@ class InternalKVStoreTest {
String description = "myDescription";
kv.put(TEST_KV_KEY, new KVValueAndMetadata(new KVMetadata(description, Duration.ofMinutes(5)), complexValue));
kv.put("key-without-expiration", new KVValueAndMetadata(new KVMetadata(null, (Duration) null), complexValue));
kv.put("key-without-expiration", new KVValueAndMetadata(new KVMetadata(null, null), complexValue));
kv.put("expired-key", new KVValueAndMetadata(new KVMetadata(null, Duration.ofMillis(1)), complexValue));
List<KVEntry> list = kv.listAll();
@@ -215,22 +213,6 @@ class InternalKVStoreTest {
// When
Assertions.assertThrows(ResourceExpiredException.class, () -> kv.getValue(TEST_KV_KEY));
}
@Test
void shouldGetKVValueAndMetadata() throws IOException {
// Given
final InternalKVStore kv = kv();
KVValueAndMetadata val = new KVValueAndMetadata(new KVMetadata(null, Duration.ofMinutes(5)), complexValue);
kv.put(TEST_KV_KEY, val);
// When
Optional<KVValueAndMetadata> result = kv.findMetadataAndValue(TEST_KV_KEY);
// Then
Assertions.assertEquals(val.value(), result.get().value());
Assertions.assertEquals(val.metadata().getDescription(), result.get().metadata().getDescription());
Assertions.assertEquals(val.metadata().getExpirationDate().truncatedTo(ChronoUnit.MILLIS), result.get().metadata().getExpirationDate().truncatedTo(ChronoUnit.MILLIS));
}
@Test
void illegalKey() {

View File

@@ -5,9 +5,6 @@ import org.junit.jupiter.api.Test;
import java.util.List;
import static org.assertj.core.api.Assertions.assertThatObject;
import static org.hamcrest.MatcherAssert.assertThat;
class VersionTest {
@Test
@@ -30,27 +27,27 @@ class VersionTest {
}
@Test
void shouldCreateVersionFromStringGivenMajorMinorPatchVersion() {
void shouldCreateVersionFromStringGivenMajorMinorIncrementVersion() {
Version version = Version.of("1.2.3");
Assertions.assertEquals(1, version.majorVersion());
Assertions.assertEquals(2, version.minorVersion());
Assertions.assertEquals(3, version.patchVersion());
Assertions.assertEquals(3, version.incrementalVersion());
}
@Test
void shouldCreateVersionFromPrefixedStringGivenMajorMinorPatchVersion() {
void shouldCreateVersionFromPrefixedStringGivenMajorMinorIncrementVersion() {
Version version = Version.of("v1.2.3");
Assertions.assertEquals(1, version.majorVersion());
Assertions.assertEquals(2, version.minorVersion());
Assertions.assertEquals(3, version.patchVersion());
Assertions.assertEquals(3, version.incrementalVersion());
}
@Test
void shouldCreateVersionFromStringGivenMajorMinorPatchAndQualifierVersion() {
void shouldCreateVersionFromStringGivenMajorMinorIncrementAndQualifierVersion() {
Version version = Version.of("1.2.3-SNAPSHOT");
Assertions.assertEquals(1, version.majorVersion());
Assertions.assertEquals(2, version.minorVersion());
Assertions.assertEquals(3, version.patchVersion());
Assertions.assertEquals(3, version.incrementalVersion());
Assertions.assertEquals("SNAPSHOT", version.qualifier().toString());
}
@@ -59,7 +56,7 @@ class VersionTest {
Version version = Version.of("1.2.3-RC0-SNAPSHOT");
Assertions.assertEquals(1, version.majorVersion());
Assertions.assertEquals(2, version.minorVersion());
Assertions.assertEquals(3, version.patchVersion());
Assertions.assertEquals(3, version.incrementalVersion());
Assertions.assertEquals("RC0-SNAPSHOT", version.qualifier().toString());
}
@@ -85,13 +82,13 @@ class VersionTest {
}
@Test
void shouldGetLatestVersionGivenMajorMinorPatchVersions() {
void shouldGetLatestVersionGivenMajorMinorIncrementalVersions() {
Version result = Version.getLatest(Version.of("1.0.9"), Version.of("1.0.10"), Version.of("1.0.11"));
Assertions.assertEquals(Version.of("1.0.11"), result);
}
@Test
public void shouldGetOldestVersionGivenMajorMinorPatchVersions() {
public void shouldGetOldestVersionGivenMajorMinorIncrementalVersions() {
Version result = Version.getOldest(Version.of("1.0.9"), Version.of("1.0.10"), Version.of("1.0.11"));
Assertions.assertEquals(Version.of("1.0.9"), result);
}
@@ -145,49 +142,23 @@ class VersionTest {
@Test
public void shouldGetStableVersionGivenMajorMinorPatchVersion() {
// Given
List<Version> versions = List.of(Version.of("1.2.1"), Version.of("1.2.3"), Version.of("0.99.0"));
// When - Then
assertThatObject(Version.getStable(Version.of("1.2.1"), versions)).isEqualTo(Version.of("1.2.1"));
assertThatObject(Version.getStable(Version.of("1.2.0"), versions)).isNull();
assertThatObject(Version.getStable(Version.of("1.2.4"), versions)).isNull();
Assertions.assertEquals(Version.of("1.2.1"), Version.getStable(Version.of("1.2.1"), List.of(Version.of("1.2.1"), Version.of("1.2.3"), Version.of("0.99.0"))));
Assertions.assertEquals(Version.of("1.2.3"), Version.getStable(Version.of("1.2.0"), List.of(Version.of("1.2.1"), Version.of("1.2.3"), Version.of("0.99.0"))));
}
@Test
public void shouldGetStableGivenMajorAndMinorVersionOnly() {
// Given
List<Version> versions = List.of(Version.of("1.2.1"), Version.of("1.2.3"), Version.of("0.99.0"));
// When - Then
assertThatObject(Version.getStable(Version.of("1.2"), versions)).isEqualTo(Version.of("1.2.3"));
public void shouldGetStableVersionGivenMajorMinorVersion() {
Assertions.assertEquals(Version.of("1.2.3"), Version.getStable(Version.of("1.2"), List.of(Version.of("1.2.1"), Version.of("1.2.3"), Version.of("0.99.0"))));
}
@Test
public void shouldGetStableGivenMajorVersionOnly() {
// Given
List<Version> versions = List.of(Version.of("1.2.1"), Version.of("1.2.3"), Version.of("0.99.0"));
// When - Then
assertThatObject(Version.getStable(Version.of("1"), versions)).isEqualTo(Version.of("1.2.3"));
public void shouldGetStableVersionGivenMajorVersion() {
Assertions.assertEquals(Version.of("1.2.3"), Version.getStable(Version.of("1"), List.of(Version.of("1.2.1"), Version.of("1.2.3"), Version.of("0.99.0"))));
}
@Test
public void shouldGetNullForStableGivenMajorAndMinorVersionOnly() {
// Given
List<Version> versions = List.of(Version.of("1.2.1"), Version.of("1.2.3"), Version.of("0.99.0"));
// When - Then
assertThatObject(Version.getStable(Version.of("2.0"), versions)).isNull();
assertThatObject(Version.getStable(Version.of("0.1"), versions)).isNull();
}
@Test
public void shouldGetNullForStableGivenMajorVersionOnly() {
// Given
List<Version> versions = List.of(Version.of("1.2.1"), Version.of("1.2.3"), Version.of("0.99.0"));
// When - Then
assertThatObject(Version.getStable(Version.of("2"), versions)).isNull();
public void shouldGetNullForStableVersionGivenNoCompatibleVersions() {
Assertions.assertNull(Version.getStable(Version.of("3.0"), List.of(Version.of("1.3.0"), Version.of("2.0.0"), Version.of("0.99.0"))));
Assertions.assertNull(Version.getStable(Version.of("0.1"), List.of(Version.of("1.3.0"), Version.of("2.0.0"), Version.of("0.99.0"))));
}
}

View File

@@ -15,7 +15,7 @@ class ExitTest {
@ExecuteFlow("flows/valids/exit.yaml")
void shouldExitTheExecution(Execution execution) {
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.WARNING);
assertThat(execution.getTaskRunList()).hasSize(2);
assertThat(execution.getTaskRunList().size()).isEqualTo(2);
assertThat(execution.getTaskRunList().getFirst().getState().getCurrent()).isEqualTo(State.Type.WARNING);
}
@@ -23,19 +23,9 @@ class ExitTest {
@ExecuteFlow("flows/valids/exit-killed.yaml")
void shouldExitAndKillTheExecution(Execution execution) {
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.KILLED);
assertThat(execution.getTaskRunList()).hasSize(2);
assertThat(execution.getTaskRunList().size()).isEqualTo(2);
assertThat(execution.getTaskRunList().getFirst().getState().getCurrent()).isEqualTo(State.Type.KILLED);
assertThat(execution.getTaskRunList().get(1).getState().getCurrent()).isEqualTo(State.Type.KILLED);
}
@Test
@ExecuteFlow("flows/valids/exit-nested.yaml")
void shouldExitAndFailNestedIf(Execution execution) {
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.FAILED);
assertThat(execution.getTaskRunList()).hasSize(4);
assertThat(execution.findTaskRunsByTaskId("if_some_bool").getFirst().getState().getCurrent()).isEqualTo(State.Type.FAILED);
assertThat(execution.findTaskRunsByTaskId("nested_bool_check").getFirst().getState().getCurrent()).isEqualTo(State.Type.FAILED);
assertThat(execution.findTaskRunsByTaskId("nested_was_false").getFirst().getState().getCurrent()).isEqualTo(State.Type.FAILED);
}
}

View File

@@ -8,7 +8,7 @@ import io.kestra.core.junit.annotations.LoadFlows;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.State;
import io.kestra.core.queues.QueueException;
import io.kestra.core.runners.TestRunnerUtils;
import io.kestra.core.runners.RunnerUtils;
import jakarta.inject.Inject;
import java.time.Duration;
import java.util.Map;
@@ -19,7 +19,7 @@ import org.junit.jupiter.api.Test;
public class FailTest {
@Inject
private TestRunnerUtils runnerUtils;
private RunnerUtils runnerUtils;
@Test
@LoadFlows({"flows/valids/fail-on-switch.yaml"})

View File

@@ -5,7 +5,7 @@ import io.kestra.core.junit.annotations.LoadFlows;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.State;
import io.kestra.core.repositories.ExecutionRepositoryInterface;
import io.kestra.core.runners.TestRunnerUtils;
import io.kestra.core.runners.RunnerUtils;
import io.kestra.core.utils.Await;
import jakarta.inject.Inject;
import org.junit.jupiter.api.Test;
@@ -20,7 +20,7 @@ import static org.assertj.core.api.Assertions.assertThat;
class ResumeTest {
@Inject
private TestRunnerUtils runnerUtils;
private RunnerUtils runnerUtils;
@Inject
private ExecutionRepositoryInterface executionRepository;

View File

@@ -6,7 +6,7 @@ import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.junit.annotations.LoadFlows;
import io.kestra.core.queues.QueueException;
import io.kestra.core.runners.FlowInputOutput;
import io.kestra.core.runners.TestRunnerUtils;
import io.kestra.core.runners.RunnerUtils;
import jakarta.inject.Inject;
import org.junit.jupiter.api.Test;
import io.kestra.core.models.executions.Execution;
@@ -14,6 +14,7 @@ import io.kestra.core.models.flows.State;
import java.util.concurrent.TimeoutException;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import static org.assertj.core.api.Assertions.assertThat;
@KestraTest(startRunner = true)
@@ -21,7 +22,7 @@ class AllowFailureTest {
@Inject
private FlowInputOutput flowIO;
@Inject
protected TestRunnerUtils runnerUtils;
protected RunnerUtils runnerUtils;
@Test
@ExecuteFlow("flows/valids/allow-failure.yaml")

View File

@@ -7,7 +7,7 @@ import io.kestra.core.models.flows.State;
import io.kestra.core.queues.QueueException;
import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface;
import io.kestra.core.runners.TestRunnerUtils;
import io.kestra.core.runners.RunnerUtils;
import io.kestra.core.utils.TestsUtils;
import io.kestra.core.junit.annotations.LoadFlows;
import jakarta.inject.Inject;
@@ -31,7 +31,7 @@ class CorrelationIdTest {
@Named(QueueFactoryInterface.EXECUTION_NAMED)
private QueueInterface<Execution> executionQueue;
@Inject
private TestRunnerUtils runnerUtils;
private RunnerUtils runnerUtils;
@Test
@LoadFlows({"flows/valids/subflow-parent.yaml",

View File

@@ -12,7 +12,7 @@ import io.kestra.core.models.flows.State;
import io.kestra.core.models.validations.ModelValidator;
import io.kestra.core.queues.QueueException;
import io.kestra.core.runners.FlowInputOutput;
import io.kestra.core.runners.TestRunnerUtils;
import io.kestra.core.runners.RunnerUtils;
import io.kestra.core.serializers.YamlParser;
import io.kestra.core.utils.TestsUtils;
import jakarta.inject.Inject;
@@ -33,7 +33,7 @@ public class DagTest {
ModelValidator modelValidator;
@Inject
protected TestRunnerUtils runnerUtils;
protected RunnerUtils runnerUtils;
@Inject
private FlowInputOutput flowIO;

View File

@@ -4,7 +4,6 @@ import io.kestra.core.junit.annotations.ExecuteFlow;
import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.junit.annotations.LoadFlows;
import io.kestra.core.queues.QueueException;
import io.kestra.core.runners.TestRunnerUtils;
import io.kestra.core.utils.TestsUtils;
import org.junit.jupiter.api.Test;
import io.kestra.core.exceptions.InternalException;
@@ -14,6 +13,7 @@ import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.flows.State;
import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface;
import io.kestra.core.runners.RunnerUtils;
import java.time.Duration;
import java.util.*;
@@ -34,7 +34,7 @@ public class EachSequentialTest {
QueueInterface<LogEntry> logQueue;
@Inject
private TestRunnerUtils runnerUtils;
private RunnerUtils runnerUtils;
@Test
@ExecuteFlow("flows/valids/each-sequential.yaml")
@@ -92,7 +92,7 @@ public class EachSequentialTest {
EachSequentialTest.eachNullTest(runnerUtils, logQueue);
}
public static void eachNullTest(TestRunnerUtils runnerUtils, QueueInterface<LogEntry> logQueue) throws TimeoutException, QueueException {
public static void eachNullTest(RunnerUtils runnerUtils, QueueInterface<LogEntry> logQueue) throws TimeoutException, QueueException {
List<LogEntry> logs = new CopyOnWriteArrayList<>();
Flux<LogEntry> receive = TestsUtils.receive(logQueue, either -> logs.add(either.getLeft()));

View File

@@ -6,8 +6,9 @@ import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.State;
import io.kestra.core.queues.QueueException;
import io.kestra.core.runners.FlowInputOutput;
import io.kestra.core.runners.TestRunnerUtils;
import io.kestra.core.runners.RunnerUtils;
import jakarta.inject.Inject;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import java.time.Duration;
@@ -23,7 +24,7 @@ class FinallyTest {
public static final String NAMESPACE = "io.kestra.tests";
private static final String TENANT_ID = "tenant1";
@Inject
protected TestRunnerUtils runnerUtils;
protected RunnerUtils runnerUtils;
@Inject
private FlowInputOutput flowIO;
@@ -348,7 +349,7 @@ class FinallyTest {
TENANT_ID,
NAMESPACE, "finally-flow-error", null,
(flow, execution1) -> flowIO.readExecutionInputs(flow, execution1, Map.of("failed", true)),
Duration.ofSeconds(20)
Duration.ofSeconds(60)
);
assertThat(execution.getTaskRunList()).hasSize(6);

View File

@@ -4,79 +4,106 @@ import com.google.common.collect.ImmutableMap;
import io.kestra.core.models.Label;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.State;
import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface;
import io.kestra.core.runners.RunnerUtils;
import io.kestra.core.runners.TestRunnerUtils;
import java.time.Duration;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import io.kestra.core.utils.TestsUtils;
import jakarta.inject.Inject;
import jakarta.inject.Named;
import jakarta.inject.Singleton;
import reactor.core.publisher.Flux;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import static org.assertj.core.api.Assertions.assertThat;
@Singleton
public class FlowCaseTest {
@Inject
@Named(QueueFactoryInterface.EXECUTION_NAMED)
QueueInterface<Execution> executionQueue;
@Inject
protected TestRunnerUtils runnerUtils;
protected RunnerUtils runnerUtils;
public void waitSuccess() throws Exception {
this.run("OK", State.Type.SUCCESS, State.Type.SUCCESS, 2, "default > amazing", true);
}
public void waitFailed(String tenantId) throws Exception {
this.run("THIRD", State.Type.FAILED, State.Type.FAILED, 4, "Error Trigger ! error-t1", true, tenantId);
public void waitFailed() throws Exception {
this.run("THIRD", State.Type.FAILED, State.Type.FAILED, 4, "Error Trigger ! error-t1", true);
}
public void invalidOutputs(String tenantId) throws Exception {
this.run("FIRST", State.Type.FAILED, State.Type.SUCCESS, 2, null, true, tenantId);
public void invalidOutputs() throws Exception {
this.run("FIRST", State.Type.FAILED, State.Type.SUCCESS, 2, null, true);
}
public void noLabels(String tenantId) throws Exception {
this.run("OK", State.Type.SUCCESS, State.Type.SUCCESS, 2, "default > amazing", false, tenantId);
public void noLabels() throws Exception {
this.run("OK", State.Type.SUCCESS, State.Type.SUCCESS, 2, "default > amazing", false);
}
public void oldTaskName() throws Exception {
CountDownLatch countDownLatch = new CountDownLatch(1);
AtomicReference<Execution> triggered = new AtomicReference<>();
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
Execution execution = either.getLeft();
if (execution.getFlowId().equals("minimal") && execution.getState().getCurrent().isTerminated()) {
triggered.set(execution);
countDownLatch.countDown();
}
});
Execution execution = runnerUtils.runOne(
MAIN_TENANT,
"io.kestra.tests",
"subflow-old-task-name"
);
Execution triggered = runnerUtils.awaitFlowExecution(
e -> e.getState().getCurrent().isTerminated(), MAIN_TENANT, "io.kestra.tests",
"minimal");
countDownLatch.await(1, TimeUnit.MINUTES);
receive.blockLast();
assertThat(execution.getTaskRunList()).hasSize(1);
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(execution.getTaskRunList().getFirst().getOutputs().get("executionId")).isEqualTo(triggered.getId());
assertThat(triggered.getTrigger().getType()).isEqualTo("io.kestra.core.tasks.flows.Subflow");
assertThat(triggered.getTrigger().getVariables().get("executionId")).isEqualTo(execution.getId());
assertThat(triggered.getTrigger().getVariables().get("flowId")).isEqualTo(execution.getFlowId());
assertThat(triggered.getTrigger().getVariables().get("namespace")).isEqualTo(execution.getNamespace());
}
void run(String input, State.Type fromState, State.Type triggerState, int count, String outputs, boolean testInherited)
throws Exception {
run(input, fromState, triggerState, count, outputs, testInherited, MAIN_TENANT);
assertThat(execution.getTaskRunList().getFirst().getOutputs().get("executionId")).isEqualTo(triggered.get().getId());
assertThat(triggered.get().getTrigger().getType()).isEqualTo("io.kestra.core.tasks.flows.Subflow");
assertThat(triggered.get().getTrigger().getVariables().get("executionId")).isEqualTo(execution.getId());
assertThat(triggered.get().getTrigger().getVariables().get("flowId")).isEqualTo(execution.getFlowId());
assertThat(triggered.get().getTrigger().getVariables().get("namespace")).isEqualTo(execution.getNamespace());
}
@SuppressWarnings({"ResultOfMethodCallIgnored", "unchecked"})
void run(String input, State.Type fromState, State.Type triggerState, int count, String outputs, boolean testInherited, String tenantId) throws Exception {
void run(String input, State.Type fromState, State.Type triggerState, int count, String outputs, boolean testInherited) throws Exception {
CountDownLatch countDownLatch = new CountDownLatch(1);
AtomicReference<Execution> triggered = new AtomicReference<>();
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
Execution execution = either.getLeft();
if (execution.getFlowId().equals("switch") && execution.getState().getCurrent().isTerminated()) {
triggered.set(execution);
countDownLatch.countDown();
}
});
Execution execution = runnerUtils.runOne(
tenantId,
MAIN_TENANT,
"io.kestra.tests",
testInherited ? "task-flow" : "task-flow-inherited-labels",
null,
(f, e) -> ImmutableMap.of("string", input),
Duration.ofMinutes(1),
Duration.ofSeconds(15),
testInherited ? List.of(new Label("mainFlowExecutionLabel", "execFoo")) : List.of()
);
Execution triggered = runnerUtils.awaitFlowExecution(
e -> e.getState().getCurrent().isTerminated(), tenantId, "io.kestra.tests", "switch");
countDownLatch.await(1, TimeUnit.MINUTES);
receive.blockLast();
assertThat(execution.getTaskRunList()).hasSize(1);
assertThat(execution.getTaskRunList().getFirst().getAttempts()).hasSize(1);
@@ -87,27 +114,27 @@ public class FlowCaseTest {
assertThat(((Map<String, String>) execution.getTaskRunList().getFirst().getOutputs().get("outputs")).get("extracted")).contains(outputs);
}
assertThat(execution.getTaskRunList().getFirst().getOutputs().get("executionId")).isEqualTo(triggered.getId());
assertThat(execution.getTaskRunList().getFirst().getOutputs().get("executionId")).isEqualTo(triggered.get().getId());
if (outputs != null) {
assertThat(execution.getTaskRunList().getFirst().getOutputs().get("state")).isEqualTo(triggered.getState().getCurrent().name());
assertThat(execution.getTaskRunList().getFirst().getOutputs().get("state")).isEqualTo(triggered.get().getState().getCurrent().name());
}
assertThat(triggered.getTrigger().getType()).isEqualTo("io.kestra.plugin.core.flow.Subflow");
assertThat(triggered.getTrigger().getVariables().get("executionId")).isEqualTo(execution.getId());
assertThat(triggered.getTrigger().getVariables().get("flowId")).isEqualTo(execution.getFlowId());
assertThat(triggered.getTrigger().getVariables().get("namespace")).isEqualTo(execution.getNamespace());
assertThat(triggered.get().getTrigger().getType()).isEqualTo("io.kestra.plugin.core.flow.Subflow");
assertThat(triggered.get().getTrigger().getVariables().get("executionId")).isEqualTo(execution.getId());
assertThat(triggered.get().getTrigger().getVariables().get("flowId")).isEqualTo(execution.getFlowId());
assertThat(triggered.get().getTrigger().getVariables().get("namespace")).isEqualTo(execution.getNamespace());
assertThat(triggered.getTaskRunList()).hasSize(count);
assertThat(triggered.getState().getCurrent()).isEqualTo(triggerState);
assertThat(triggered.get().getTaskRunList()).hasSize(count);
assertThat(triggered.get().getState().getCurrent()).isEqualTo(triggerState);
if (testInherited) {
assertThat(triggered.getLabels().size()).isEqualTo(6);
assertThat(triggered.getLabels()).contains(new Label(Label.CORRELATION_ID, execution.getId()), new Label("mainFlowExecutionLabel", "execFoo"), new Label("mainFlowLabel", "flowFoo"), new Label("launchTaskLabel", "launchFoo"), new Label("switchFlowLabel", "switchFoo"), new Label("overriding", "child"));
assertThat(triggered.get().getLabels().size()).isEqualTo(6);
assertThat(triggered.get().getLabels()).contains(new Label(Label.CORRELATION_ID, execution.getId()), new Label("mainFlowExecutionLabel", "execFoo"), new Label("mainFlowLabel", "flowFoo"), new Label("launchTaskLabel", "launchFoo"), new Label("switchFlowLabel", "switchFoo"), new Label("overriding", "child"));
} else {
assertThat(triggered.getLabels().size()).isEqualTo(4);
assertThat(triggered.getLabels()).contains(new Label(Label.CORRELATION_ID, execution.getId()), new Label("launchTaskLabel", "launchFoo"), new Label("switchFlowLabel", "switchFoo"), new Label("overriding", "child"));
assertThat(triggered.getLabels()).doesNotContain(new Label("inherited", "label"));
assertThat(triggered.get().getLabels().size()).isEqualTo(4);
assertThat(triggered.get().getLabels()).contains(new Label(Label.CORRELATION_ID, execution.getId()), new Label("launchTaskLabel", "launchFoo"), new Label("switchFlowLabel", "switchFoo"), new Label("overriding", "child"));
assertThat(triggered.get().getLabels()).doesNotContain(new Label("inherited", "label"));
}
}
}

View File

@@ -5,7 +5,9 @@ import io.kestra.core.junit.annotations.LoadFlows;
import org.junit.jupiter.api.Test;
import jakarta.inject.Inject;
import org.junit.jupiter.api.TestInstance;
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
@KestraTest(startRunner = true)
class FlowTest {
@Inject
@@ -22,25 +24,25 @@ class FlowTest {
@Test
@LoadFlows(value = {"flows/valids/task-flow.yaml",
"flows/valids/task-flow-inherited-labels.yaml",
"flows/valids/switch.yaml"}, tenantId = "tenant1")
"flows/valids/switch.yaml"})
void waitFailed() throws Exception {
flowCaseTest.waitFailed("tenant1");
flowCaseTest.waitFailed();
}
@Test
@LoadFlows(value = {"flows/valids/task-flow.yaml",
@LoadFlows({"flows/valids/task-flow.yaml",
"flows/valids/task-flow-inherited-labels.yaml",
"flows/valids/switch.yaml"}, tenantId = "tenant2")
"flows/valids/switch.yaml"})
void invalidOutputs() throws Exception {
flowCaseTest.invalidOutputs("tenant2");
flowCaseTest.invalidOutputs();
}
@Test
@LoadFlows(value = {"flows/valids/task-flow.yaml",
@LoadFlows({"flows/valids/task-flow.yaml",
"flows/valids/task-flow-inherited-labels.yaml",
"flows/valids/switch.yaml"}, tenantId = "tenant3")
"flows/valids/switch.yaml"})
void noLabels() throws Exception {
flowCaseTest.noLabels("tenant3");
flowCaseTest.noLabels();
}
@Test

View File

@@ -4,17 +4,20 @@ import io.kestra.core.models.Label;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.State;
import io.kestra.core.queues.QueueException;
import io.kestra.core.repositories.ArrayListTotal;
import io.kestra.core.repositories.ExecutionRepositoryInterface;
import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface;
import io.kestra.core.runners.FlowInputOutput;
import io.kestra.core.runners.TestRunnerUtils;
import io.kestra.core.runners.RunnerUtils;
import io.kestra.core.services.ExecutionService;
import io.kestra.core.storages.StorageInterface;
import io.micronaut.data.model.Pageable;
import io.kestra.core.utils.Await;
import io.kestra.core.utils.TestsUtils;
import jakarta.inject.Inject;
import jakarta.inject.Named;
import jakarta.inject.Singleton;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import reactor.core.publisher.Flux;
import java.io.BufferedReader;
import java.io.File;
@@ -28,25 +31,34 @@ import java.nio.file.Files;
import java.time.Duration;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Optional;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.IntStream;
import static io.kestra.core.models.flows.State.Type.FAILED;
import static io.kestra.core.models.flows.State.Type.SUCCESS;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import static io.kestra.core.utils.Rethrow.throwRunnable;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertTrue;
@Slf4j
@Singleton
public class ForEachItemCaseTest {
static final String TEST_NAMESPACE = "io.kestra.tests";
@Inject
@Named(QueueFactoryInterface.EXECUTION_NAMED)
private QueueInterface<Execution> executionQueue;
@Inject
private StorageInterface storageInterface;
@Inject
protected TestRunnerUtils runnerUtils;
protected RunnerUtils runnerUtils;
@Inject
private FlowInputOutput flowIO;
@@ -54,19 +66,28 @@ public class ForEachItemCaseTest {
@Inject
private ExecutionService executionService;
@Inject
private ExecutionRepositoryInterface executionRepository;
@SuppressWarnings("unchecked")
public void forEachItem() throws TimeoutException, URISyntaxException, IOException, QueueException {
URI file = storageUpload(MAIN_TENANT);
public void forEachItem() throws TimeoutException, InterruptedException, URISyntaxException, IOException, QueueException {
CountDownLatch countDownLatch = new CountDownLatch(26);
AtomicReference<Execution> triggered = new AtomicReference<>();
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
Execution execution = either.getLeft();
if (execution.getFlowId().equals("for-each-item-subflow") && execution.getState().getCurrent().isTerminated()) {
triggered.set(execution);
countDownLatch.countDown();
}
});
URI file = storageUpload();
Map<String, Object> inputs = Map.of("file", file.toString(), "batch", 4);
Execution execution = runnerUtils.runOne(MAIN_TENANT, TEST_NAMESPACE, "for-each-item", null,
(flow, execution1) -> flowIO.readExecutionInputs(flow, execution1, inputs),
Duration.ofSeconds(30));
// we should have triggered 26 subflows
List<Execution> triggeredExecs = runnerUtils.awaitFlowExecutionNumber(26, MAIN_TENANT, TEST_NAMESPACE, "for-each-item-subflow");
assertThat(countDownLatch.await(1, TimeUnit.MINUTES)).isTrue();
receive.blockLast();
// assert on the main flow execution
assertThat(execution.getTaskRunList()).hasSize(4);
@@ -82,20 +103,19 @@ public class ForEachItemCaseTest {
assertThat(iterations.get("SUCCESS")).isEqualTo(26);
// assert on the last subflow execution
Execution triggered = triggeredExecs.getLast();
assertThat(triggered.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(triggered.getFlowId()).isEqualTo("for-each-item-subflow");
assertThat((String) triggered.getInputs().get("items")).matches("kestra:///io/kestra/tests/for-each-item/executions/.*/tasks/each-split/.*\\.txt");
assertThat(triggered.getTaskRunList()).hasSize(1);
Optional<Label> correlationId = triggered.getLabels().stream().filter(label -> label.key().equals(Label.CORRELATION_ID)).findAny();
assertThat(triggered.get().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(triggered.get().getFlowId()).isEqualTo("for-each-item-subflow");
assertThat((String) triggered.get().getInputs().get("items")).matches("kestra:///io/kestra/tests/for-each-item/executions/.*/tasks/each-split/.*\\.txt");
assertThat(triggered.get().getTaskRunList()).hasSize(1);
Optional<Label> correlationId = triggered.get().getLabels().stream().filter(label -> label.key().equals(Label.CORRELATION_ID)).findAny();
assertThat(correlationId.isPresent()).isTrue();
assertThat(correlationId.get().value()).isEqualTo(execution.getId());
}
public void forEachItemEmptyItems(String tenantId) throws TimeoutException, URISyntaxException, IOException, QueueException {
URI file = emptyItems(tenantId);
public void forEachItemEmptyItems() throws TimeoutException, URISyntaxException, IOException, QueueException {
URI file = emptyItems();
Map<String, Object> inputs = Map.of("file", file.toString(), "batch", 4);
Execution execution = runnerUtils.runOne(tenantId, TEST_NAMESPACE, "for-each-item", null,
Execution execution = runnerUtils.runOne(MAIN_TENANT, TEST_NAMESPACE, "for-each-item", null,
(flow, execution1) -> flowIO.readExecutionInputs(flow, execution1, inputs),
Duration.ofSeconds(30));
@@ -107,8 +127,21 @@ public class ForEachItemCaseTest {
}
@SuppressWarnings("unchecked")
public void forEachItemNoWait() throws TimeoutException, URISyntaxException, IOException, QueueException {
URI file = storageUpload(MAIN_TENANT);
public void forEachItemNoWait() throws TimeoutException, InterruptedException, URISyntaxException, IOException, QueueException {
CountDownLatch countDownLatch = new CountDownLatch(26);
AtomicReference<Execution> triggered = new AtomicReference<>();
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
Execution execution = either.getLeft();
if (execution.getFlowId().equals("for-each-item-subflow-sleep")) {
if (execution.getState().getCurrent().isTerminated()) {
triggered.set(execution);
countDownLatch.countDown();
}
}
});
URI file = storageUpload();
Map<String, Object> inputs = Map.of("file", file.toString());
Execution execution = runnerUtils.runOne(MAIN_TENANT, TEST_NAMESPACE, "for-each-item-no-wait", null,
(flow, execution1) -> flowIO.readExecutionInputs(flow, execution1, inputs),
@@ -116,9 +149,7 @@ public class ForEachItemCaseTest {
// assert that not all subflows ran (depending on the speed of execution, there can be some)
// be careful that it's racy.
ArrayListTotal<Execution> subFlowExecs = executionRepository.findByFlowId(MAIN_TENANT,
TEST_NAMESPACE, "for-each-item-subflow-sleep", Pageable.UNPAGED);
assertThat(subFlowExecs.size()).isLessThanOrEqualTo(26);
assertThat(countDownLatch.getCount()).isGreaterThan(0L);
// assert on the main flow execution
assertThat(execution.getTaskRunList()).hasSize(4);
@@ -134,27 +165,38 @@ public class ForEachItemCaseTest {
assertThat(iterations.get("SUCCESS")).isEqualTo(26);
// wait for the 26 flows to ends
List<Execution> triggeredExecs = runnerUtils.awaitFlowExecutionNumber(26, MAIN_TENANT, TEST_NAMESPACE, "for-each-item-subflow-sleep");
Execution triggered = triggeredExecs.getLast();
assertThat(countDownLatch.await(1, TimeUnit.MINUTES)).as("Remaining count was " + countDownLatch.getCount()).isTrue();
receive.blockLast();
// assert on the last subflow execution
assertThat(triggered.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(triggered.getFlowId()).isEqualTo("for-each-item-subflow-sleep");
assertThat((String) triggered.getInputs().get("items")).matches("kestra:///io/kestra/tests/for-each-item-no-wait/executions/.*/tasks/each-split/.*\\.txt");
assertThat(triggered.getTaskRunList()).hasSize(2);
assertThat(triggered.get().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(triggered.get().getFlowId()).isEqualTo("for-each-item-subflow-sleep");
assertThat((String) triggered.get().getInputs().get("items")).matches("kestra:///io/kestra/tests/for-each-item-no-wait/executions/.*/tasks/each-split/.*\\.txt");
assertThat(triggered.get().getTaskRunList()).hasSize(2);
}
@SuppressWarnings("unchecked")
public void forEachItemFailed() throws TimeoutException, URISyntaxException, IOException, QueueException {
URI file = storageUpload(MAIN_TENANT);
public void forEachItemFailed() throws TimeoutException, InterruptedException, URISyntaxException, IOException, QueueException {
CountDownLatch countDownLatch = new CountDownLatch(26);
AtomicReference<Execution> triggered = new AtomicReference<>();
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
Execution execution = either.getLeft();
if (execution.getFlowId().equals("for-each-item-subflow-failed") && execution.getState().getCurrent().isTerminated()) {
triggered.set(execution);
countDownLatch.countDown();
}
});
URI file = storageUpload();
Map<String, Object> inputs = Map.of("file", file.toString());
Execution execution = runnerUtils.runOne(MAIN_TENANT, TEST_NAMESPACE, "for-each-item-failed", null,
(flow, execution1) -> flowIO.readExecutionInputs(flow, execution1, inputs),
Duration.ofSeconds(60));
// we should have triggered 26 subflows
List<Execution> triggeredExecs = runnerUtils.awaitFlowExecutionNumber(26, MAIN_TENANT, TEST_NAMESPACE, "for-each-item-subflow-failed");
Execution triggered = triggeredExecs.getLast();
assertThat(countDownLatch.await(1, TimeUnit.MINUTES)).isTrue();
receive.blockLast();
// assert on the main flow execution
assertThat(execution.getTaskRunList()).hasSize(3);
@@ -170,23 +212,34 @@ public class ForEachItemCaseTest {
assertThat(iterations.get("FAILED")).isEqualTo(26);
// assert on the last subflow execution
assertThat(triggered.getState().getCurrent()).isEqualTo(FAILED);
assertThat(triggered.getFlowId()).isEqualTo("for-each-item-subflow-failed");
assertThat((String) triggered.getInputs().get("items")).matches("kestra:///io/kestra/tests/for-each-item-failed/executions/.*/tasks/each-split/.*\\.txt");
assertThat(triggered.getTaskRunList()).hasSize(1);
assertThat(triggered.get().getState().getCurrent()).isEqualTo(FAILED);
assertThat(triggered.get().getFlowId()).isEqualTo("for-each-item-subflow-failed");
assertThat((String) triggered.get().getInputs().get("items")).matches("kestra:///io/kestra/tests/for-each-item-failed/executions/.*/tasks/each-split/.*\\.txt");
assertThat(triggered.get().getTaskRunList()).hasSize(1);
}
@SuppressWarnings("unchecked")
public void forEachItemWithSubflowOutputs() throws TimeoutException, URISyntaxException, IOException, QueueException {
URI file = storageUpload(MAIN_TENANT);
public void forEachItemWithSubflowOutputs() throws TimeoutException, InterruptedException, URISyntaxException, IOException, QueueException {
CountDownLatch countDownLatch = new CountDownLatch(26);
AtomicReference<Execution> triggered = new AtomicReference<>();
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
Execution execution = either.getLeft();
if (execution.getFlowId().equals("for-each-item-outputs-subflow") && execution.getState().getCurrent().isTerminated()) {
triggered.set(execution);
countDownLatch.countDown();
}
});
URI file = storageUpload();
Map<String, Object> inputs = Map.of("file", file.toString());
Execution execution = runnerUtils.runOne(MAIN_TENANT, TEST_NAMESPACE, "for-each-item-outputs", null,
(flow, execution1) -> flowIO.readExecutionInputs(flow, execution1, inputs),
Duration.ofSeconds(30));
// we should have triggered 26 subflows
List<Execution> triggeredExecs = runnerUtils.awaitFlowExecutionNumber(26, MAIN_TENANT, TEST_NAMESPACE, "for-each-item-outputs-subflow");
Execution triggered = triggeredExecs.getLast();
assertThat(countDownLatch.await(1, TimeUnit.MINUTES)).isTrue();
receive.blockLast();
// assert on the main flow execution
assertThat(execution.getTaskRunList()).hasSize(5);
@@ -203,10 +256,10 @@ public class ForEachItemCaseTest {
assertThat(iterations.get("SUCCESS")).isEqualTo(26);
// assert on the last subflow execution
assertThat(triggered.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(triggered.getFlowId()).isEqualTo("for-each-item-outputs-subflow");
assertThat((String) triggered.getInputs().get("items")).matches("kestra:///io/kestra/tests/for-each-item-outputs/executions/.*/tasks/each-split/.*\\.txt");
assertThat(triggered.getTaskRunList()).hasSize(1);
assertThat(triggered.get().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(triggered.get().getFlowId()).isEqualTo("for-each-item-outputs-subflow");
assertThat((String) triggered.get().getInputs().get("items")).matches("kestra:///io/kestra/tests/for-each-item-outputs/executions/.*/tasks/each-split/.*\\.txt");
assertThat(triggered.get().getTaskRunList()).hasSize(1);
// asserts for subflow merged outputs
Map<String, Object> mergeTaskOutputs = execution.getTaskRunList().get(3).getOutputs();
@@ -219,39 +272,84 @@ public class ForEachItemCaseTest {
}
}
public void restartForEachItem(String tenantId) throws Exception {
URI file = storageUpload(tenantId);
public void restartForEachItem() throws Exception {
CountDownLatch countDownLatch = new CountDownLatch(6);
Flux<Execution> receiveSubflows = TestsUtils.receive(executionQueue, either -> {
Execution subflowExecution = either.getLeft();
if (subflowExecution.getFlowId().equals("restart-child") && subflowExecution.getState().getCurrent().isFailed()) {
countDownLatch.countDown();
}
});
URI file = storageUpload();
Map<String, Object> inputs = Map.of("file", file.toString(), "batch", 20);
final Execution failedExecution = runnerUtils.runOne(tenantId, TEST_NAMESPACE, "restart-for-each-item", null,
final Execution failedExecution = runnerUtils.runOne(MAIN_TENANT, TEST_NAMESPACE, "restart-for-each-item", null,
(flow, execution1) -> flowIO.readExecutionInputs(flow, execution1, inputs),
Duration.ofSeconds(30));
assertThat(failedExecution.getTaskRunList()).hasSize(3);
assertThat(failedExecution.getState().getCurrent()).isEqualTo(FAILED);
// here we must have 1 failed subflows
List<Execution> triggeredExecs = runnerUtils.awaitFlowExecutionNumber(6, tenantId, TEST_NAMESPACE, "restart-child");
assertThat(triggeredExecs).extracting(e -> e.getState().getCurrent()).containsOnly(FAILED);
assertTrue(countDownLatch.await(1, TimeUnit.MINUTES), "first run of flow should have FAILED");
receiveSubflows.blockLast();
Await.until(
() -> "first FAILED run of flow should have been persisted",
() -> getPersistedExecution(MAIN_TENANT, failedExecution.getId())
.map(exec -> exec.getState().getCurrent() == FAILED)
.orElse(false),
Duration.of(100, TimeUnit.MILLISECONDS.toChronoUnit()),
Duration.of(10, TimeUnit.SECONDS.toChronoUnit())
);
CountDownLatch successLatch = new CountDownLatch(6);
receiveSubflows = TestsUtils.receive(executionQueue, either -> {
Execution subflowExecution = either.getLeft();
if (subflowExecution.getFlowId().equals("restart-child") && subflowExecution.getState().getCurrent().isSuccess()) {
successLatch.countDown();
}
});
Execution restarted = executionService.restart(failedExecution, null);
final Execution successExecution = runnerUtils.emitAndAwaitExecution(
final Execution successExecution = runnerUtils.awaitExecution(
e -> e.getState().getCurrent() == State.Type.SUCCESS && e.getFlowId().equals("restart-for-each-item"),
restarted
throwRunnable(() -> executionQueue.emit(restarted)),
Duration.ofSeconds(20)
);
assertThat(successExecution.getTaskRunList()).hasSize(4);
triggeredExecs = runnerUtils.awaitFlowExecutionNumber(6, tenantId, TEST_NAMESPACE, "restart-child");
assertThat(triggeredExecs).extracting(e -> e.getState().getCurrent()).containsOnly(SUCCESS);
assertTrue(successLatch.await(1, TimeUnit.MINUTES), "second run of flow should have SUCCESS");
receiveSubflows.blockLast();
}
public void forEachItemInIf(String tenantId) throws TimeoutException, URISyntaxException, IOException, QueueException {
URI file = storageUpload(tenantId);
private Optional<Execution> getPersistedExecution(String tenant, String executionId) {
try {
return Optional.of(executionService.getExecution(tenant, executionId, false));
} catch (NoSuchElementException e) {
return Optional.empty();
}
}
public void forEachItemInIf() throws TimeoutException, InterruptedException, URISyntaxException, IOException, QueueException {
CountDownLatch countDownLatch = new CountDownLatch(26);
AtomicReference<Execution> triggered = new AtomicReference<>();
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
Execution execution = either.getLeft();
if (execution.getFlowId().equals("for-each-item-subflow") && execution.getState().getCurrent().isTerminated()) {
triggered.set(execution);
countDownLatch.countDown();
}
});
URI file = storageUpload();
Map<String, Object> inputs = Map.of("file", file.toString(), "batch", 4);
Execution execution = runnerUtils.runOne(tenantId, TEST_NAMESPACE, "for-each-item-in-if", null,
Execution execution = runnerUtils.runOne(MAIN_TENANT, TEST_NAMESPACE, "for-each-item-in-if", null,
(flow, execution1) -> flowIO.readExecutionInputs(flow, execution1, inputs),
Duration.ofSeconds(30));
// we should have triggered 26 subflows
List<Execution> triggeredExecs = runnerUtils.awaitFlowExecutionNumber(26, tenantId, TEST_NAMESPACE, "for-each-item-subflow");
Execution triggered = triggeredExecs.getLast();
assertTrue(countDownLatch.await(20, TimeUnit.SECONDS), "Remaining countdown: %s".formatted(countDownLatch.getCount()));
receive.blockLast();
// assert on the main flow execution
assertThat(execution.getTaskRunList()).hasSize(5);
@@ -265,25 +363,36 @@ public class ForEachItemCaseTest {
assertThat(iterations.get("SUCCESS")).isEqualTo(26);
// assert on the last subflow execution
assertThat(triggered.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(triggered.getFlowId()).isEqualTo("for-each-item-subflow");
assertThat((String) triggered.getInputs().get("items")).matches("kestra:///io/kestra/tests/for-each-item-in-if/executions/.*/tasks/each-split/.*\\.txt");
assertThat(triggered.getTaskRunList()).hasSize(1);
Optional<Label> correlationId = triggered.getLabels().stream().filter(label -> label.key().equals(Label.CORRELATION_ID)).findAny();
assertThat(triggered.get().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(triggered.get().getFlowId()).isEqualTo("for-each-item-subflow");
assertThat((String) triggered.get().getInputs().get("items")).matches("kestra:///io/kestra/tests/for-each-item-in-if/executions/.*/tasks/each-split/.*\\.txt");
assertThat(triggered.get().getTaskRunList()).hasSize(1);
Optional<Label> correlationId = triggered.get().getLabels().stream().filter(label -> label.key().equals(Label.CORRELATION_ID)).findAny();
assertThat(correlationId.isPresent()).isTrue();
assertThat(correlationId.get().value()).isEqualTo(execution.getId());
}
public void forEachItemWithAfterExecution() throws TimeoutException, URISyntaxException, IOException, QueueException {
URI file = storageUpload(MAIN_TENANT);
public void forEachItemWithAfterExecution() throws TimeoutException, InterruptedException, URISyntaxException, IOException, QueueException {
CountDownLatch countDownLatch = new CountDownLatch(26);
AtomicReference<Execution> triggered = new AtomicReference<>();
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
Execution execution = either.getLeft();
if (execution.getFlowId().equals("for-each-item-subflow-after-execution") && execution.getState().getCurrent().isTerminated()) {
triggered.set(execution);
countDownLatch.countDown();
}
});
URI file = storageUpload();
Map<String, Object> inputs = Map.of("file", file.toString(), "batch", 4);
Execution execution = runnerUtils.runOne(MAIN_TENANT, TEST_NAMESPACE, "for-each-item-after-execution", null,
(flow, execution1) -> flowIO.readExecutionInputs(flow, execution1, inputs),
Duration.ofSeconds(30));
// we should have triggered 26 subflows
List<Execution> triggeredExecs = runnerUtils.awaitFlowExecutionNumber(26, MAIN_TENANT, TEST_NAMESPACE, "for-each-item-subflow-after-execution");
Execution triggered = triggeredExecs.getLast();
assertThat(countDownLatch.await(1, TimeUnit.MINUTES)).isTrue();
receive.blockLast();
// assert on the main flow execution
assertThat(execution.getTaskRunList()).hasSize(5);
@@ -299,33 +408,33 @@ public class ForEachItemCaseTest {
assertThat(iterations.get("SUCCESS")).isEqualTo(26);
// assert on the last subflow execution
assertThat(triggered.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(triggered.getFlowId()).isEqualTo("for-each-item-subflow-after-execution");
assertThat((String) triggered.getInputs().get("items")).matches("kestra:///io/kestra/tests/for-each-item-after-execution/executions/.*/tasks/each-split/.*\\.txt");
assertThat(triggered.getTaskRunList()).hasSize(2);
Optional<Label> correlationId = triggered.getLabels().stream().filter(label -> label.key().equals(Label.CORRELATION_ID)).findAny();
assertThat(triggered.get().getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(triggered.get().getFlowId()).isEqualTo("for-each-item-subflow-after-execution");
assertThat((String) triggered.get().getInputs().get("items")).matches("kestra:///io/kestra/tests/for-each-item-after-execution/executions/.*/tasks/each-split/.*\\.txt");
assertThat(triggered.get().getTaskRunList()).hasSize(2);
Optional<Label> correlationId = triggered.get().getLabels().stream().filter(label -> label.key().equals(Label.CORRELATION_ID)).findAny();
assertThat(correlationId.isPresent()).isTrue();
assertThat(correlationId.get().value()).isEqualTo(execution.getId());
}
private URI storageUpload(String tenantId) throws URISyntaxException, IOException {
private URI storageUpload() throws URISyntaxException, IOException {
File tempFile = File.createTempFile("file", ".txt");
Files.write(tempFile.toPath(), content());
return storageInterface.put(
tenantId,
MAIN_TENANT,
null,
new URI("/file/storage/file.txt"),
new FileInputStream(tempFile)
);
}
private URI emptyItems(String tenantId) throws URISyntaxException, IOException {
private URI emptyItems() throws URISyntaxException, IOException {
File tempFile = File.createTempFile("file", ".txt");
return storageInterface.put(
tenantId,
MAIN_TENANT,
null,
new URI("/file/storage/file.txt"),
new FileInputStream(tempFile)

View File

@@ -2,16 +2,12 @@ package io.kestra.plugin.core.flow;
import static org.assertj.core.api.Assertions.assertThat;
import io.kestra.core.exceptions.InternalException;
import io.kestra.core.junit.annotations.ExecuteFlow;
import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.TaskRun;
import io.kestra.core.models.flows.State;
import org.junit.jupiter.api.Test;
import java.util.List;
@KestraTest(startRunner = true)
class ForEachTest {
@@ -64,13 +60,4 @@ class ForEachTest {
void nested(Execution execution) {
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
}
@Test
@ExecuteFlow("flows/valids/foreach-iteration.yaml")
void iteration(Execution execution) throws InternalException {
List<TaskRun> seconds = execution.findTaskRunsByTaskId("second");
assertThat(seconds).hasSize(2);
assertThat(seconds.get(0).getIteration()).isEqualTo(0);
assertThat(seconds.get(1).getIteration()).isEqualTo(1);
}
}

View File

@@ -6,7 +6,7 @@ import io.kestra.core.junit.annotations.LoadFlows;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.State;
import io.kestra.core.queues.QueueException;
import io.kestra.core.runners.TestRunnerUtils;
import io.kestra.core.runners.RunnerUtils;
import jakarta.inject.Inject;
import org.junit.jupiter.api.Test;
@@ -23,7 +23,7 @@ class IfTest {
private static final String TENANT_ID = "true";
@Inject
private TestRunnerUtils runnerUtils;
private RunnerUtils runnerUtils;
@Test
@LoadFlows(value = {"flows/valids/if-condition.yaml"}, tenantId = TENANT_ID)

View File

@@ -3,9 +3,10 @@ package io.kestra.plugin.core.flow;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.State;
import io.kestra.core.queues.QueueException;
import io.kestra.core.runners.TestRunnerUtils;
import io.kestra.core.runners.RunnerUtils;
import jakarta.inject.Inject;
import java.time.Duration;
import java.util.Map;
import java.util.concurrent.TimeoutException;
@@ -15,7 +16,7 @@ import static org.assertj.core.api.Assertions.assertThat;
public class LoopUntilCaseTest {
@Inject
protected TestRunnerUtils runnerUtils;
protected RunnerUtils runnerUtils;
public void waitfor() throws TimeoutException, QueueException {
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "waitfor");

View File

@@ -10,7 +10,7 @@ import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.State;
import io.kestra.core.queues.QueueException;
import io.kestra.core.runners.FlowInputOutput;
import io.kestra.core.runners.TestRunnerUtils;
import io.kestra.core.runners.RunnerUtils;
import jakarta.inject.Inject;
import org.junit.jupiter.api.Test;
@@ -21,7 +21,7 @@ import java.util.concurrent.TimeoutException;
@KestraTest(startRunner = true)
class ParallelTest {
@Inject
protected TestRunnerUtils runnerUtils;
protected RunnerUtils runnerUtils;
@Inject
private FlowInputOutput flowIO;

View File

@@ -8,8 +8,10 @@ import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.State;
import io.kestra.core.queues.QueueException;
import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.runners.TestRunnerUtils;
import io.kestra.core.runners.RunnerUtils;
import io.kestra.core.services.ExecutionService;
import io.kestra.core.storages.StorageInterface;
import io.micronaut.http.MediaType;
@@ -21,6 +23,7 @@ import io.micronaut.http.server.netty.multipart.NettyCompletedFileUpload;
import io.netty.buffer.Unpooled;
import io.netty.handler.codec.http.multipart.*;
import jakarta.inject.Inject;
import jakarta.inject.Named;
import jakarta.inject.Singleton;
import jakarta.validation.ConstraintViolationException;
import org.junit.jupiter.api.Disabled;
@@ -37,6 +40,7 @@ import java.util.Map;
import java.util.concurrent.TimeoutException;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import static io.kestra.core.utils.Rethrow.throwRunnable;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertThrows;
@@ -44,7 +48,7 @@ import static org.junit.jupiter.api.Assertions.assertThrows;
public class PauseTest {
@Inject
TestRunnerUtils runnerUtils;
RunnerUtils runnerUtils;
@Inject
Suite suite;
@@ -152,7 +156,11 @@ public class PauseTest {
@Inject
StorageInterface storageInterface;
public void run(TestRunnerUtils runnerUtils) throws Exception {
@Inject
@Named(QueueFactoryInterface.EXECUTION_NAMED)
protected QueueInterface<Execution> executionQueue;
public void run(RunnerUtils runnerUtils) throws Exception {
Execution execution = runnerUtils.runOneUntilPaused(MAIN_TENANT, "io.kestra.tests", "pause-test", null, null, Duration.ofSeconds(30));
String executionId = execution.getId();
Flow flow = flowRepository.findByExecution(execution);
@@ -168,15 +176,16 @@ public class PauseTest {
State.Type.RUNNING
);
execution = runnerUtils.emitAndAwaitExecution(
execution = runnerUtils.awaitExecution(
e -> e.getId().equals(executionId) && e.getState().getCurrent() == State.Type.SUCCESS,
restarted
throwRunnable(() -> executionQueue.emit(restarted)),
Duration.ofSeconds(5)
);
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
}
public void runDelay(TestRunnerUtils runnerUtils) throws Exception {
public void runDelay(RunnerUtils runnerUtils) throws Exception {
Execution execution = runnerUtils.runOneUntilPaused(MAIN_TENANT, "io.kestra.tests", "pause-delay", null, null, Duration.ofSeconds(30));
String executionId = execution.getId();
@@ -184,9 +193,9 @@ public class PauseTest {
assertThat(execution.getTaskRunList()).hasSize(1);
execution = runnerUtils.awaitExecution(
e ->
e.getId().equals(executionId) && e.getState().getCurrent() == State.Type.SUCCESS,
execution
e -> e.getId().equals(executionId) && e.getState().getCurrent() == State.Type.SUCCESS,
() -> {},
Duration.ofSeconds(5)
);
assertThat(execution.getTaskRunList().getFirst().getState().getHistories().stream().filter(history -> history.getState() == State.Type.PAUSED).count()).isEqualTo(1L);
@@ -194,7 +203,7 @@ public class PauseTest {
assertThat(execution.getTaskRunList()).hasSize(3);
}
public void runDurationFromInput(TestRunnerUtils runnerUtils) throws Exception {
public void runDurationFromInput(RunnerUtils runnerUtils) throws Exception {
Execution execution = runnerUtils.runOneUntilPaused(MAIN_TENANT, "io.kestra.tests", "pause-duration-from-input", null, null, Duration.ofSeconds(30));
String executionId = execution.getId();
@@ -202,9 +211,9 @@ public class PauseTest {
assertThat(execution.getTaskRunList()).hasSize(1);
execution = runnerUtils.awaitExecution(
e ->
e.getId().equals(executionId) && e.getState().getCurrent() == State.Type.SUCCESS,
execution
e -> e.getId().equals(executionId) && e.getState().getCurrent() == State.Type.SUCCESS,
() -> {},
Duration.ofSeconds(5)
);
assertThat(execution.getTaskRunList().getFirst().getState().getHistories().stream().filter(history -> history.getState() == State.Type.PAUSED).count()).isEqualTo(1L);
@@ -212,14 +221,14 @@ public class PauseTest {
assertThat(execution.getTaskRunList()).hasSize(3);
}
public void runParallelDelay(TestRunnerUtils runnerUtils) throws TimeoutException, QueueException {
public void runParallelDelay(RunnerUtils runnerUtils) throws TimeoutException, QueueException {
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "each-parallel-pause", Duration.ofSeconds(30));
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
assertThat(execution.getTaskRunList()).hasSize(7);
}
public void runTimeout(TestRunnerUtils runnerUtils) throws Exception {
public void runTimeout(RunnerUtils runnerUtils) throws Exception {
Execution execution = runnerUtils.runOneUntilPaused(MAIN_TENANT, "io.kestra.tests", "pause-timeout", null, null, Duration.ofSeconds(30));
String executionId = execution.getId();
@@ -228,7 +237,8 @@ public class PauseTest {
execution = runnerUtils.awaitExecution(
e -> e.getId().equals(executionId) && e.getState().getCurrent() == State.Type.FAILED,
execution
() -> {},
Duration.ofSeconds(5)
);
assertThat(execution.getTaskRunList().getFirst().getState().getHistories().stream().filter(history -> history.getState() == State.Type.PAUSED).count()).as("Task runs were: " + execution.getTaskRunList().toString()).isEqualTo(1L);
@@ -237,7 +247,7 @@ public class PauseTest {
assertThat(execution.getTaskRunList()).hasSize(1);
}
public void runTimeoutAllowFailure(TestRunnerUtils runnerUtils) throws Exception {
public void runTimeoutAllowFailure(RunnerUtils runnerUtils) throws Exception {
Execution execution = runnerUtils.runOneUntilPaused(MAIN_TENANT, "io.kestra.tests", "pause-timeout-allow-failure", null, null, Duration.ofSeconds(30));
String executionId = execution.getId();
@@ -246,7 +256,8 @@ public class PauseTest {
execution = runnerUtils.awaitExecution(
e -> e.getId().equals(executionId) && e.getState().getCurrent() == State.Type.WARNING,
execution
() -> {},
Duration.ofSeconds(5)
);
assertThat(execution.getTaskRunList().getFirst().getState().getHistories().stream().filter(history -> history.getState() == State.Type.PAUSED).count()).as("Task runs were: " + execution.getTaskRunList().toString()).isEqualTo(1L);
@@ -255,7 +266,7 @@ public class PauseTest {
assertThat(execution.getTaskRunList()).hasSize(3);
}
public void runEmptyTasks(TestRunnerUtils runnerUtils) throws Exception {
public void runEmptyTasks(RunnerUtils runnerUtils) throws Exception {
Execution execution = runnerUtils.runOneUntilPaused(MAIN_TENANT, "io.kestra.tests", "pause_no_tasks", null, null, Duration.ofSeconds(30));
String executionId = execution.getId();
Flow flow = flowRepository.findByExecution(execution);
@@ -271,16 +282,17 @@ public class PauseTest {
State.Type.RUNNING
);
execution = runnerUtils.emitAndAwaitExecution(
execution = runnerUtils.awaitExecution(
e -> e.getId().equals(executionId) && e.getState().getCurrent() == State.Type.SUCCESS,
restarted
throwRunnable(() -> executionQueue.emit(restarted)),
Duration.ofSeconds(10)
);
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
}
@SuppressWarnings("unchecked")
public void runOnResume(TestRunnerUtils runnerUtils) throws Exception {
public void runOnResume(RunnerUtils runnerUtils) throws Exception {
Execution execution = runnerUtils.runOneUntilPaused(MAIN_TENANT, "io.kestra.tests", "pause_on_resume", null, null, Duration.ofSeconds(30));
String executionId = execution.getId();
Flow flow = flowRepository.findByExecution(execution);
@@ -303,9 +315,10 @@ public class PauseTest {
null
).block();
execution = runnerUtils.emitAndAwaitExecution(
execution = runnerUtils.awaitExecution(
e -> e.getId().equals(executionId) && e.getState().getCurrent() == State.Type.SUCCESS,
restarted
throwRunnable(() -> executionQueue.emit(restarted)),
Duration.ofSeconds(10)
);
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
@@ -316,7 +329,7 @@ public class PauseTest {
assertThat(CharStreams.toString(new InputStreamReader(storageInterface.get(MAIN_TENANT, null, URI.create((String) outputs.get("data")))))).isEqualTo(executionId);
}
public void runOnResumeMissingInputs(String tenantId, TestRunnerUtils runnerUtils) throws Exception {
public void runOnResumeMissingInputs(String tenantId, RunnerUtils runnerUtils) throws Exception {
Execution execution = runnerUtils.runOneUntilPaused(tenantId, "io.kestra.tests", "pause_on_resume", null, null, Duration.ofSeconds(30));
Flow flow = flowRepository.findByExecution(execution);
@@ -331,7 +344,7 @@ public class PauseTest {
}
@SuppressWarnings("unchecked")
public void runOnResumeOptionalInputs(TestRunnerUtils runnerUtils) throws Exception {
public void runOnResumeOptionalInputs(RunnerUtils runnerUtils) throws Exception {
Execution execution = runnerUtils.runOneUntilPaused(MAIN_TENANT, "io.kestra.tests", "pause_on_resume_optional", null, null, Duration.ofSeconds(30));
String executionId = execution.getId();
Flow flow = flowRepository.findByExecution(execution);
@@ -340,9 +353,10 @@ public class PauseTest {
Execution restarted = executionService.resume(execution, flow, State.Type.RUNNING, Pause.Resumed.now());
execution = runnerUtils.emitAndAwaitExecution(
execution = runnerUtils.awaitExecution(
e -> e.getId().equals(executionId) && e.getState().getCurrent() == State.Type.SUCCESS,
restarted
throwRunnable(() -> executionQueue.emit(restarted)),
Duration.ofSeconds(10)
);
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
@@ -351,7 +365,7 @@ public class PauseTest {
assertThat(outputs.get("asked")).isEqualTo("MISSING");
}
public void runDurationWithBehavior(String tenantId, TestRunnerUtils runnerUtils, Pause.Behavior behavior) throws Exception {
public void runDurationWithBehavior(String tenantId, RunnerUtils runnerUtils, Pause.Behavior behavior) throws Exception {
Execution execution = runnerUtils.runOneUntilPaused(tenantId, "io.kestra.tests", "pause-behavior", null, (unused, _unused) -> Map.of("behavior", behavior), Duration.ofSeconds(30));
String executionId = execution.getId();
@@ -360,7 +374,8 @@ public class PauseTest {
execution = runnerUtils.awaitExecution(
e -> e.getId().equals(executionId) && e.getState().getCurrent().isTerminated(),
execution
() -> {},
Duration.ofSeconds(5)
);
State.Type finalState = behavior == Pause.Behavior.RESUME ? State.Type.SUCCESS : behavior.mapToState();

View File

@@ -1,22 +1,27 @@
package io.kestra.plugin.core.flow;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.Flow;
import io.kestra.core.models.flows.State;
import io.kestra.core.queues.QueueException;
import io.kestra.core.repositories.ExecutionRepositoryInterface;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.runners.TestRunnerUtils;
import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface;
import io.kestra.core.runners.RunnerUtils;
import io.kestra.core.utils.Await;
import io.micronaut.data.model.Pageable;
import io.kestra.core.utils.TestsUtils;
import jakarta.inject.Inject;
import jakarta.inject.Named;
import jakarta.inject.Singleton;
import lombok.extern.slf4j.Slf4j;
import reactor.core.publisher.Flux;
import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicReference;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import static org.assertj.core.api.Assertions.assertThat;
@Slf4j
@@ -24,11 +29,11 @@ import static org.assertj.core.api.Assertions.assertThat;
public class RetryCaseTest {
@Inject
protected TestRunnerUtils runnerUtils;
@Named(QueueFactoryInterface.EXECUTION_NAMED)
private QueueInterface<Execution> executionQueue;
@Inject
private ExecutionRepositoryInterface executionRepository;
@Inject
private FlowRepositoryInterface flowRepository;
protected RunnerUtils runnerUtils;
public void retrySuccess(Execution execution) {
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.WARNING);
@@ -67,53 +72,117 @@ public class RetryCaseTest {
}
public void retryNewExecutionTaskDuration(String tenant) throws TimeoutException, QueueException {
var flow = flowRepository
.findById(tenant, "io.kestra.tests", "retry-new-execution-task-duration")
.orElseThrow();
runAndAssertThereWasTwoRetriesAndFinishedFailed(flow);
}
public void retryNewExecutionTaskDuration() throws TimeoutException, QueueException {
CountDownLatch countDownLatch = new CountDownLatch(3);
AtomicReference<List<State.Type>> stateHistory = new AtomicReference<>(new ArrayList<>());
public void retryNewExecutionTaskAttempts(String tenant) throws TimeoutException, QueueException {
var flow = flowRepository
.findById(tenant, "io.kestra.tests", "retry-new-execution-task-attempts")
.orElseThrow();
runAndAssertThereWasTwoRetriesAndFinishedFailed(flow);
}
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
Execution execution = either.getLeft();
if (execution.getFlowId().equals("retry-new-execution-task-duration") && execution.getState().getCurrent().isTerminated()) {
List<State.Type> stateHistoryList = stateHistory.get();
stateHistoryList.add(execution.getState().getCurrent());
stateHistory.set(stateHistoryList);
countDownLatch.countDown();
}
});
public void retryNewExecutionFlowDuration(String tenant) throws TimeoutException, QueueException {
var flow = flowRepository
.findById(tenant, "io.kestra.tests", "retry-new-execution-flow-duration")
.orElseThrow();
runAndAssertThereWasTwoRetriesAndFinishedFailed(flow);
}
public void retryNewExecutionFlowAttempts(String tenant) throws TimeoutException, QueueException {
var flow = flowRepository
.findById(tenant, "io.kestra.tests", "retry-new-execution-flow-attempts")
.orElseThrow();
runAndAssertThereWasTwoRetriesAndFinishedFailed(flow);
}
private void runAndAssertThereWasTwoRetriesAndFinishedFailed(Flow flow) throws TimeoutException, QueueException {
runnerUtils.runOne(
Execution.newExecution(flow, null),
flow,
Duration.ofSeconds(10)
MAIN_TENANT,
"io.kestra.tests",
"retry-new-execution-task-duration",
null,
null
);
Await.until(
() -> "flow should have ended in Failed state",
() -> executionRepository.findLatestForStates(flow.getTenantId(), flow.getNamespace(), flow.getId(), List.of(State.Type.FAILED)).isPresent(),
Duration.ofMillis(100),
Duration.ofSeconds(10)
Await.until(() -> countDownLatch.getCount() == 0, Duration.ofSeconds(2), Duration.ofMinutes(1));
receive.blockLast();
assertThat(stateHistory.get()).containsExactlyInAnyOrder(State.Type.RETRIED, State.Type.RETRIED, State.Type.FAILED);
}
public void retryNewExecutionTaskAttempts() throws TimeoutException, QueueException {
CountDownLatch countDownLatch = new CountDownLatch(3);
AtomicReference<List<State.Type>> stateHistory = new AtomicReference<>(new ArrayList<>());
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
Execution execution = either.getLeft();
if (execution.getFlowId().equals("retry-new-execution-task-attempts") && execution.getState().getCurrent().isTerminated()) {
List<State.Type> stateHistoryList = stateHistory.get();
stateHistoryList.add(execution.getState().getCurrent());
stateHistory.set(stateHistoryList);
countDownLatch.countDown();
}
});
runnerUtils.runOne(
MAIN_TENANT,
"io.kestra.tests",
"retry-new-execution-task-attempts",
null,
null
);
var executions = executionRepository.findByFlowId(flow.getTenantId(), flow.getNamespace(), flow.getId(), Pageable.UNPAGED);
assertThat(executions.stream().map(e -> e.getState().getCurrent())).contains(State.Type.RETRIED, State.Type.RETRIED, State.Type.FAILED);
Await.until(() -> countDownLatch.getCount() == 0, Duration.ofSeconds(2), Duration.ofMinutes(1));
receive.blockLast();
assertThat(stateHistory.get()).containsExactlyInAnyOrder(State.Type.RETRIED, State.Type.RETRIED, State.Type.FAILED);
}
public void retryNewExecutionFlowDuration() throws TimeoutException, QueueException {
CountDownLatch countDownLatch = new CountDownLatch(3);
AtomicReference<List<State.Type>> stateHistory = new AtomicReference<>(new ArrayList<>());
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
Execution execution = either.getLeft();
if (execution.getFlowId().equals("retry-new-execution-flow-duration") && execution.getState().getCurrent().isTerminated()) {
List<State.Type> stateHistoryList = stateHistory.get();
stateHistoryList.add(execution.getState().getCurrent());
stateHistory.set(stateHistoryList);
countDownLatch.countDown();
}
});
runnerUtils.runOne(
MAIN_TENANT,
"io.kestra.tests",
"retry-new-execution-flow-duration",
null,
null
);
Await.until(() -> countDownLatch.getCount() == 0, Duration.ofSeconds(2), Duration.ofMinutes(1));
receive.blockLast();
assertThat(stateHistory.get()).containsExactlyInAnyOrder(State.Type.RETRIED, State.Type.RETRIED, State.Type.FAILED);
}
public void retryNewExecutionFlowAttempts() throws TimeoutException, QueueException {
CountDownLatch countDownLatch = new CountDownLatch(3);
AtomicReference<List<State.Type>> stateHistory = new AtomicReference<>(new ArrayList<>());
Flux<Execution> receive = TestsUtils.receive(executionQueue, either -> {
Execution execution = either.getLeft();
if (execution.getFlowId().equals("retry-new-execution-flow-attempts") && execution.getState().getCurrent().isTerminated()) {
List<State.Type> stateHistoryList = stateHistory.get();
stateHistoryList.add(execution.getState().getCurrent());
stateHistory.set(stateHistoryList);
countDownLatch.countDown();
}
});
runnerUtils.runOne(
MAIN_TENANT,
"io.kestra.tests",
"retry-new-execution-flow-attempts",
null,
null
);
Await.until(() -> countDownLatch.getCount() == 0, Duration.ofSeconds(2), Duration.ofMinutes(1));
receive.blockLast();
assertThat(stateHistory.get()).containsExactlyInAnyOrder(State.Type.RETRIED, State.Type.RETRIED, State.Type.FAILED);
}
public void retryFailedTaskDuration(Execution execution) {
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.FAILED);
assertThat(execution.getTaskRunList().getFirst().attemptNumber()).isGreaterThanOrEqualTo(2);
assertThat(execution.getTaskRunList().getFirst().attemptNumber()).isEqualTo(3);
}
public void retryFailedTaskAttempts(Execution execution) {

View File

@@ -10,7 +10,7 @@ import io.kestra.core.models.Label;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.State;
import io.kestra.core.queues.QueueException;
import io.kestra.core.runners.TestRunnerUtils;
import io.kestra.core.runners.RunnerUtils;
import jakarta.inject.Inject;
import java.util.List;
import java.util.Map;
@@ -21,7 +21,7 @@ import org.junit.jupiter.api.Test;
class RuntimeLabelsTest {
@Inject
private TestRunnerUtils runnerUtils;
private RunnerUtils runnerUtils;
@Test
@LoadFlows({"flows/valids/labels-update-task.yml"})
@@ -160,25 +160,4 @@ class RuntimeLabelsTest {
new Label("fromListKey", "value2")
);
}
@Test
@LoadFlows({"flows/valids/labels-update-task-empty.yml"})
void updateIgnoresEmpty() throws TimeoutException, QueueException {
Execution execution = runnerUtils.runOne(
MAIN_TENANT,
"io.kestra.tests",
"labels-update-task-empty",
null,
(flow, createdExecution) -> Map.of(),
null,
List.of()
);
assertThat(execution.getTaskRunList()).hasSize(1);
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.FAILED);
assertThat(execution.getLabels()).containsExactly(
new Label(Label.CORRELATION_ID, execution.getId())
);
}
}

View File

@@ -10,7 +10,7 @@ import io.kestra.core.junit.annotations.LoadFlows;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.State;
import io.kestra.core.queues.QueueException;
import io.kestra.core.runners.TestRunnerUtils;
import io.kestra.core.runners.RunnerUtils;
import io.kestra.core.utils.IdUtils;
import jakarta.inject.Inject;
import java.util.List;
@@ -24,7 +24,7 @@ class StateTest {
public static final String FLOW_ID = "state";
public static final String NAMESPACE = "io.kestra.tests";
@Inject
private TestRunnerUtils runnerUtils;
private RunnerUtils runnerUtils;
@SuppressWarnings("unchecked")
@Test

View File

@@ -9,7 +9,7 @@ import io.kestra.core.queues.QueueException;
import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface;
import io.kestra.core.repositories.ExecutionRepositoryInterface;
import io.kestra.core.runners.TestRunnerUtils;
import io.kestra.core.runners.RunnerUtils;
import jakarta.inject.Inject;
import jakarta.inject.Named;
import org.junit.jupiter.api.Test;
@@ -27,7 +27,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
class SubflowRunnerTest {
@Inject
private TestRunnerUtils runnerUtils;
private RunnerUtils runnerUtils;
@Inject
private ExecutionRepositoryInterface executionRepository;

View File

@@ -9,7 +9,7 @@ import io.kestra.core.junit.annotations.LoadFlows;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.flows.State;
import io.kestra.core.queues.QueueException;
import io.kestra.core.runners.TestRunnerUtils;
import io.kestra.core.runners.RunnerUtils;
import jakarta.inject.Inject;
import java.util.concurrent.TimeoutException;
import org.junit.jupiter.api.Test;
@@ -18,7 +18,7 @@ import org.junit.jupiter.api.Test;
class SwitchTest {
@Inject
private TestRunnerUtils runnerUtils;
private RunnerUtils runnerUtils;
@Test
@LoadFlows(value = {"flows/valids/switch.yaml"}, tenantId = "switch")

View File

@@ -11,7 +11,7 @@ import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface;
import io.kestra.core.repositories.TemplateRepositoryInterface;
import io.kestra.core.runners.FlowInputOutput;
import io.kestra.core.runners.TestRunnerUtils;
import io.kestra.core.runners.RunnerUtils;
import io.kestra.plugin.core.log.Log;
import io.kestra.core.utils.TestsUtils;
import io.micronaut.context.annotation.Property;
@@ -45,7 +45,7 @@ public class TemplateTest {
private FlowInputOutput flowIO;
@Inject
protected TestRunnerUtils runnerUtils;
protected RunnerUtils runnerUtils;
public static final io.kestra.core.models.templates.Template TEMPLATE_1 = io.kestra.core.models.templates.Template.builder()
.id("template")
@@ -54,7 +54,7 @@ public class TemplateTest {
.tasks(Collections.singletonList(Log.builder().id("test").type(Log.class.getName()).message("{{ parent.outputs.args['my-forward'] }}").build())).build();
public static void withTemplate(
TestRunnerUtils runnerUtils,
RunnerUtils runnerUtils,
TemplateRepositoryInterface templateRepository,
QueueInterface<LogEntry> logQueue,
FlowInputOutput flowIO
@@ -90,7 +90,7 @@ public class TemplateTest {
}
public static void withFailedTemplate(TestRunnerUtils runnerUtils, QueueInterface<LogEntry> logQueue) throws TimeoutException, QueueException {
public static void withFailedTemplate(RunnerUtils runnerUtils, QueueInterface<LogEntry> logQueue) throws TimeoutException, QueueException {
List<LogEntry> logs = new CopyOnWriteArrayList<>();
Flux<LogEntry> receive = TestsUtils.receive(logQueue, either -> logs.add(either.getLeft()));

View File

@@ -11,7 +11,7 @@ import io.kestra.core.queues.QueueException;
import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.runners.TestRunnerUtils;
import io.kestra.core.runners.RunnerUtils;
import io.kestra.core.utils.IdUtils;
import io.kestra.core.utils.TestsUtils;
import jakarta.inject.Inject;
@@ -37,7 +37,7 @@ class TimeoutTest {
private QueueInterface<LogEntry> workerTaskLogQueue;
@Inject
private TestRunnerUtils runnerUtils;
private RunnerUtils runnerUtils;
@RetryingTest(5) // Flaky on CI but never locally even with 100 repetitions
void timeout() throws TimeoutException, QueueException {

View File

@@ -12,7 +12,7 @@ import io.kestra.core.models.flows.State;
import io.kestra.core.queues.QueueException;
import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface;
import io.kestra.core.runners.TestRunnerUtils;
import io.kestra.core.runners.RunnerUtils;
import io.kestra.core.utils.TestsUtils;
import jakarta.inject.Inject;
import jakarta.inject.Named;
@@ -31,7 +31,7 @@ class VariablesTest {
QueueInterface<LogEntry> workerTaskLogQueue;
@Inject
private TestRunnerUtils runnerUtils;
private RunnerUtils runnerUtils;
@Test
@ExecuteFlow("flows/valids/variables.yaml")

View File

@@ -16,7 +16,7 @@ import io.kestra.core.models.flows.State;
import io.kestra.core.models.tasks.common.EncryptedString;
import io.kestra.core.queues.QueueException;
import io.kestra.core.runners.RunContextFactory;
import io.kestra.core.runners.TestRunnerUtils;
import io.kestra.core.runners.RunnerUtils;
import io.kestra.core.storages.InternalStorage;
import io.kestra.core.storages.StorageContext;
import io.kestra.core.storages.StorageInterface;
@@ -32,6 +32,7 @@ import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeoutException;
import org.junit.jupiter.api.Test;
import org.junitpioneer.jupiter.RetryingTest;
@KestraTest(startRunner = true)
public class WorkingDirectoryTest {
@@ -42,7 +43,7 @@ public class WorkingDirectoryTest {
RunContextFactory runContextFactory;
@Inject
TestRunnerUtils runnerUtils;
RunnerUtils runnerUtils;
@Test
@LoadFlows({"flows/valids/working-directory.yaml"})
@@ -121,7 +122,7 @@ public class WorkingDirectoryTest {
@Inject
StorageInterface storageInterface;
public void success(TestRunnerUtils runnerUtils) throws TimeoutException, QueueException {
public void success(RunnerUtils runnerUtils) throws TimeoutException, QueueException {
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "working-directory", null,
(f, e) -> ImmutableMap.of("failed", "false"), Duration.ofSeconds(60)
);
@@ -131,7 +132,7 @@ public class WorkingDirectoryTest {
assertThat((String) execution.getTaskRunList().get(3).getOutputs().get("value")).startsWith("kestra://");
}
public void failed(String tenantId, TestRunnerUtils runnerUtils) throws TimeoutException, QueueException {
public void failed(String tenantId, RunnerUtils runnerUtils) throws TimeoutException, QueueException {
Execution execution = runnerUtils.runOne(tenantId, "io.kestra.tests", "working-directory", null,
(f, e) -> ImmutableMap.of("failed", "true"), Duration.ofSeconds(60)
);
@@ -141,7 +142,7 @@ public class WorkingDirectoryTest {
assertThat(execution.findTaskRunsByTaskId("error-t1")).hasSize(1);
}
public void each(TestRunnerUtils runnerUtils) throws TimeoutException, QueueException {
public void each(RunnerUtils runnerUtils) throws TimeoutException, QueueException {
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "working-directory-each", Duration.ofSeconds(60));
assertThat(execution.getTaskRunList()).hasSize(8);
@@ -150,7 +151,7 @@ public class WorkingDirectoryTest {
}
@SuppressWarnings("unchecked")
public void outputFiles(String tenantId, TestRunnerUtils runnerUtils) throws TimeoutException, IOException, QueueException {
public void outputFiles(String tenantId, RunnerUtils runnerUtils) throws TimeoutException, IOException, QueueException {
Execution execution = runnerUtils.runOne(tenantId, "io.kestra.tests", "working-directory-outputs");
@@ -177,7 +178,7 @@ public class WorkingDirectoryTest {
}
@SuppressWarnings("unchecked")
public void inputFiles(TestRunnerUtils runnerUtils) throws TimeoutException, IOException, QueueException {
public void inputFiles(RunnerUtils runnerUtils) throws TimeoutException, IOException, QueueException {
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "working-directory-inputs");
@@ -203,7 +204,7 @@ public class WorkingDirectoryTest {
}
@SuppressWarnings({"unchecked", "OptionalGetWithoutIsPresent"})
public void cache(TestRunnerUtils runnerUtils) throws TimeoutException, IOException, QueueException {
public void cache(RunnerUtils runnerUtils) throws TimeoutException, IOException, QueueException {
// make sure the cache didn't exist
StorageContext storageContext = StorageContext.forFlow(Flow
.builder()
@@ -246,7 +247,7 @@ public class WorkingDirectoryTest {
assertThat(execution.getState().getCurrent()).isEqualTo(State.Type.SUCCESS);
}
public void taskRun(TestRunnerUtils runnerUtils) throws TimeoutException, InternalException, QueueException {
public void taskRun(RunnerUtils runnerUtils) throws TimeoutException, InternalException, QueueException {
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "working-directory-taskrun");
assertThat(execution.getTaskRunList()).hasSize(3);
@@ -254,7 +255,7 @@ public class WorkingDirectoryTest {
assertThat(((String) execution.findTaskRunByTaskIdAndValue("log-taskrun", List.of("1")).getOutputs().get("value"))).contains("1");
}
public void taskRunNested(TestRunnerUtils runnerUtils) throws TimeoutException, InternalException, QueueException {
public void taskRunNested(RunnerUtils runnerUtils) throws TimeoutException, InternalException, QueueException {
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "working-directory-taskrun-nested");
assertThat(execution.getTaskRunList()).hasSize(6);
@@ -262,7 +263,7 @@ public class WorkingDirectoryTest {
assertThat(((String) execution.findTaskRunByTaskIdAndValue("log-workerparent", List.of("1")).getOutputs().get("value"))).contains("{\"taskrun\":{\"value\":\"1\"}}");
}
public void namespaceFiles(TestRunnerUtils runnerUtils) throws TimeoutException, IOException, QueueException {
public void namespaceFiles(RunnerUtils runnerUtils) throws TimeoutException, IOException, QueueException {
put("/test/a/b/c/1.txt", "first");
put("/a/b/c/2.txt", "second");
put("/a/b/3.txt", "third");
@@ -278,7 +279,7 @@ public class WorkingDirectoryTest {
assertThat(execution.findTaskRunsByTaskId("t3").getFirst().getOutputs().get("value")).isEqualTo("third");
}
public void namespaceFilesWithNamespaces(TestRunnerUtils runnerUtils) throws TimeoutException, IOException, QueueException {
public void namespaceFilesWithNamespaces(RunnerUtils runnerUtils) throws TimeoutException, IOException, QueueException {
//fist namespace
put("/test/a/b/c/1.txt", "first in first namespace", "io.test.first");
put("/a/b/c/2.txt", "second in first namespace", "io.test.first");
@@ -303,7 +304,7 @@ public class WorkingDirectoryTest {
}
@SuppressWarnings("unchecked")
public void encryption(TestRunnerUtils runnerUtils, RunContextFactory runContextFactory) throws TimeoutException, GeneralSecurityException, QueueException {
public void encryption(RunnerUtils runnerUtils, RunContextFactory runContextFactory) throws TimeoutException, GeneralSecurityException, QueueException {
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "working-directory-taskrun-encrypted");
assertThat(execution.getTaskRunList()).hasSize(3);
@@ -315,7 +316,7 @@ public class WorkingDirectoryTest {
assertThat(execution.findTaskRunsByTaskId("decrypted").getFirst().getOutputs().get("value")).isEqualTo("Hello World");
}
public void invalidRunIf(TestRunnerUtils runnerUtils) throws TimeoutException, QueueException {
public void invalidRunIf(RunnerUtils runnerUtils) throws TimeoutException, QueueException {
Execution execution = runnerUtils.runOne(MAIN_TENANT, "io.kestra.tests", "working-directory-invalid-runif", null,
(f, e) -> ImmutableMap.of("failed", "false"), Duration.ofSeconds(60)
);

View File

@@ -130,7 +130,7 @@ public class PurgeKVTest {
KVStore kvStore2 = runContext.namespaceKv(namespace2);
kvStore2.put(KEY_EXPIRED, new KVValueAndMetadata(new KVMetadata("unused", Duration.ofMillis(1L)), "unused"));
kvStore2.put(KEY, new KVValueAndMetadata(new KVMetadata("unused", Duration.ofMinutes(1L)), "unused"));
kvStore2.put(KEY2_NEVER_EXPIRING, new KVValueAndMetadata(new KVMetadata("unused", (Duration) null), "unused"));
kvStore2.put(KEY2_NEVER_EXPIRING, new KVValueAndMetadata(new KVMetadata("unused", null), "unused"));
kvStore2.put(KEY3_NEVER_EXPIRING, new KVValueAndMetadata(null, "unused"));
PurgeKV purgeKV = PurgeKV.builder()
@@ -156,7 +156,7 @@ public class PurgeKVTest {
KVStore kvStore1 = runContext.namespaceKv(namespace);
kvStore1.put(KEY_EXPIRED, new KVValueAndMetadata(new KVMetadata("unused", Duration.ofMillis(1L)), "unused"));
kvStore1.put(KEY, new KVValueAndMetadata(new KVMetadata("unused", Duration.ofMinutes(1L)), "unused"));
kvStore1.put(KEY2_NEVER_EXPIRING, new KVValueAndMetadata(new KVMetadata("unused",(Duration) null), "unused"));
kvStore1.put(KEY2_NEVER_EXPIRING, new KVValueAndMetadata(new KVMetadata("unused", null), "unused"));
kvStore1.put(KEY3_NEVER_EXPIRING, new KVValueAndMetadata(null, "unused"));
PurgeKV purgeKV = PurgeKV.builder()

View File

@@ -11,7 +11,6 @@ import io.kestra.core.storages.kv.KVStore;
import io.kestra.core.storages.kv.KVStoreException;
import io.kestra.core.storages.kv.KVValue;
import io.kestra.core.storages.kv.KVValueAndMetadata;
import io.kestra.core.utils.IdUtils;
import io.kestra.core.utils.TestsUtils;
import jakarta.inject.Inject;
import org.junit.jupiter.api.Assertions;
@@ -170,7 +169,6 @@ class SetTest {
@Test
void shouldFailGivenExistingKeyAndOverwriteFalse() throws Exception {
// Given
String key = IdUtils.create();
Set set = Set.builder()
.id(Set.class.getSimpleName())
.type(Set.class.getName())
@@ -181,22 +179,16 @@ class SetTest {
var value = Map.of("date", Instant.now().truncatedTo(ChronoUnit.MILLIS), "int", 1, "string", "string");
final RunContext runContext = TestsUtils.mockRunContext(this.runContextFactory, set, Map.of(
"key", key,
"key", "existing_key",
"value", value
));
// When - Then
//set key a first:
runContext.namespaceKv(runContext.flowInfo().namespace()).put(key, new KVValueAndMetadata(new KVMetadata("unused", (Instant)null), value));
runContext.namespaceKv(runContext.flowInfo().namespace()).put("existing_key", new KVValueAndMetadata(new KVMetadata("unused", null), value));
//fail because key is already set
KVStoreException exception = Assertions.assertThrows(KVStoreException.class, () -> Set.builder()
.id(Set.class.getSimpleName())
.type(Set.class.getName())
.key(new Property<>("{{ inputs.key }}"))
.value(new Property<>("{{ inputs.value }}"))
.overwrite(Property.ofValue(false))
.build().run(runContext));
assertThat(exception.getMessage()).isEqualTo("Cannot set value for key '%s'. Key already exists and `overwrite` is set to `false`.".formatted(key));
KVStoreException exception = Assertions.assertThrows(KVStoreException.class, () -> set.run(runContext));
assertThat(exception.getMessage()).isEqualTo("Cannot set value for key 'existing_key'. Key already exists and `overwrite` is set to `false`.");
}
@Test

View File

@@ -5,7 +5,7 @@ import io.kestra.core.junit.annotations.LoadFlows;
import io.kestra.core.models.executions.Execution;
import io.kestra.core.models.executions.LogEntry;
import io.kestra.core.repositories.LogRepositoryInterface;
import io.kestra.core.runners.TestRunnerUtils;
import io.kestra.core.runners.RunnerUtils;
import jakarta.inject.Inject;
import java.time.temporal.ChronoUnit;
import java.util.stream.Stream;
@@ -29,7 +29,7 @@ class PurgeLogsTest {
private LogRepositoryInterface logRepository;
@Inject
protected TestRunnerUtils runnerUtils;
protected RunnerUtils runnerUtils;
@Test
@LoadFlows("flows/valids/purge_logs_no_arguments.yaml")

View File

@@ -1,12 +1,11 @@
package io.kestra.plugin.core.storage;
import io.kestra.core.context.TestRunContextFactory;
import io.kestra.core.exceptions.IllegalVariableEvaluationException;
import io.kestra.core.models.property.Property;
import io.kestra.core.runners.RunContextFactory;
import io.kestra.core.storages.StorageInterface;
import io.kestra.core.utils.IdUtils;
import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.utils.TestsUtils;
import jakarta.inject.Inject;
import org.junit.jupiter.api.Test;
@@ -20,6 +19,7 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
import static io.kestra.core.tenant.TenantService.MAIN_TENANT;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertThrows;
@@ -27,16 +27,16 @@ import static org.junit.jupiter.api.Assertions.assertThrows;
@KestraTest
class LocalFilesTest {
@Inject
TestRunContextFactory runContextFactory;
RunContextFactory runContextFactory;
@Inject
StorageInterface storageInterface;
private URI internalFiles(String tenantId) throws IOException, URISyntaxException {
private URI internalFiles() throws IOException, URISyntaxException {
var resource = ConcatTest.class.getClassLoader().getResource("application-test.yml");
return storageInterface.put(
tenantId,
MAIN_TENANT,
null,
new URI("/file/storage/get.yml"),
new FileInputStream(Objects.requireNonNull(resource).getFile())
@@ -46,9 +46,8 @@ class LocalFilesTest {
@Test
void run() throws Exception {
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
var runContext = runContextFactory.of("namesapce", tenant, Map.of("toto", "tata"));
var storageFile = internalFiles(tenant);
var runContext = runContextFactory.of(Map.of("toto", "tata"));
var storageFile = internalFiles();
var task = LocalFiles.builder()
.id(IdUtils.create())
@@ -65,19 +64,18 @@ class LocalFilesTest {
assertThat(outputs).isNotNull();
assertThat(outputs.getUris()).isNotNull();
assertThat(outputs.getUris().size()).isEqualTo(1);
assertThat(new String(storageInterface.get(tenant, null, outputs.getUris().get("hello-input.txt")).readAllBytes())).isEqualTo("Hello Input");
assertThat(new String(storageInterface.get(MAIN_TENANT, null, outputs.getUris().get("hello-input.txt")).readAllBytes())).isEqualTo("Hello Input");
assertThat(runContext.workingDir().path().toFile().list().length).isEqualTo(2);
assertThat(Files.readString(runContext.workingDir().path().resolve("execution.txt"))).isEqualTo("tata");
assertThat(Files.readString(runContext.workingDir().path().resolve("application-test.yml"))).isEqualTo(new String(storageInterface.get(tenant, null, storageFile).readAllBytes()));
assertThat(Files.readString(runContext.workingDir().path().resolve("application-test.yml"))).isEqualTo(new String(storageInterface.get(MAIN_TENANT, null, storageFile).readAllBytes()));
runContext.cleanup();
}
@Test
void recursive() throws Exception {
String tenant = TestsUtils.randomTenant(this.getClass().getSimpleName());
var runContext = runContextFactory.of("namesapce", tenant, Map.of("toto", "tata"));
var storageFile = internalFiles(tenant);
var runContext = runContextFactory.of(Map.of("toto", "tata"));
var storageFile = internalFiles();
var task = LocalFiles.builder()
.id(IdUtils.create())
@@ -94,11 +92,11 @@ class LocalFilesTest {
assertThat(outputs).isNotNull();
assertThat(outputs.getUris()).isNotNull();
assertThat(outputs.getUris().size()).isEqualTo(3);
assertThat(new String(storageInterface.get(tenant, null, outputs.getUris().get("test/hello-input.txt")).readAllBytes())).isEqualTo("Hello Input");
assertThat(new String(storageInterface.get(tenant, null, outputs.getUris().get("test/sub/dir/2/execution.txt"))
assertThat(new String(storageInterface.get(MAIN_TENANT, null, outputs.getUris().get("test/hello-input.txt")).readAllBytes())).isEqualTo("Hello Input");
assertThat(new String(storageInterface.get(MAIN_TENANT, null, outputs.getUris().get("test/sub/dir/2/execution.txt"))
.readAllBytes())).isEqualTo("tata");
assertThat(new String(storageInterface.get(tenant, null, outputs.getUris().get("test/sub/dir/3/application-test.yml"))
.readAllBytes())).isEqualTo(new String(storageInterface.get(tenant, null, storageFile).readAllBytes()));
assertThat(new String(storageInterface.get(MAIN_TENANT, null, outputs.getUris().get("test/sub/dir/3/application-test.yml"))
.readAllBytes())).isEqualTo(new String(storageInterface.get(MAIN_TENANT, null, storageFile).readAllBytes()));
runContext.cleanup();
}

View File

@@ -168,12 +168,12 @@ class FlowTest {
Optional<Execution> evaluate = flowTrigger.evaluate(multipleConditionStorage, runContextFactory.of(), flow, execution);
assertThat(evaluate.isPresent()).isTrue();
assertThat(evaluate.get().getLabels()).hasSize(5);
assertThat(evaluate.get().getLabels()).hasSize(6);
assertThat(evaluate.get().getLabels()).contains(new Label("flow-label-1", "flow-label-1"));
assertThat(evaluate.get().getLabels()).contains(new Label("flow-label-2", "flow-label-2"));
assertThat(evaluate.get().getLabels()).contains(new Label("trigger-label-1", "trigger-label-1"));
assertThat(evaluate.get().getLabels()).contains(new Label("trigger-label-2", "trigger-label-2"));
assertThat(evaluate.get().getLabels()).doesNotContain(new Label("trigger-label-3", ""));
assertThat(evaluate.get().getLabels()).contains(new Label("trigger-label-3", ""));
assertThat(evaluate.get().getLabels()).contains(new Label(Label.CORRELATION_ID, "correlationId"));
assertThat(evaluate.get().getTrigger()).extracting(ExecutionTrigger::getVariables).hasFieldOrProperty("executionLabels");
assertThat(evaluate.get().getTrigger().getVariables().get("executionLabels")).isEqualTo(Map.of("execution-label", "execution"));

View File

@@ -169,7 +169,7 @@ class ScheduleTest {
assertThat(evaluate.isPresent()).isTrue();
assertThat(evaluate.get().getLabels()).contains(new Label("trigger-label-1", "trigger-label-1"));
assertThat(evaluate.get().getLabels()).contains(new Label("trigger-label-2", "trigger-label-2"));
assertThat(evaluate.get().getLabels()).doesNotContain(new Label("trigger-label-3", ""));
assertThat(evaluate.get().getLabels()).contains(new Label("trigger-label-3", ""));
}
@Test

View File

@@ -11,14 +11,12 @@ import io.kestra.core.models.triggers.Trigger;
import io.kestra.core.queues.QueueFactoryInterface;
import io.kestra.core.queues.QueueInterface;
import io.kestra.core.repositories.TriggerRepositoryInterface;
import io.kestra.core.runners.TestRunnerUtils;
import io.kestra.core.runners.RunnerUtils;
import io.kestra.scheduler.AbstractScheduler;
import io.kestra.core.utils.Await;
import io.kestra.core.utils.TestsUtils;
import jakarta.inject.Inject;
import jakarta.inject.Named;
import java.time.Duration;
import java.time.ZonedDateTime;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
@@ -38,13 +36,13 @@ class ToggleTest {
private AbstractScheduler scheduler;
@Inject
private TestRunnerUtils runnerUtils;
private RunnerUtils runnerUtils;
@Test
@LoadFlows({"flows/valids/trigger-toggle.yaml"})
void toggle() throws Exception {
// we need to await for the scheduler to be ready otherwise there may be an issue with updating the trigger
Await.until(() -> scheduler.isReady(), Duration.ofMillis(100), Duration.ofSeconds(20));
Await.until(() -> scheduler.isReady());
Trigger trigger = Trigger
.builder()

View File

@@ -1,36 +0,0 @@
id: exit-nested
namespace: io.kestra.tests
inputs:
- id: someBool
type: BOOL
defaults: true
- id: secondBool
type: BOOL
defaults: false
tasks:
- id: if_some_bool
type: io.kestra.plugin.core.flow.If
condition: "{{ inputs.someBool }}"
then:
- id: was_true
type: io.kestra.plugin.core.log.Log
message: The value was true
- id: nested_bool_check
type: io.kestra.plugin.core.flow.If
condition: "{{ inputs.secondBool }}"
then:
- id: was_also_true
type: io.kestra.plugin.core.log.Log
message: Was also true
else:
- id: nested_was_false
type: io.kestra.plugin.core.execution.Exit
state: FAILED
else:
- id: was_false
type: io.kestra.plugin.core.execution.Exit
state: FAILED

Some files were not shown because too many files have changed in this diff Show More