mirror of https://github.com/kestra-io/kestra.git (synced 2025-12-25 02:14:38 -05:00)
feat(queue): introduce Kafka Queue

build.gradle (42 changes)
@@ -1,9 +1,10 @@
 buildscript {
     ext {
-        micronautVersion = "1.2.0"
+        micronautVersion = "1.2.3"
         confluentVersion = "5.2.1"
         kafkaVersion = "2.3.0"
         avroVersion = "1.9.0"
+        lombokVersion = "1.18.10"
     }
 }
 
@@ -15,6 +16,9 @@ plugins {
     id "net.ltgt.apt-idea" version "0.21"
     id "com.github.johnrengelman.shadow" version "4.0.2"
     id "application"
+
+    // test
+    id 'com.adarshr.test-logger' version '1.7.0'
 }
 
 idea {
@@ -32,6 +36,7 @@ sourceCompatibility = 11
 
 dependencies {
     compile project(":cli")
+    testCompile project(":repository-memory")
 }
 
 
@@ -53,7 +58,10 @@ allprojects {
     apply plugin:"java"
     apply plugin:"net.ltgt.apt-eclipse"
     apply plugin:"net.ltgt.apt-idea"
-    apply plugin: "io.spring.dependency-management"
+    apply plugin:"io.spring.dependency-management"
 
+    // test
+    apply plugin:"com.adarshr.test-logger"
+
     dependencyManagement {
         imports {
@@ -71,8 +79,8 @@ allprojects {
         // utils
         runtime "ch.qos.logback:logback-classic:1.2.3"
         compile group: 'com.google.guava', name: 'guava', version: '27.1-jre'
-        compileOnly 'org.projectlombok:lombok:1.18.8'
-        annotationProcessor "org.projectlombok:lombok:1.18.8"
+        compileOnly 'org.projectlombok:lombok:' + lombokVersion
+        annotationProcessor "org.projectlombok:lombok:" + lombokVersion
 
         // micronaut
         annotationProcessor "io.micronaut:micronaut-inject-java"
@@ -96,6 +104,22 @@ allprojects {
         // floworc
        compile group: 'com.devskiller.friendly-id', name: 'friendly-id', version: '1.1.0'
     }
+
+    // test
+    test {
+        useJUnitPlatform()
+
+        testLogging {
+            exceptionFormat = "full"
+        }
+    }
+
+    testlogger {
+        theme 'mocha-parallel'
+        showExceptions true
+        slowThreshold 2000
+        showStandardStreams true
+    }
 }
 
 /**********************************************************************************************************************\
@@ -115,16 +139,6 @@ run.jvmArgs(
     "-Dcom.sun.management.jmxremote",
     '-Dmicronaut.environments=dev,override'
 )
-/**********************************************************************************************************************\
- * Test
- **********************************************************************************************************************/
-test {
-    useJUnitPlatform()
-
-    testLogging {
-        exceptionFormat = "full"
-    }
-}
 
 /**********************************************************************************************************************\
  * Jar

@@ -9,7 +9,7 @@ dependencies {
     // modules
     compile project(":core")
 
-    compile project(":repository-local")
+    compile project(":repository-memory")
 
     compile project(":runner-memory")
     compile project(":runner-kafka")

@@ -1,8 +1,8 @@
-package org.floworc.core;
+package org.floworc.cli;
 
 import io.micronaut.configuration.picocli.PicocliRunner;
-import org.floworc.core.commands.TestCommand;
-import org.floworc.core.commands.WorkerCommand;
+import org.floworc.cli.commands.TestCommand;
+import org.floworc.cli.commands.WorkerCommand;
 import picocli.CommandLine;
 
 import java.util.concurrent.Callable;

cli/src/main/java/org/floworc/cli/commands/TestCommand.java (new file, 55 lines)
@@ -0,0 +1,55 @@
+package org.floworc.cli.commands;
+
+import lombok.extern.slf4j.Slf4j;
+import org.floworc.core.models.flows.Flow;
+import org.floworc.core.repositories.FlowRepositoryInterface;
+import org.floworc.core.repositories.LocalFlowRepositoryLoader;
+import org.floworc.core.runners.RunnerUtils;
+import org.floworc.runner.memory.MemoryRunner;
+import picocli.CommandLine;
+
+import javax.inject.Inject;
+import java.io.IOException;
+import java.nio.file.Path;
+import java.util.List;
+import java.util.concurrent.TimeoutException;
+
+@CommandLine.Command(
+    name = "test",
+    description = "test a flow"
+)
+@Slf4j
+public class TestCommand implements Runnable {
+    @CommandLine.Parameters(description = "the flow file to test")
+    private Path file;
+
+    @Inject
+    private MemoryRunner runner;
+
+    @Inject
+    private LocalFlowRepositoryLoader repositoryLoader;
+
+    @Inject
+    private FlowRepositoryInterface flowRepository;
+
+    @Inject
+    private RunnerUtils runnerUtils;
+
+    public void run() {
+        try {
+            runner.run();
+            repositoryLoader.load(file.toFile());
+
+            List<Flow> all = flowRepository.findAll();
+            if (all.size() != 1) {
+                throw new IllegalArgumentException("Too many flow found, need 1, found " + all.size());
+            }
+
+            runnerUtils.runOne(all.get(0).getId());
+            runner.close();
+        } catch (IOException | TimeoutException e) {
+            throw new IllegalStateException(e);
+        }
+
+    }
+}

@@ -1,4 +1,4 @@
-package org.floworc.core.commands;
+package org.floworc.cli.commands;
 
 import io.micronaut.context.ApplicationContext;
 import lombok.extern.slf4j.Slf4j;
@@ -27,5 +27,7 @@ public class WorkerCommand implements Runnable {
         for (int i = 0; i < thread; i++) {
             poolExecutor.execute(applicationContext.getBean(Worker.class));
         }
+
+        log.info("Workers started with {} thread(s)", this.thread);
     }
 }

@@ -1,35 +0,0 @@
-package org.floworc.core.commands;
-
-import lombok.extern.slf4j.Slf4j;
-import org.floworc.core.models.executions.Execution;
-import org.floworc.core.models.flows.Flow;
-import org.floworc.core.serializers.YamlFlowParser;
-import org.floworc.runner.memory.MemoryRunner;
-import picocli.CommandLine;
-
-import java.io.File;
-import java.io.IOException;
-
-@CommandLine.Command(
-    name = "test",
-    description = "test a flow"
-)
-@Slf4j
-public class TestCommand implements Runnable {
-    @CommandLine.Parameters(description = "the flow file to test")
-    private File file;
-
-    private static final YamlFlowParser yamlFlowParser = new YamlFlowParser();
-
-    public void run() {
-        MemoryRunner runner = new MemoryRunner();
-
-        Flow flow = null;
-        try {
-            flow = yamlFlowParser.parse(file);
-            Execution execution = runner.run(flow);
-        } catch (IOException | InterruptedException e) {
-            log.error("Failed flow", e);
-        }
-    }
-}

@@ -1,3 +1,41 @@
 micronaut:
   application:
     name: floworc
+
+floworc:
+  kafka:
+    defaults:
+      topic:
+        partitions: 64
+        replication-factor: 1
+        properties:
+          compression.type: "lz4"
+
+      consumer:
+        properties:
+          isolation.level: "read_committed"
+          auto.offset.reset: "earliest"
+          enable.auto.commit: "false"
+
+      producer:
+        properties:
+          acks: "all"
+
+      stream:
+        properties:
+          processing.guarantee: "exactly_once"
+          replication.factor: "${floworc.kafka.defaults.topic.replication-factor}"
+          acks: "all"
+
+    topics:
+      org.floworc.core.models.executions.Execution:
+        name: "floworc_execution"
+        properties:
+          cleanup.policy: "compact"
+          retention.ms: "-1"
+
+      org.floworc.core.runners.WorkerTask:
+        name: "floworc_workertask"
+
+      org.floworc.core.runners.WorkerTaskResult:
+        name: "floworc_workertaskresult"
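
Note: the keys under floworc.kafka.topics are fully qualified class names. Both KafkaQueue and KafkaAdminService (later in this diff) resolve a topic for a class by lower-casing its name and replacing dots with dashes, which appears to match how Micronaut's @EachProperty normalizes these keys. A minimal sketch of that lookup key, under that assumption:

    // Hypothetical illustration of the topic lookup key for Execution:
    String key = Execution.class.getName().toLowerCase().replace(".", "-");
    // -> "org-floworc-core-models-executions-execution"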

@@ -32,16 +32,22 @@
         </encoder>
     </appender>
 
-    <root level="INFO">
+    <root level="WARN">
         <appender-ref ref="STDOUT" />
         <appender-ref ref="STDERR" />
     </root>
 
-    <logger name="org.apache" level="WARN" />
-    <logger name="io.confluent" level="WARN" />
+    <logger name="org.floworc" level="INFO" />
+    <logger name="flow" level="INFO" />
+
+    <logger name="org.floworc.runner.kafka.services" level="WARN" />
 
     <!-- The configuration '%s' was supplied but isn't a known config. > https://github.com/apache/kafka/pull/5876 -->
     <logger name="org.apache.kafka.clients.producer.ProducerConfig" level="ERROR" />
     <logger name="org.apache.kafka.clients.admin.AdminClientConfig" level="ERROR" />
     <logger name="org.apache.kafka.clients.consumer.ConsumerConfig" level="ERROR" />
+
+    <!--- Error registering AppInfo mbean -->
+    <logger name="org.apache.kafka.common.utils.AppInfoParser" level="ERROR" />
+
 </configuration>

@@ -11,9 +11,4 @@ dependencies {
 
     // validations
     compile group: 'org.hibernate.validator', name: 'hibernate-validator', version: '6.0.17.Final'
-
-    // redis
-    implementation 'com.github.lettuce-io:lettuce-core:master'
-
-
 }

@@ -1,6 +1,7 @@
 package org.floworc.core.models.executions;
 
-import lombok.*;
+import lombok.Builder;
+import lombok.Value;
 import lombok.experimental.Wither;
 import org.floworc.core.models.flows.State;
 import org.floworc.core.models.tasks.Task;

@@ -5,6 +5,10 @@ import org.floworc.core.runners.WorkerTask;
 import org.floworc.core.runners.WorkerTaskResult;
 
 public interface QueueFactoryInterface {
+    String EXECUTION_NAMED = "executionQueue";
+    String WORKERTASK_NAMED = "workerTaskQueue";
+    String WORKERTASKRESULT_NAMED = "workerTaskResultQueue";
+
     QueueInterface<Execution> execution();
 
     QueueInterface<WorkerTask> workerTask();

@@ -6,7 +6,5 @@ import java.util.function.Consumer;
 public interface QueueInterface<T> extends Closeable {
     void emit(T message);
 
-    void receive(Class consumerGroup, Consumer<T> consumer);
-
-    void ack(T message);
+    Runnable receive(Class consumerGroup, Consumer<T> consumer);
 }
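
Note: receive() now returns a Runnable that acts as a cancellation handle, replacing the removed ack() method; running it unsubscribes the consumer. A minimal usage sketch (queue and MyConsumer are placeholders, not part of this commit):

    // Hypothetical caller of the new contract; 'queue' is any QueueInterface<Execution>.
    Runnable cancel = queue.receive(MyConsumer.class, message -> {
        // handle the message
    });
    // later, stop consuming:
    cancel.run();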

@@ -6,7 +6,15 @@ import java.util.List;
 import java.util.Optional;
 
 public interface FlowRepositoryInterface {
-    Optional<Flow> getFlowById(String id);
+    Optional<Flow> findById(String id);
 
-    List<Flow> getFlows();
+    List<Flow> findAll();
+
+    void save(Flow flow);
+
+    void insert(Flow flow);
+
+    void update(Flow flow);
+
+    void delete(Flow flow);
 }

@@ -0,0 +1,39 @@
+package org.floworc.core.repositories;
+
+import org.floworc.core.models.flows.Flow;
+import org.floworc.core.serializers.YamlFlowParser;
+
+import javax.inject.Inject;
+import javax.inject.Singleton;
+import java.io.File;
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.List;
+import java.util.stream.Collectors;
+
+@Singleton
+public class LocalFlowRepositoryLoader {
+    private static final YamlFlowParser yamlFlowParser = new YamlFlowParser();
+
+    @Inject
+    private FlowRepositoryInterface flowRepository;
+
+    public void load(URL basePath) throws IOException, URISyntaxException {
+        this.load(new File(basePath.toURI()));
+    }
+
+    public void load(File basePath) throws IOException {
+        List<Path> list = Files.walk(basePath.toPath())
+            .filter(path -> com.google.common.io.Files.getFileExtension(path.toString()).equals("yaml"))
+            .collect(Collectors.toList());
+
+        for (Path file: list) {
+            Flow parse = yamlFlowParser.parse(file.toFile());
+
+            flowRepository.save(parse);
+        }
+    }
+}

@@ -1,7 +1,4 @@
 package org.floworc.core.runners;
 
-import io.micronaut.context.annotation.Prototype;
-
-@Prototype
 public interface ExecutionStateInterface extends Runnable {
 }

@@ -38,7 +38,7 @@ public class Executor implements Runnable {
         }
 
         Flow flow = this.flowRepository
-            .getFlowById(execution.getFlowId())
+            .findById(execution.getFlowId())
             .orElseThrow(() -> new IllegalArgumentException("Invalid flow id '" + execution.getFlowId() + "'"));
 
         this.executionService.getNexts(execution, flow.getTasks())

@@ -3,6 +3,7 @@ package org.floworc.core.runners;
 import io.micronaut.context.annotation.Factory;
 import io.micronaut.context.annotation.Prototype;
 import org.floworc.core.models.executions.Execution;
+import org.floworc.core.queues.QueueFactoryInterface;
 import org.floworc.core.queues.QueueInterface;
 import org.floworc.core.repositories.FlowRepositoryInterface;
 
@@ -14,9 +15,9 @@ public class RunnerProcessFactory {
     @Prototype
     @Inject
     public Worker worker(
-        @Named("executionQueue") QueueInterface<Execution> executionQueue,
-        @Named("workerTaskQueue") QueueInterface<WorkerTask> workerTaskQueue,
-        @Named("workerTaskResultQueue") QueueInterface<WorkerTaskResult> workerTaskResultQueue
+        @Named(QueueFactoryInterface.EXECUTION_NAMED) QueueInterface<Execution> executionQueue,
+        @Named(QueueFactoryInterface.WORKERTASK_NAMED) QueueInterface<WorkerTask> workerTaskQueue,
+        @Named(QueueFactoryInterface.WORKERTASKRESULT_NAMED) QueueInterface<WorkerTaskResult> workerTaskResultQueue
     ) {
         return new Worker(
             workerTaskQueue,
@@ -26,9 +27,9 @@ public class RunnerProcessFactory {
 
     @Prototype
     public Executor executor(
-        @Named("executionQueue") QueueInterface<Execution> executionQueue,
-        @Named("workerTaskQueue") QueueInterface<WorkerTask> workerTaskQueue,
-        @Named("workerTaskResultQueue") QueueInterface<WorkerTaskResult> workerTaskResultQueue,
+        @Named(QueueFactoryInterface.EXECUTION_NAMED) QueueInterface<Execution> executionQueue,
+        @Named(QueueFactoryInterface.WORKERTASK_NAMED) QueueInterface<WorkerTask> workerTaskQueue,
+        @Named(QueueFactoryInterface.WORKERTASKRESULT_NAMED) QueueInterface<WorkerTaskResult> workerTaskResultQueue,
         FlowRepositoryInterface flowRepository,
         ExecutionService executionService
     ) {
@@ -42,7 +43,7 @@ public class RunnerProcessFactory {
 
     @Prototype
     public ExecutionService executionService(
-        @Named("workerTaskResultQueue") QueueInterface<WorkerTaskResult> workerTaskResultQueue
+        @Named(QueueFactoryInterface.WORKERTASKRESULT_NAMED) QueueInterface<WorkerTaskResult> workerTaskResultQueue
     ) {
         return new ExecutionService(workerTaskResultQueue);
     }

core/src/main/java/org/floworc/core/runners/RunnerUtils.java (new file, 59 lines)
@@ -0,0 +1,59 @@
+package org.floworc.core.runners;
+
+import com.devskiller.friendly_id.FriendlyId;
+import org.floworc.core.models.executions.Execution;
+import org.floworc.core.models.flows.Flow;
+import org.floworc.core.models.flows.State;
+import org.floworc.core.queues.QueueFactoryInterface;
+import org.floworc.core.queues.QueueInterface;
+import org.floworc.core.repositories.FlowRepositoryInterface;
+import org.floworc.core.utils.Await;
+
+import javax.inject.Inject;
+import javax.inject.Named;
+import javax.inject.Singleton;
+import java.util.concurrent.TimeoutException;
+import java.util.concurrent.atomic.AtomicReference;
+
+@Singleton
+public class RunnerUtils {
+    @Inject
+    @Named(QueueFactoryInterface.EXECUTION_NAMED)
+    protected QueueInterface<Execution> executionQueue;
+
+    @Inject
+    private FlowRepositoryInterface flowRepository;
+
+    public Execution runOne(String flowId) throws TimeoutException {
+        return this.runOne(
+            flowRepository
+                .findById(flowId)
+                .orElseThrow(() -> new IllegalArgumentException("Unable to find execution '" + flowId + "'"))
+        );
+    }
+
+    private Execution runOne(Flow flow) throws TimeoutException {
+        Execution execution = Execution.builder()
+            .id(FriendlyId.createFriendlyId())
+            .flowId(flow.getId())
+            .state(new State())
+            .build();
+
+        AtomicReference<Execution> receive = new AtomicReference<>();
+
+        Runnable cancel = this.executionQueue.receive(StandAloneRunner.class, current -> {
+            if (current.getId().equals(execution.getId()) && current.getState().isTerninated()) {
+                receive.set(current);
+            }
+        });
+
+        this.executionQueue.emit(execution);
+
+        Await.until(() -> receive.get() != null, 5 * 1000);
+
+        cancel.run();
+
+        return receive.get();
+    }
+
+}
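
Note: RunnerUtils shows the emit/receive/cancel round trip end to end: subscribe for the terminated execution, emit it, wait via Await.until, then cancel the subscription. A hedged sketch of how a caller drives it, modeled on TestCommand earlier in this diff ("my-flow" and the directory are hypothetical):

    runner.run();                                      // start the runner
    repositoryLoader.load(new File("flows"));          // hypothetical flow directory
    Execution result = runnerUtils.runOne("my-flow");  // blocks until terminated, or 5 s timeout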

@@ -1,44 +1,48 @@
 package org.floworc.core.runners;
 
-import com.devskiller.friendly_id.FriendlyId;
 import io.micronaut.context.ApplicationContext;
 import lombok.extern.slf4j.Slf4j;
 import org.floworc.core.models.executions.Execution;
-import org.floworc.core.models.flows.Flow;
-import org.floworc.core.models.flows.State;
+import org.floworc.core.queues.QueueFactoryInterface;
 import org.floworc.core.queues.QueueInterface;
+import org.floworc.core.repositories.FlowRepositoryInterface;
 
 import javax.inject.Inject;
 import javax.inject.Named;
+import java.io.Closeable;
 import java.io.IOException;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicReference;
 
 @Slf4j
-public class StandAloneRunner implements RunnerInterface {
+public class StandAloneRunner implements RunnerInterface, Closeable {
     private ExecutorService poolExecutor = Executors.newCachedThreadPool();
 
     @Inject
-    @Named("executionQueue")
+    @Named(QueueFactoryInterface.EXECUTION_NAMED)
     protected QueueInterface<Execution> executionQueue;
 
     @Inject
-    @Named("workerTaskQueue")
+    @Named(QueueFactoryInterface.WORKERTASK_NAMED)
     protected QueueInterface<WorkerTask> workerTaskQueue;
 
     @Inject
-    @Named("workerTaskResultQueue")
+    @Named(QueueFactoryInterface.WORKERTASKRESULT_NAMED)
    protected QueueInterface<WorkerTaskResult> workerTaskResultQueue;
 
     @Inject
     private ApplicationContext applicationContext;
 
+    @Inject
+    private FlowRepositoryInterface flowRepository;
+
+    private boolean running = false;
+
     @Override
     public void run() {
-        int processors = Math.max(3, Runtime.getRuntime().availableProcessors());
+        this.running = true;
+
+        int processors = Math.max(3, Runtime.getRuntime().availableProcessors());
         for (int i = 0; i < processors; i++) {
             poolExecutor.execute(applicationContext.getBean(Executor.class));
 
@@ -48,33 +52,16 @@ public class StandAloneRunner implements RunnerInterface {
         }
     }
 
-    public Execution runOne(Flow flow) throws InterruptedException, IOException {
-        this.run();
-
-        Execution execution = Execution.builder()
-            .id(FriendlyId.createFriendlyId())
-            .flowId(flow.getId())
-            .state(new State())
-            .build();
-
-        AtomicReference<Execution> receive = new AtomicReference<>();
-
-        this.executionQueue.receive(StandAloneRunner.class, current -> {
-            if (current.getState().isTerninated()) {
-                receive.set(current);
-
-                poolExecutor.shutdown();
-            }
-        });
-
-        this.executionQueue.emit(execution);
-
-        poolExecutor.awaitTermination(1, TimeUnit.MINUTES);
+    public boolean isRunning() {
+        return this.running;
+    }
 
+    @Override
+    public void close() throws IOException {
+        this.poolExecutor.shutdown();
         this.executionQueue.close();
         this.workerTaskQueue.close();
         this.workerTaskResultQueue.close();
-
-        return receive.get();
     }
 }

@@ -31,24 +31,27 @@ public class Worker implements Runnable {
         );
 
         if (workerTask.getTask() instanceof RunnableTask) {
+            State.Type state;
+
             RunnableTask task = (RunnableTask) workerTask.getTask();
             try {
                 task.run();
-                this.workerTaskResultQueue.emit(
-                    new WorkerTaskResult(workerTask, State.Type.SUCCESS)
-                );
+                state = State.Type.SUCCESS;
             } catch (Exception e) {
                 workerTask.logger().error("Failed task", e);
-                this.workerTaskResultQueue.emit(new WorkerTaskResult(workerTask, State.Type.FAILED));
+                state = State.Type.FAILED;
             }
 
+            this.workerTaskResultQueue.emit(
+                new WorkerTaskResult(workerTask, state)
+            );
+
             workerTask.logger().info(
-                "[execution: {}] [taskrun: {}] Task {} completed in {}",
+                "[execution: {}] [taskrun: {}] Task {} with state {} completed in {}",
                 workerTask.getTaskRun().getExecutionId(),
                 workerTask.getTaskRun().getId(),
                 workerTask.getTask().getClass().getSimpleName(),
+                state,
                 workerTask.getTaskRun().getState().humanDuration()
             );
         }

@@ -0,0 +1,29 @@
+
+package org.floworc.core.serializers;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.SerializationFeature;
+import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
+import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
+import com.fasterxml.jackson.module.paramnames.ParameterNamesModule;
+
+abstract public class JacksonMapper {
+    public static ObjectMapper ofJson() {
+        return JacksonMapper.configure(
+            new ObjectMapper()
+        );
+    }
+
+    public static ObjectMapper ofYaml() {
+        return JacksonMapper.configure(
+            new ObjectMapper(new YAMLFactory())
+        );
+    }
+
+    private static ObjectMapper configure(ObjectMapper mapper) {
+        return mapper
+            .configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false)
+            .registerModule(new JavaTimeModule())
+            .registerModule(new ParameterNamesModule());
+    }
+}

@@ -15,9 +15,7 @@ import java.io.IOException;
 import java.util.Set;
 
 public class YamlFlowParser {
-    private static final ObjectMapper mapper = new ObjectMapper(new YAMLFactory())
-        .registerModule(new Jdk8Module())
-        .registerModule(new JavaTimeModule());
+    private static final ObjectMapper mapper = JacksonMapper.ofYaml();
 
     private static final Validator validator = Validation.byDefaultProvider()
         .configure()

core/src/main/java/org/floworc/core/utils/Await.java (new file, 32 lines)
@@ -0,0 +1,32 @@
+package org.floworc.core.utils;
+
+import java.util.concurrent.TimeoutException;
+import java.util.function.BooleanSupplier;
+
+public class Await {
+    public static void until(BooleanSupplier condition) {
+        while (!condition.getAsBoolean()) {
+            try {
+                Thread.sleep(100);
+            } catch (InterruptedException e) {
+                throw new RuntimeException("Can't sleep");
+            }
+        }
+    }
+
+    public static void until(BooleanSupplier condition, long timeoutMs) throws TimeoutException {
+        long start = System.currentTimeMillis();
+        while (!condition.getAsBoolean()) {
+            if (System.currentTimeMillis() - start > timeoutMs) {
+                throw new TimeoutException(String.format("Execution failed to terminate within %s ms", timeoutMs));
+            } else {
+                try {
+                    Thread.sleep(100);
+                } catch (InterruptedException e) {
+                    throw new RuntimeException("Can't sleep");
+                }
+            }
+        }
+    }
+
+}
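
Note: both Await.until overloads poll every 100 ms; the two-argument form throws TimeoutException once timeoutMs elapses. For example:

    AtomicBoolean done = new AtomicBoolean(false);
    // some other thread eventually flips 'done'
    Await.until(done::get, 5 * 1000); // TimeoutException after 5 s of polling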

@@ -1,11 +1,14 @@
 package org.floworc.core;
 
 import org.floworc.core.models.flows.Flow;
+import org.floworc.core.repositories.LocalFlowRepositoryLoader;
 import org.floworc.core.serializers.YamlFlowParser;
 
 import java.io.File;
 import java.io.IOException;
+import java.net.URISyntaxException;
 import java.net.URL;
+import java.util.Objects;
 
 import static org.junit.jupiter.api.Assertions.assertNotNull;
 
@@ -20,4 +23,13 @@ abstract public class Utils {
 
         return yamlFlowParser.parse(file);
     }
+
+    public static void loads(LocalFlowRepositoryLoader repositoryLoader) throws IOException, URISyntaxException {
+        Utils.loads(repositoryLoader, "flows/valids");
+    }
+
+    public static void loads(LocalFlowRepositoryLoader repositoryLoader, String path) throws IOException, URISyntaxException {
+        URL url = Objects.requireNonNull(Utils.class.getClassLoader().getResource(path));
+        repositoryLoader.load(url);
+    }
 }

@@ -15,7 +15,7 @@ import static org.junit.jupiter.api.Assertions.assertThrows;
 class YamlFlowParserTest {
     @Test
     void parse() throws IOException {
-        Flow flow = Utils.parse("flows/full.yaml");
+        Flow flow = Utils.parse("flows/valids/full.yaml");
 
         assertThat(flow.getId(), is("full"));
         assertThat(flow.getTasks().size(), is(5));
@@ -31,11 +31,11 @@ class YamlFlowParserTest {
     @Test
     void validation() throws IOException {
         assertThrows(InvalidDefinitionException.class, () -> {
-            Utils.parse("flows/invalid.yaml");
+            Utils.parse("flows/invalids/invalid.yaml");
         });
 
         try {
-            Utils.parse("flows/invalid.yaml");
+            Utils.parse("flows/invalids/invalid.yaml");
         } catch (InvalidDefinitionException e) {
             assertThat(e.getViolations().size(), is(3));
         }

@@ -20,4 +20,15 @@
 
     <logger name="org.floworc" level="INFO" />
     <logger name="flow" level="INFO" />
+
+    <logger name="org.floworc.runner.kafka.services" level="WARN" />
+
+    <!-- The configuration '%s' was supplied but isn't a known config. > https://github.com/apache/kafka/pull/5876 -->
+    <logger name="org.apache.kafka.clients.producer.ProducerConfig" level="ERROR" />
+    <logger name="org.apache.kafka.clients.admin.AdminClientConfig" level="ERROR" />
+    <logger name="org.apache.kafka.clients.consumer.ConsumerConfig" level="ERROR" />
+
+    <!--- Error registering AppInfo mbean -->
+    <logger name="org.apache.kafka.common.utils.AppInfoParser" level="ERROR" />
+
 </configuration>

@@ -1,45 +0,0 @@
-package org.floworc.repository.local;
-
-import io.micronaut.context.annotation.Value;
-import org.floworc.core.models.flows.Flow;
-import org.floworc.core.repositories.FlowRepositoryInterface;
-import org.floworc.core.serializers.YamlFlowParser;
-
-import javax.inject.Singleton;
-import java.io.File;
-import java.io.IOException;
-import java.nio.file.Files;
-import java.util.List;
-import java.util.Optional;
-import java.util.stream.Collectors;
-
-@Singleton
-public class LocalFlowRepository implements FlowRepositoryInterface {
-    @Value("${floworc.repository.local.base-path}")
-    private File basePath;
-
-    private static final YamlFlowParser yamlFlowParser = new YamlFlowParser();
-
-    @Override
-    public Optional<Flow> getFlowById(String id) {
-        File file = new File(this.basePath, id + ".yaml");
-        try {
-            return Optional.of(yamlFlowParser.parse(file));
-        } catch (IOException e) {
-            return Optional.empty();
-        }
-    }
-
-    @Override
-    public List<Flow> getFlows() {
-        try {
-            return Files.list(this.basePath.toPath())
-                .map(path -> this.getFlowById(path.toFile().getName()))
-                .filter(Optional::isPresent)
-                .map(Optional::get)
-                .collect(Collectors.toList());
-        } catch (IOException e) {
-            throw new RuntimeException(e);
-        }
-    }
-}

@@ -0,0 +1,54 @@
+package org.floworc.repository.memory;
+
+import org.floworc.core.models.flows.Flow;
+import org.floworc.core.repositories.FlowRepositoryInterface;
+
+import javax.inject.Singleton;
+import java.util.*;
+
+@Singleton
+public class MemoryFlowRepository implements FlowRepositoryInterface {
+    private Map<String, Flow> flows = new HashMap<>();
+
+    @Override
+    public Optional<Flow> findById(String id) {
+        return this.flows.containsKey(id) ? Optional.of(this.flows.get(id)) : Optional.empty();
+    }
+
+    @Override
+    public List<Flow> findAll() {
+        return new ArrayList<>(this.flows.values());
+    }
+
+    @Override
+    public void save(Flow flow) {
+        this.flows.put(flow.getId(), flow);
+    }
+
+    @Override
+    public void insert(Flow flow) {
+        if (this.flows.containsKey(flow.getId())) {
+            throw new IllegalStateException("Flow " + flow.getId() + " already exists");
+        }
+
+        this.flows.put(flow.getId(), flow);
+    }
+
+    @Override
+    public void update(Flow flow) {
+        if (!this.flows.containsKey(flow.getId())) {
+            throw new IllegalStateException("Flow " + flow.getId() + " already exists");
+        }
+
+        this.flows.put(flow.getId(), flow);
+    }
+
+    @Override
+    public void delete(Flow flow) {
+        if (!this.flows.containsKey(flow.getId())) {
+            throw new IllegalStateException("Flow " + flow.getId() + " already exists");
+        }
+
+        this.flows.remove(flow.getId());
+    }
+}

runner-kafka/build.gradle (new file, 11 lines)
@@ -0,0 +1,11 @@
+sourceCompatibility = 11
+
+dependencies {
+    compile project(":core")
+
+    compile group: "org.apache.kafka", name: "kafka-clients", version: kafkaVersion
+    compile group: "org.apache.kafka", name: 'kafka-streams', version: kafkaVersion
+
+    testCompile project(':core').sourceSets.test.output
+    testCompile project(':repository-memory').sourceSets.main.output
+}

@@ -0,0 +1,66 @@
+package org.floworc.runner.kafka;
+
+import io.micronaut.context.annotation.Prototype;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.common.serialization.Serdes;
+import org.apache.kafka.common.utils.Bytes;
+import org.apache.kafka.streams.StreamsBuilder;
+import org.apache.kafka.streams.Topology;
+import org.apache.kafka.streams.kstream.Consumed;
+import org.apache.kafka.streams.kstream.Materialized;
+import org.apache.kafka.streams.kstream.Produced;
+import org.apache.kafka.streams.state.KeyValueStore;
+import org.floworc.core.models.executions.Execution;
+import org.floworc.core.runners.ExecutionStateInterface;
+import org.floworc.core.runners.WorkerTaskResult;
+import org.floworc.runner.kafka.serializers.JsonSerde;
+import org.floworc.runner.kafka.services.KafkaAdminService;
+import org.floworc.runner.kafka.services.KafkaStreamService;
+
+import javax.inject.Inject;
+
+@Slf4j
+@KafkaQueueEnabled
+@Prototype
+public class KafkaExecutionState implements ExecutionStateInterface {
+    @Inject
+    KafkaStreamService kafkaStreamService;
+
+    @Inject
+    KafkaAdminService kafkaAdminService;
+
+    private Topology topology() {
+        kafkaAdminService.createIfNotExist(WorkerTaskResult.class);
+        kafkaAdminService.createIfNotExist(Execution.class);
+
+        StreamsBuilder builder = new StreamsBuilder();
+
+        builder
+            .stream(
+                kafkaAdminService.getTopicName(WorkerTaskResult.class),
+                Consumed.with(Serdes.String(), JsonSerde.of(WorkerTaskResult.class))
+            )
+            .leftJoin(
+                builder.table(
+                    kafkaAdminService.getTopicName(Execution.class),
+                    Consumed.with(Serdes.String(), JsonSerde.of(Execution.class)),
+                    Materialized.<String, Execution, KeyValueStore<Bytes, byte[]>>as("execution_join")
+                        .withKeySerde(Serdes.String())
+                        .withValueSerde(JsonSerde.of(Execution.class))
+                ),
+                (workerTaskResult, execution) -> execution.withTaskRun(workerTaskResult.getTaskRun())
+            )
+            .to(
+                kafkaAdminService.getTopicName(Execution.class),
+                Produced.with(Serdes.String(), JsonSerde.of(Execution.class))
+            );
+
+        return builder.build();
+    }
+
+    @Override
+    public void run() {
+        KafkaStreamService.Stream stream = kafkaStreamService.of(KafkaExecutionState.class, this.topology());
+        stream.start();
+    }
+}

@@ -0,0 +1,127 @@
+package org.floworc.runner.kafka;
+
+import com.google.common.base.CaseFormat;
+import com.google.common.collect.ImmutableMap;
+import io.micronaut.context.ApplicationContext;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.clients.consumer.OffsetAndMetadata;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.apache.kafka.common.TopicPartition;
+import org.floworc.core.models.executions.Execution;
+import org.floworc.core.queues.QueueInterface;
+import org.floworc.core.runners.WorkerTask;
+import org.floworc.core.runners.WorkerTaskResult;
+import org.floworc.runner.kafka.configs.TopicsConfig;
+import org.floworc.runner.kafka.serializers.JsonSerde;
+import org.floworc.runner.kafka.services.KafkaAdminService;
+import org.floworc.runner.kafka.services.KafkaConsumerService;
+import org.floworc.runner.kafka.services.KafkaProducerService;
+
+import java.io.IOException;
+import java.time.Duration;
+import java.util.Collections;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.function.Consumer;
+
+@Slf4j
+public class KafkaQueue<T> implements QueueInterface<T> {
+    private Class<T> cls;
+    private KafkaConsumerService kafkaConsumerService;
+    private KafkaProducerService kafkaProducerService;
+    private TopicsConfig topicsConfig;
+    private static ExecutorService poolExecutor = Executors.newCachedThreadPool();
+
+    public KafkaQueue(Class<T> cls, ApplicationContext applicationContext) {
+        this.cls = cls;
+        this.kafkaConsumerService = applicationContext.getBean(KafkaConsumerService.class);
+        this.kafkaProducerService = applicationContext.getBean(KafkaProducerService.class);
+        this.topicsConfig = applicationContext
+            .getBeansOfType(TopicsConfig.class)
+            .stream()
+            .filter(r -> r.getCls().equals(this.cls.getName().toLowerCase().replace(".", "-")))
+            .findFirst()
+            .orElseThrow();
+
+        applicationContext
+            .getBean(KafkaAdminService.class)
+            .createIfNotExist(this.cls);
+    }
+
+    private String key(Object object) {
+        if (this.cls == Execution.class) {
+            return ((Execution) object).getId();
+        } else if (this.cls == WorkerTask.class) {
+            return ((WorkerTask) object).getTaskRun().getExecutionId();
+        } else if (this.cls == WorkerTaskResult.class) {
+            return ((WorkerTaskResult) object).getTaskRun().getExecutionId();
+        } else {
+            throw new IllegalArgumentException("Unknown type '" + this.cls.getName() + "'");
+        }
+    }
+
+    @Override
+    public void emit(T message) {
+        if (log.isTraceEnabled()) {
+            log.trace("New message: topic '{}', value {}", topicsConfig.getName(), message);
+        }
+
+        try {
+            kafkaProducerService
+                .of(cls, JsonSerde.of(cls))
+                .send(new ProducerRecord<>(topicsConfig.getName(), this.key(message), message))
+                .get();
+        } catch (InterruptedException | ExecutionException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    @Override
+    public Runnable receive(Class consumerGroup, Consumer<T> consumer) {
+        AtomicBoolean running = new AtomicBoolean(true);
+        Runnable exitCallback = () -> running.set(false);
+
+        poolExecutor.execute(() -> {
+            KafkaConsumer<String, T> kafkaConsumer = kafkaConsumerService.of(
+                consumerGroup,
+                JsonSerde.of(this.cls)
+            );
+
+            kafkaConsumer.subscribe(Collections.singleton(topicsConfig.getName()));
+
+            while (running.get()) {
+                ConsumerRecords<String, T> records = kafkaConsumer.poll(Duration.ofSeconds(1));
+
+                records.forEach(record -> {
+                    consumer.accept(record.value());
+                    kafkaConsumer.commitSync(
+                        ImmutableMap.of(
+                            new TopicPartition(record.topic(), record.partition()),
+                            new OffsetAndMetadata(record.offset())
+                        )
+                    );
+                });
+            }
+        });
+
+        return exitCallback;
+    }
+
+    public static String getConsumerGroupName(Class group) {
+        return "floworc_" +
+            CaseFormat.UPPER_CAMEL.to(CaseFormat.LOWER_UNDERSCORE,
+                group.getSimpleName().replace("Kafka", "")
+            );
+    }
+
+    @Override
+    public void close() throws IOException {
+        if (!poolExecutor.isShutdown()) {
+            poolExecutor.shutdown();
+        }
+    }
+}
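
Note: offsets are committed with commitSync only after consumer.accept returns, so delivery is at-least-once. Consumer group names are derived from the subscribing class; for example:

    KafkaQueue.getConsumerGroupName(KafkaExecutionState.class);
    // -> "floworc_execution_state" ("Kafka" stripped, then snake-cased and prefixed)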

@@ -0,0 +1,12 @@
+package org.floworc.runner.kafka;
+
+import io.micronaut.context.annotation.Requires;
+
+import java.lang.annotation.*;
+
+@Documented
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.PACKAGE, ElementType.TYPE})
+@Requires(property = "floworc.queue.type", value = "kafka")
+public @interface KafkaQueueEnabled {
+}
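
Note: this annotation gates every Kafka bean behind a single setting, so the runner stays inactive unless explicitly selected. A hypothetical bean showing the gate:

    // Only instantiated when the configuration contains: floworc.queue.type: kafka
    @KafkaQueueEnabled
    @Singleton
    public class SomeKafkaBean {
    }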

@@ -0,0 +1,38 @@
+package org.floworc.runner.kafka;
+
+import io.micronaut.context.ApplicationContext;
+import io.micronaut.context.annotation.Factory;
+import org.floworc.core.models.executions.Execution;
+import org.floworc.core.queues.QueueFactoryInterface;
+import org.floworc.core.queues.QueueInterface;
+import org.floworc.core.runners.WorkerTask;
+import org.floworc.core.runners.WorkerTaskResult;
+
+import javax.inject.Inject;
+import javax.inject.Named;
+import javax.inject.Singleton;
+
+@Factory
+@KafkaQueueEnabled
+public class KafkaQueueFactory implements QueueFactoryInterface {
+    @Inject
+    ApplicationContext applicationContext;
+
+    @Singleton
+    @Named(QueueFactoryInterface.EXECUTION_NAMED)
+    public QueueInterface<Execution> execution() {
+        return new KafkaQueue<>(Execution.class, applicationContext);
+    }
+
+    @Singleton
+    @Named(QueueFactoryInterface.WORKERTASK_NAMED)
+    public QueueInterface<WorkerTask> workerTask() {
+        return new KafkaQueue<>(WorkerTask.class, applicationContext);
+    }
+
+    @Singleton
+    @Named(QueueFactoryInterface.WORKERTASKRESULT_NAMED)
+    public QueueInterface<WorkerTaskResult> workerTaskResult() {
+        return new KafkaQueue<>(WorkerTaskResult.class, applicationContext);
+    }
+}

@@ -0,0 +1,15 @@
+package org.floworc.runner.kafka.configs;
+
+import io.micronaut.context.annotation.ConfigurationProperties;
+import io.micronaut.core.convert.format.MapFormat;
+import lombok.Getter;
+
+import java.util.Map;
+
+@ConfigurationProperties("floworc.kafka.client")
+@Getter
+public class ClientConfig {
+    @MapFormat(transformation = MapFormat.MapTransformation.FLAT)
+    Map<String, String> properties;
+}
+

@@ -0,0 +1,15 @@
+package org.floworc.runner.kafka.configs;
+
+import io.micronaut.context.annotation.ConfigurationProperties;
+import io.micronaut.core.convert.format.MapFormat;
+import lombok.Getter;
+
+import java.util.Map;
+
+@ConfigurationProperties("floworc.kafka.defaults.consumer")
+@Getter
+public class ConsumerDefaultsConfig {
+    @MapFormat(transformation = MapFormat.MapTransformation.FLAT)
+    Map<String, String> properties;
+}
+

@@ -0,0 +1,15 @@
+package org.floworc.runner.kafka.configs;
+
+import io.micronaut.context.annotation.ConfigurationProperties;
+import io.micronaut.core.convert.format.MapFormat;
+import lombok.Getter;
+
+import java.util.Map;
+
+@ConfigurationProperties("floworc.kafka.defaults.producer")
+@Getter
+public class ProducerDefaultsConfig {
+    @MapFormat(transformation = MapFormat.MapTransformation.FLAT)
+    Map<String, String> properties;
+}
+

@@ -0,0 +1,15 @@
+package org.floworc.runner.kafka.configs;
+
+import io.micronaut.context.annotation.ConfigurationProperties;
+import io.micronaut.core.convert.format.MapFormat;
+import lombok.Getter;
+
+import java.util.Map;
+
+@ConfigurationProperties("floworc.kafka.defaults.stream")
+@Getter
+public class StreamDefaultsConfig {
+    @MapFormat(transformation = MapFormat.MapTransformation.FLAT)
+    Map<String, String> properties;
+}
+

@@ -0,0 +1,19 @@
+package org.floworc.runner.kafka.configs;
+
+import io.micronaut.context.annotation.ConfigurationProperties;
+import io.micronaut.core.convert.format.MapFormat;
+import lombok.Getter;
+
+import java.util.Map;
+
+@ConfigurationProperties("floworc.kafka.defaults.topic")
+@Getter
+public class TopicDefaultsConfig {
+    int partitions = 6;
+
+    short replicationFactor = 1;
+
+    @MapFormat(transformation = MapFormat.MapTransformation.FLAT)
+    Map<String, String> properties;
+}
+

@@ -0,0 +1,24 @@
+package org.floworc.runner.kafka.configs;
+
+import io.micronaut.context.annotation.EachProperty;
+import io.micronaut.context.annotation.Parameter;
+import io.micronaut.core.convert.format.MapFormat;
+import lombok.Getter;
+
+import java.util.Map;
+
+@EachProperty("floworc.kafka.topics")
+@Getter
+public class TopicsConfig {
+    String cls;
+
+    String name;
+
+    @MapFormat(transformation = MapFormat.MapTransformation.FLAT)
+    Map<String, String> properties;
+
+    public TopicsConfig(@Parameter String cls) {
+        this.cls = cls;
+    }
+}
+

@@ -0,0 +1,37 @@
+package org.floworc.runner.kafka.serializers;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.kafka.common.errors.SerializationException;
+import org.apache.kafka.common.serialization.Deserializer;
+import org.floworc.core.serializers.JacksonMapper;
+
+import java.io.IOException;
+import java.util.Map;
+
+public class JsonDeserializer<T> implements Deserializer<T> {
+    private static final ObjectMapper mapper = JacksonMapper.ofJson();
+    private Class<T> cls;
+
+    public JsonDeserializer(Class<T> cls) {
+        super();
+
+        this.cls = cls;
+    }
+
+    @Override
+    public void configure(Map<String, ?> settings, boolean isKey) {
+    }
+
+    @Override
+    public T deserialize(String topic, byte[] bytes) {
+        if (null == bytes) {
+            return null;
+        }
+
+        try {
+            return mapper.readValue(bytes, this.cls);
+        } catch (IOException e) {
+            throw new SerializationException(e);
+        }
+    }
+}
@@ -0,0 +1,43 @@
package org.floworc.runner.kafka.serializers;

import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serializer;

import java.util.Map;

public class JsonSerde<T> implements Serde<T> {
    private final Serializer<T> serializer;
    private final Deserializer<T> deserializer;

    private JsonSerde(Class<T> cls) {
        this.deserializer = new JsonDeserializer<>(cls);
        this.serializer = new JsonSerializer<>();
    }

    public static <T> JsonSerde<T> of(Class<T> cls) {
        return new JsonSerde<>(cls);
    }

    @Override
    public void configure(Map<String, ?> settings, boolean isKey) {
        this.serializer.configure(settings, isKey);
        this.deserializer.configure(settings, isKey);
    }

    @Override
    public void close() {
        this.deserializer.close();
        this.serializer.close();
    }

    @Override
    public Serializer<T> serializer() {
        return this.serializer;
    }

    @Override
    public Deserializer<T> deserializer() {
        return this.deserializer;
    }
}
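A hedged usage sketch: `JsonSerde.of()` plugs in wherever the Kafka Streams DSL expects a `Serde`. The topic name and the `Execution` model come from elsewhere in this commit; the surrounding class is illustrative:

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.kstream.Consumed;
import org.floworc.core.models.executions.Execution;
import org.floworc.runner.kafka.serializers.JsonSerde;

public class JsonSerdeUsageSketch {
    public Topology topology() {
        StreamsBuilder builder = new StreamsBuilder();
        // Keys stay plain strings; values are deserialized from JSON into Execution.
        builder.stream("floworc_execution", Consumed.with(Serdes.String(), JsonSerde.of(Execution.class)));
        return builder.build();
    }
}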
@@ -0,0 +1,37 @@
package org.floworc.runner.kafka.serializers;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.kafka.common.errors.SerializationException;
import org.apache.kafka.common.serialization.Serializer;
import org.floworc.core.serializers.JacksonMapper;

import java.util.Map;

public class JsonSerializer<T> extends JacksonMapper implements Serializer<T> {
    private static final ObjectMapper mapper = JacksonMapper.ofJson();

    public JsonSerializer() {
        super();
    }

    @Override
    public void configure(Map<String, ?> settings, boolean isKey) {
    }

    @Override
    public byte[] serialize(String topic, T message) {
        if (null == message) {
            return null;
        }

        try {
            return mapper.writeValueAsBytes(message);
        } catch (JsonProcessingException e) {
            throw new SerializationException(e);
        }
    }

    public byte[] serialize(T message) {
        return this.serialize("", message);
    }
}
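A quick round-trip sketch tying the serializer and deserializer together — assuming `Execution` is Jackson-serializable, which the queue relies on anyway; the class wrapper is illustrative:

import org.floworc.core.models.executions.Execution;
import org.floworc.runner.kafka.serializers.JsonDeserializer;
import org.floworc.runner.kafka.serializers.JsonSerializer;

public class JsonRoundTripSketch {
    public Execution roundTrip(Execution execution) {
        byte[] bytes = new JsonSerializer<Execution>().serialize("floworc_execution", execution);
        // Deserializing the same bytes should yield an equivalent Execution.
        return new JsonDeserializer<>(Execution.class).deserialize("floworc_execution", bytes);
    }
}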
@@ -0,0 +1,108 @@
package org.floworc.runner.kafka.services;

import com.google.common.collect.ImmutableMap;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.ConfigEntry;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.common.config.ConfigResource;
import org.apache.kafka.common.errors.TopicExistsException;
import org.floworc.runner.kafka.configs.ClientConfig;
import org.floworc.runner.kafka.configs.TopicDefaultsConfig;
import org.floworc.runner.kafka.configs.TopicsConfig;

import javax.inject.Inject;
import javax.inject.Singleton;
import java.util.*;
import java.util.concurrent.ExecutionException;
import java.util.stream.Collectors;

@Singleton
@Slf4j
public class KafkaAdminService {
    @Inject
    private ClientConfig clientConfig;

    @Inject
    private TopicDefaultsConfig topicDefaultsConfig;

    @Inject
    private List<TopicsConfig> topicsConfig;

    public AdminClient of() {
        Properties properties = new Properties();
        properties.putAll(clientConfig.getProperties());

        return AdminClient.create(properties);
    }

    private TopicsConfig getTopicConfig(Class cls) {
        return this.topicsConfig
            .stream()
            .filter(r -> r.getCls().equals(cls.getName().toLowerCase().replace(".", "-")))
            .findFirst()
            .orElseThrow();
    }

    @SuppressWarnings("deprecation")
    public boolean createIfNotExist(Class cls) {
        TopicsConfig topicConfig = this.getTopicConfig(cls);

        AdminClient admin = this.of();
        NewTopic newTopic = new NewTopic(
            topicConfig.getName(),
            topicDefaultsConfig.getPartitions(),
            topicDefaultsConfig.getReplicationFactor()
        );

        Map<String, String> properties = new HashMap<>();

        if (topicDefaultsConfig.getProperties() != null) {
            properties.putAll(topicDefaultsConfig.getProperties());
        }

        if (topicConfig.getProperties() != null) {
            properties.putAll(topicConfig.getProperties());
        }

        newTopic.configs(properties);

        try {
            admin.createTopics(Collections.singletonList(newTopic)).all().get();
            log.info("Topic '{}' created", newTopic.name());
            return true;
        } catch (ExecutionException | InterruptedException e) {
            if (e.getCause() instanceof TopicExistsException) {
                try {
                    admin
                        .alterConfigs(new HashMap<>() {{
                            put(
                                new ConfigResource(ConfigResource.Type.TOPIC, newTopic.name()),
                                new org.apache.kafka.clients.admin.Config(
                                    newTopic.configs()
                                        .entrySet()
                                        .stream()
                                        .map(config -> new ConfigEntry(config.getKey(), config.getValue()))
                                        .collect(Collectors.toList())
                                )
                            );
                        }}).all().get();

                    log.info("Topic Config '{}' updated", newTopic.name());
                } catch (InterruptedException | ExecutionException e1) {
                    throw new RuntimeException(e);
                }
            } else {
                throw new RuntimeException(e);
            }
        }

        return false;
    }

    public String getTopicName(Class cls) {
        return this.getTopicConfig(cls).getName();
    }
}
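Note how `getTopicConfig()` resolves configuration: the fully qualified class name is lower-cased with dots replaced by dashes, and the result must match a key under `floworc.kafka.topics` (see the test `application.yml` below). A small sketch of the derivation:

public class TopicKeySketch {
    public static String topicKey(Class<?> cls) {
        // Mirrors the lookup in KafkaAdminService.getTopicConfig():
        // org.floworc.core.models.executions.Execution
        //   -> "org-floworc-core-models-executions-execution"
        return cls.getName().toLowerCase().replace(".", "-");
    }
}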
@@ -0,0 +1,79 @@
package org.floworc.runner.kafka.services;

import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRebalanceListener;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.floworc.runner.kafka.KafkaQueue;
import org.floworc.runner.kafka.configs.ClientConfig;
import org.floworc.runner.kafka.configs.ConsumerDefaultsConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.inject.Inject;
import javax.inject.Singleton;
import java.util.Collection;
import java.util.Properties;

@Singleton
@Slf4j
public class KafkaConsumerService {
    @Inject
    private ClientConfig clientConfig;

    @Inject
    private ConsumerDefaultsConfig consumerConfig;

    public <V> Consumer<V> of(Class group, Serde<V> serde) {
        Properties properties = new Properties();
        properties.putAll(clientConfig.getProperties());

        if (this.consumerConfig.getProperties() != null) {
            properties.putAll(consumerConfig.getProperties());
        }

        properties.put(CommonClientConfigs.CLIENT_ID_CONFIG, KafkaQueue.getConsumerGroupName(group));
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, KafkaQueue.getConsumerGroupName(group));

        return new Consumer<>(properties, serde);
    }

    public static class Consumer<V> extends KafkaConsumer<String, V> {
        protected Logger logger = LoggerFactory.getLogger(KafkaConsumerService.class);

        private Consumer(Properties properties, Serde<V> valueSerde) {
            super(properties, new StringDeserializer(), valueSerde.deserializer());
        }

        @Override
        public void subscribe(Collection<String> topics) {
            super.subscribe(topics, new ConsumerRebalanceListener() {
                @Override
                public void onPartitionsRevoked(Collection<TopicPartition> partitions) {
                    if (log.isTraceEnabled()) {
                        partitions.forEach(topicPartition -> logger.trace(
                            "Revoke partitions for topic {}, partition {}",
                            topicPartition.topic(),
                            topicPartition.partition()
                        ));
                    }
                }

                @Override
                public void onPartitionsAssigned(Collection<TopicPartition> partitions) {
                    if (log.isTraceEnabled()) {
                        partitions.forEach(topicPartition -> logger.trace(
                            "Switching partitions for topic {}, partition {}",
                            topicPartition.topic(),
                            topicPartition.partition()
                        ));
                    }
                }
            });
        }
    }
}
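A hedged consumption sketch — `of()` derives both `client.id` and `group.id` from the caller's class, so a class literal doubles as the consumer-group handle. The polling body and wrapper class are illustrative:

import org.floworc.core.models.executions.Execution;
import org.floworc.runner.kafka.serializers.JsonSerde;

import java.time.Duration;
import java.util.Collections;

public class ConsumerUsageSketch {
    public void consume(KafkaConsumerService consumerService, KafkaAdminService adminService) {
        KafkaConsumerService.Consumer<Execution> consumer =
            consumerService.of(ConsumerUsageSketch.class, JsonSerde.of(Execution.class));

        // Topic name is resolved from the same class-to-config mapping as above.
        consumer.subscribe(Collections.singleton(adminService.getTopicName(Execution.class)));
        consumer.poll(Duration.ofMillis(500)).forEach(record -> System.out.println(record.value()));
    }
}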
@@ -0,0 +1,41 @@
package org.floworc.runner.kafka.services;

import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.StringSerializer;
import org.floworc.runner.kafka.KafkaQueue;
import org.floworc.runner.kafka.configs.ClientConfig;
import org.floworc.runner.kafka.configs.ProducerDefaultsConfig;

import javax.inject.Inject;
import javax.inject.Singleton;
import java.util.Properties;

@Singleton
public class KafkaProducerService {
    @Inject
    private ClientConfig clientConfig;

    @Inject
    private ProducerDefaultsConfig producerConfig;

    public <V> KafkaProducerService.Producer<V> of(Class name, Serde<V> serde) {
        Properties properties = new Properties();
        properties.putAll(clientConfig.getProperties());

        if (producerConfig.getProperties() != null) {
            properties.putAll(producerConfig.getProperties());
        }

        properties.put(CommonClientConfigs.CLIENT_ID_CONFIG, KafkaQueue.getConsumerGroupName(name));

        return new KafkaProducerService.Producer<>(properties, serde);
    }

    public static class Producer<V> extends KafkaProducer<String, V> {
        private Producer(Properties properties, Serde<V> valueSerde) {
            super(properties, new StringSerializer(), valueSerde.serializer());
        }
    }
}
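The producer side mirrors the consumer service; only `client.id` is set, since producers have no consumer group. A hedged sketch (the key choice and wrapper class are illustrative):

import org.apache.kafka.clients.producer.ProducerRecord;
import org.floworc.core.models.executions.Execution;
import org.floworc.runner.kafka.serializers.JsonSerde;

public class ProducerUsageSketch {
    public void send(KafkaProducerService producerService, KafkaAdminService adminService,
                     String key, Execution execution) {
        KafkaProducerService.Producer<Execution> producer =
            producerService.of(ProducerUsageSketch.class, JsonSerde.of(Execution.class));

        // String key via StringSerializer, JSON value via JsonSerde.
        producer.send(new ProducerRecord<>(adminService.getTopicName(Execution.class), key, execution));
    }
}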
@@ -0,0 +1,68 @@
package org.floworc.runner.kafka.services;

import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.errors.StreamsException;
import org.floworc.runner.kafka.KafkaQueue;
import org.floworc.runner.kafka.configs.ClientConfig;
import org.floworc.runner.kafka.configs.StreamDefaultsConfig;

import javax.inject.Inject;
import javax.inject.Singleton;
import javax.validation.constraints.NotNull;
import java.time.Duration;
import java.util.Properties;

@Singleton
@Slf4j
public class KafkaStreamService {
    @Inject
    @NotNull
    private ClientConfig clientConfig;

    @Inject
    private StreamDefaultsConfig streamConfig;

    public KafkaStreamService.Stream of(Class group, Topology topology) {
        Properties properties = new Properties();
        properties.putAll(clientConfig.getProperties());

        if (this.streamConfig.getProperties() != null) {
            properties.putAll(streamConfig.getProperties());
        }

        properties.put(CommonClientConfigs.CLIENT_ID_CONFIG, KafkaQueue.getConsumerGroupName(group));
        properties.put(StreamsConfig.APPLICATION_ID_CONFIG, KafkaQueue.getConsumerGroupName(group));

        return new KafkaStreamService.Stream(topology, properties);
    }

    public static class Stream extends KafkaStreams {
        private Stream(Topology topology, Properties props) {
            super(topology, props);
        }

        @Override
        public synchronized void start() throws IllegalStateException, StreamsException {
            this.setUncaughtExceptionHandler((thread, e) -> {
                log.error("Uncaught exception in Kafka Stream " + thread.getName() + ", closing!", e);
                System.exit(1);
            });

            if (log.isTraceEnabled()) {
                this.setStateListener((newState, oldState) -> {
                    log.trace("Switching stream state from {} to {}", oldState, newState);
                });
            }

            Runtime.getRuntime().addShutdownHook(new Thread(() -> {
                this.close(Duration.ofSeconds(10));
            }));

            super.start();
        }
    }
}
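A hedged sketch of starting a stream through the service: the `Stream` wrapper installs the fail-fast exception handler, the optional trace state listener, and the shutdown hook before delegating to `KafkaStreams.start()`. The topology here is a trivial illustrative one:

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.Consumed;
import org.floworc.core.models.executions.Execution;
import org.floworc.runner.kafka.serializers.JsonSerde;

public class StreamUsageSketch {
    public void run(KafkaStreamService streamService) {
        StreamsBuilder builder = new StreamsBuilder();
        builder.stream("floworc_execution", Consumed.with(Serdes.String(), JsonSerde.of(Execution.class)));

        KafkaStreamService.Stream stream = streamService.of(StreamUsageSketch.class, builder.build());
        stream.start(); // registers handlers, then starts the topology
    }
}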
@@ -0,0 +1,50 @@
package org.floworc.runner.kafka;

import io.micronaut.test.annotation.MicronautTest;
import org.floworc.core.Utils;
import org.floworc.core.models.executions.Execution;
import org.floworc.core.repositories.LocalFlowRepositoryLoader;
import org.floworc.core.runners.RunnerUtils;
import org.floworc.core.runners.StandAloneRunner;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import javax.inject.Inject;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.concurrent.TimeoutException;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.hasSize;

@MicronautTest
class KafkaRunnerTest {
    @Inject
    private StandAloneRunner runner;

    @Inject
    private RunnerUtils runnerUtils;

    @Inject
    private LocalFlowRepositoryLoader repositoryLoader;

    // JUnit 5 lifecycle methods must not be private, so init() is package-private.
    @BeforeEach
    void init() throws IOException, URISyntaxException {
        runner.run();
        Utils.loads(repositoryLoader);
    }

    @Test
    void full() throws TimeoutException {
        Execution execution = runnerUtils.runOne("full");

        assertThat(execution.getTaskRunList(), hasSize(13));
    }

    @Test
    void errors() throws TimeoutException {
        Execution execution = runnerUtils.runOne("errors");

        assertThat(execution.getTaskRunList(), hasSize(7));
    }
}
42  runner-kafka/src/test/resources/application.yml  Normal file
@@ -0,0 +1,42 @@
floworc:
  queue:
    type: kafka

  kafka:
    client:
      properties:
        bootstrap.servers: "kafka:9092"

    defaults:
      topic:
        properties:
          compression.type: "lz4"

      consumer:
        properties:
          isolation.level: "read_committed"
          auto.offset.reset: "earliest"
          enable.auto.commit: "false"

      producer:
        properties:
          acks: "all"

      stream:
        properties:
          processing.guarantee: "exactly_once"
          acks: "all"
          state.dir: "/tmp/floworc/kafka/state"

    topics:
      org-floworc-core-models-executions-execution:
        name: "floworc_execution"
        properties:
          cleanup.policy: "compact"
          retention.ms: "-1"

      org-floworc-core-runners-workertask:
        name: "floworc_workertask"

      org-floworc-core-runners-workertaskresult:
        name: "floworc_workertaskresult"
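Worth spelling out the merge semantics: `KafkaAdminService.createIfNotExist()` applies `defaults.topic.properties` first, then the per-topic block, so the effective configuration for `floworc_execution` combines both. A worked sketch of the resulting map:

import java.util.HashMap;
import java.util.Map;

public class TopicConfigMergeSketch {
    public Map<String, String> effectiveExecutionTopicConfig() {
        Map<String, String> properties = new HashMap<>();
        properties.put("compression.type", "lz4");   // from floworc.kafka.defaults.topic.properties
        properties.put("cleanup.policy", "compact"); // per-topic override for floworc_execution
        properties.put("retention.ms", "-1");        // per-topic override for floworc_execution
        return properties;
    }
}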
@@ -4,5 +4,5 @@ dependencies {
     compile project(":core")

     testCompile project(':core').sourceSets.test.output
-    testCompile project(':repository-local').sourceSets.main.output
+    testCompile project(':repository-memory').sourceSets.main.output
 }
@@ -4,6 +4,7 @@ import io.micronaut.context.annotation.Prototype;
 import lombok.extern.slf4j.Slf4j;
 import org.floworc.core.models.executions.Execution;
 import org.floworc.core.models.executions.TaskRun;
+import org.floworc.core.queues.QueueFactoryInterface;
 import org.floworc.core.queues.QueueInterface;
 import org.floworc.core.runners.ExecutionStateInterface;
 import org.floworc.core.runners.WorkerTaskResult;
@@ -12,6 +13,8 @@ import javax.inject.Named;
 import java.util.concurrent.ConcurrentHashMap;

 @Slf4j
+@Prototype
+@MemoryQueueEnabled
 public class MemoryExecutionState implements ExecutionStateInterface {
     private final Object lock = new Object();
     private final QueueInterface<Execution> executionQueue;
@@ -19,8 +22,8 @@ public class MemoryExecutionState implements ExecutionStateInterface {
     private static ConcurrentHashMap<String, Execution> executions = new ConcurrentHashMap<>();

     public MemoryExecutionState(
-        @Named("executionQueue") QueueInterface<Execution> executionQueue,
-        @Named("workerTaskResultQueue") QueueInterface<WorkerTaskResult> workerTaskResultQueue
+        @Named(QueueFactoryInterface.EXECUTION_NAMED) QueueInterface<Execution> executionQueue,
+        @Named(QueueFactoryInterface.WORKERTASKRESULT_NAMED) QueueInterface<WorkerTaskResult> workerTaskResultQueue
     ) {
         this.executionQueue = executionQueue;
         this.workerTaskResultQueue = workerTaskResultQueue;
@@ -5,14 +5,27 @@ import org.floworc.core.queues.QueueInterface;

 import java.io.IOException;
 import java.util.*;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
+import java.util.concurrent.*;
 import java.util.function.Consumer;

 @Slf4j
 public class MemoryQueue<T> implements QueueInterface<T> {
     private Class<T> cls;
-    private static ExecutorService poolExecutor = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
+    private static int threads = Runtime.getRuntime().availableProcessors();
+    private static ExecutorService poolExecutor = new ThreadPoolExecutor(
+        threads,
+        threads,
+        0L, TimeUnit.MILLISECONDS,
+        new LinkedBlockingQueue<>(),
+        new RejectedExecutionHandler() {
+            @Override
+            public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) {
+                log.warn("Task rejected on memory queue executor: {}", r);
+            }
+        }
+    );
+
     private Map<String, List<Consumer<T>>> consumers = new HashMap<>();

     public MemoryQueue(Class<T> cls) {
@@ -30,17 +43,23 @@ public class MemoryQueue<T> implements QueueInterface<T> {
             poolExecutor.execute(() -> {
                 consumers.get((new Random()).nextInt(consumers.size())).accept(message);
             });
         });
     }

     @Override
-    public synchronized void receive(Class consumerGroup, Consumer<T> consumer) {
+    public synchronized Runnable receive(Class consumerGroup, Consumer<T> consumer) {
         if (!this.consumers.containsKey(consumerGroup.getName())) {
             this.consumers.put(consumerGroup.getName(), new ArrayList<>());
         }

-        this.consumers.get(consumerGroup.getName()).add(consumer);
+        synchronized (this) {
+            this.consumers.get(consumerGroup.getName()).add(consumer);
+            int index = this.consumers.get(consumerGroup.getName()).size() - 1;
+
+            return () -> {
+                this.consumers.get(consumerGroup.getName()).remove(index);
+            };
+        }
     }

     public int getSubscribersCount() {
@@ -51,11 +70,6 @@ public class MemoryQueue<T> implements QueueInterface<T> {
             .reduce(0, Integer::sum);
     }

-    @Override
-    public void ack(T message) {
-        // no ack needed with local queues
-    }
-
     @Override
     public void close() throws IOException {
         if (!poolExecutor.isShutdown()) {
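The signature change above is the interesting part: `receive()` now returns a `Runnable` that unregisters the consumer, giving callers an explicit cancellation handle. A minimal sketch (the consumer body is illustrative):

import org.floworc.core.models.executions.Execution;

public class ReceiveCancelSketch {
    public void demo() {
        MemoryQueue<Execution> queue = new MemoryQueue<>(Execution.class);

        // The class literal doubles as the consumer-group key.
        Runnable cancel = queue.receive(ReceiveCancelSketch.class, execution -> System.out.println(execution));

        cancel.run(); // removes exactly the consumer registered above
    }
}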
@@ -0,0 +1,14 @@
package org.floworc.runner.memory;

import io.micronaut.context.annotation.DefaultImplementation;
import io.micronaut.context.annotation.Requires;

import java.lang.annotation.*;

@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.PACKAGE, ElementType.TYPE})
@Requires(property = "floworc.queue.type", value = "memory")
@DefaultImplementation
public @interface MemoryQueueEnabled {
}
@@ -10,21 +10,22 @@ import javax.inject.Named;
 import javax.inject.Singleton;

 @Factory
+@MemoryQueueEnabled
 public class MemoryQueueFactory implements QueueFactoryInterface {
     @Singleton
-    @Named("executionQueue")
+    @Named(QueueFactoryInterface.EXECUTION_NAMED)
     public QueueInterface<Execution> execution() {
         return new MemoryQueue<>(Execution.class);
     }

     @Singleton
-    @Named("workerTaskQueue")
+    @Named(QueueFactoryInterface.WORKERTASK_NAMED)
     public QueueInterface<WorkerTask> workerTask() {
         return new MemoryQueue<>(WorkerTask.class);
     }

     @Singleton
-    @Named("workerTaskResultQueue")
+    @Named(QueueFactoryInterface.WORKERTASKRESULT_NAMED)
     public QueueInterface<WorkerTaskResult> workerTaskResult() {
         return new MemoryQueue<>(WorkerTaskResult.class);
     }
@@ -5,6 +5,7 @@ import org.floworc.core.models.executions.Execution;
 import org.floworc.core.runners.StandAloneRunner;
 import org.floworc.core.runners.WorkerTask;
 import org.floworc.core.runners.WorkerTaskResult;
+import org.floworc.core.utils.Await;

 import javax.inject.Singleton;

@@ -17,19 +18,10 @@ public class MemoryRunner extends StandAloneRunner {

         int processors = Math.max(3, Runtime.getRuntime().availableProcessors());

-        // @FIXME: Ugly hack to wait that all thread is created and ready to listen
-        boolean isReady;
-        do {
-            try {
-                Thread.sleep(100);
-            } catch (InterruptedException e) {
-                log.error("Can't sleep", e);
-            }
-
-            isReady = ((MemoryQueue<Execution>) this.executionQueue).getSubscribersCount() == processors * 2 &&
-                ((MemoryQueue<WorkerTask>) this.workerTaskQueue).getSubscribersCount() == processors &&
-                ((MemoryQueue<WorkerTaskResult>) this.workerTaskResultQueue).getSubscribersCount() == processors;
-        }
-        while (!isReady);
+        // @FIXME: Ugly hack to wait until all threads are created and ready to listen
+        Await.until(() -> ((MemoryQueue<Execution>) this.executionQueue).getSubscribersCount() == processors * 2 &&
+            ((MemoryQueue<WorkerTask>) this.workerTaskQueue).getSubscribersCount() == processors &&
+            ((MemoryQueue<WorkerTaskResult>) this.workerTaskResultQueue).getSubscribersCount() == processors
+        );
     }
 }
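`Await.until` comes from a new `org.floworc.core.utils.Await` helper that is not included in this diff; a hypothetical minimal sketch of the polling behavior it presumably encapsulates, reconstructed from the do/while loop it replaces:

import java.util.function.BooleanSupplier;

public class AwaitSketch {
    // Hypothetical reconstruction: poll the condition every 100 ms until it holds.
    public static void until(BooleanSupplier condition) {
        while (!condition.getAsBoolean()) {
            try {
                Thread.sleep(100);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                throw new RuntimeException("Interrupted while waiting", e);
            }
        }
    }
}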
@@ -3,12 +3,15 @@ package org.floworc.runner.memory;
 import io.micronaut.test.annotation.MicronautTest;
 import org.floworc.core.Utils;
 import org.floworc.core.models.executions.Execution;
-import org.floworc.core.models.flows.Flow;
-import org.floworc.core.runners.StandAloneRunner;
+import org.floworc.core.repositories.LocalFlowRepositoryLoader;
+import org.floworc.core.runners.RunnerUtils;
+import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;

 import javax.inject.Inject;
 import java.io.IOException;
+import java.net.URISyntaxException;
+import java.util.concurrent.TimeoutException;

 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.hasSize;
@@ -16,22 +19,33 @@ import static org.hamcrest.Matchers.hasSize;
 @MicronautTest
 class MemoryRunnerTest {
     @Inject
-    private StandAloneRunner runner;
+    private MemoryRunner runner;
+
+    @Inject
+    private RunnerUtils runnerUtils;
+
+    @Inject
+    private LocalFlowRepositoryLoader repositoryLoader;
+
+    // JUnit 5 lifecycle methods must not be private, so init() is package-private.
+    @BeforeEach
+    void init() throws IOException, URISyntaxException {
+        if (!runner.isRunning()) {
+            runner.run();
+            Utils.loads(repositoryLoader);
+        }
+    }

     @Test
-    void full() throws IOException, InterruptedException {
-        Flow flow = Utils.parse("flows/full.yaml");
-        Execution execution = runner.runOne(flow);
+    void full() throws TimeoutException {
+        Execution execution = runnerUtils.runOne("full");

         assertThat(execution.getTaskRunList(), hasSize(13));
     }

     @Test
-    void errors() throws IOException, InterruptedException {
-        Flow flow = Utils.parse("flows/errors.yaml");
-        Execution execution = runner.runOne(flow);
+    void errors() throws TimeoutException {
+        Execution execution = runnerUtils.runOne("errors");

         assertThat(execution.getTaskRunList(), hasSize(7));
     }
 }
@@ -1,4 +1,3 @@
 floworc:
-  repository:
-    local:
-      base-path: ../core/src/test/resources/flows/
+  queue:
+    type: memory
@@ -6,5 +6,5 @@ include 'core'
 include 'runner-memory'
 include 'runner-kafka'

-include 'repository-local'
+include 'repository-memory'