feat(jdbc): implementation of trigger repository
@@ -10,4 +10,5 @@ dependencies {
    testImplementation project(':core').sourceSets.test.output
    testImplementation project(':jdbc').sourceSets.test.output
    testImplementation project(':runner-memory')
    testImplementation 'org.mockito:mockito-junit-jupiter:4.5.1'
}

@@ -0,0 +1,17 @@
package io.kestra.repository.postgres;

import io.kestra.core.models.executions.Execution;
import io.kestra.core.repositories.ExecutionRepositoryInterface;
import io.kestra.jdbc.repository.AbstractExecutionRepository;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;

@Singleton
@PostgresRepositoryEnabled
public class PostgresExecutionRepository extends AbstractExecutionRepository implements ExecutionRepositoryInterface {
    @Inject
    public PostgresExecutionRepository(ApplicationContext applicationContext) {
        super(new PostgresRepository<>(Execution.class, applicationContext), applicationContext);
    }
}

@@ -8,10 +8,7 @@ import io.micronaut.context.ApplicationContext;
import io.micronaut.data.model.Pageable;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import org.jooq.Field;
import org.jooq.Record;
import org.jooq.Record1;
import org.jooq.SelectConditionStep;
import org.jooq.*;
import org.jooq.impl.DSL;

import java.util.ArrayList;
@@ -27,15 +24,14 @@ public class PostgresFlowRepository extends AbstractFlowRepository {
    }

    @SuppressWarnings("unchecked")
    private <R extends Record, E> SelectConditionStep<R> fullTextSelect(List<Field<Object>> field) {
    private <R extends Record, E> SelectConditionStep<R> fullTextSelect(DSLContext context, List<Field<Object>> field) {
        ArrayList<Field<Object>> fields = new ArrayList<>(Collections.singletonList(DSL.field("value")));

        if (field != null) {
            fields.addAll(field);
        }

        return (SelectConditionStep<R>) this.jdbcRepository
            .getDslContext()
        return (SelectConditionStep<R>) context
            .select(fields)
            .from(lastRevision(false))
            .join(jdbcRepository.getTable().as("ft"))
@@ -47,30 +43,43 @@ public class PostgresFlowRepository extends AbstractFlowRepository {
    }

    public ArrayListTotal<Flow> find(String query, Pageable pageable) {
        SelectConditionStep<Record1<Object>> select = this.fullTextSelect(Collections.emptyList());
        return this.jdbcRepository
            .getDslContext()
            .transactionResult(configuration -> {
                DSLContext context = DSL.using(configuration);

        if (query != null) {
            select.and(this.jdbcRepository.fullTextCondition(Collections.singletonList("fulltext"), query));
        }
                SelectConditionStep<Record1<Object>> select = this.fullTextSelect(context, Collections.emptyList());

        return this.jdbcRepository.fetchPage(select, pageable);
                if (query != null) {
                    select.and(this.jdbcRepository.fullTextCondition(Collections.singletonList("fulltext"), query));
                }

                return this.jdbcRepository.fetchPage(context, select, pageable);
            });
    }

    @Override
    public ArrayListTotal<SearchResult<Flow>> findSourceCode(String query, Pageable pageable) {
        SelectConditionStep<Record> select = this.fullTextSelect(Collections.singletonList(DSL.field("source_code")));
        return this.jdbcRepository
            .getDslContext()
            .transactionResult(configuration -> {
                DSLContext context = DSL.using(configuration);

        if (query != null) {
            select.and(DSL.condition("source_code @@ TO_TSQUERY('simple', ?)", query));
        }
                SelectConditionStep<Record> select = this.fullTextSelect(context, Collections.singletonList(DSL.field("source_code")));

        return this.jdbcRepository.fetchPage(
            select,
            pageable,
            record -> new SearchResult<>(
                this.jdbcRepository.map(record),
                this.jdbcRepository.fragments(query, record.getValue("value", String.class))
            )
        );
                if (query != null) {
                    select.and(DSL.condition("source_code @@ TO_TSQUERY('simple', ?)", query));
                }

                return this.jdbcRepository.fetchPage(
                    context,
                    select,
                    pageable,
                    record -> new SearchResult<>(
                        this.jdbcRepository.map(record),
                        this.jdbcRepository.fragments(query, record.getValue("value", String.class))
                    )
                );
            });
    }
}

@@ -11,8 +11,9 @@ import org.jooq.impl.DSL;

import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;

public class PostgresRepository<T extends DeletedInterface> extends AbstractJdbcRepository<T> {
public class PostgresRepository<T> extends AbstractJdbcRepository<T> {
    public PostgresRepository(Class<T> cls, ApplicationContext applicationContext) {
        super(cls, applicationContext);
    }
@@ -27,26 +28,27 @@ public class PostgresRepository<T extends DeletedInterface> extends AbstractJdbc
    }

    @SneakyThrows
    public void persist(T entity, Map<Field<Object>, Object> fields) {
        if (fields == null) {
            fields = this.persistFields(entity);
        }
    public void persist(T entity, @Nullable Map<Field<Object>, Object> fields) {
        Map<Field<Object>, Object> finalFields = fields == null ? this.persistFields(entity) : fields;

        String json = mapper.writeValueAsString(entity);
        fields.replace(DSL.field("value"), DSL.val(JSONB.valueOf(json)));
        finalFields.replace(DSL.field("value"), DSL.val(JSONB.valueOf(json)));

        InsertOnDuplicateSetMoreStep<Record> insert = dslContext.insertInto(table)
        dslContext.transaction(configuration -> DSL
            .using(configuration)
            .insertInto(table)
            .set(DSL.field(DSL.quotedName("key")), queueService.key(entity))
            .set(fields)
            .set(finalFields)
            .onConflict(DSL.field(DSL.quotedName("key")))
            .doUpdate()
            .set(fields);
            .set(finalFields)
            .execute()
        );

        insert.execute();
    }

    @SuppressWarnings("unchecked")
    public <R extends Record, E> ArrayListTotal<E> fetchPage(SelectConditionStep<R> select, Pageable pageable, RecordMapper<R, E> mapper) {
    public <R extends Record, E> ArrayListTotal<E> fetchPage(DSLContext context, SelectConditionStep<R> select, Pageable pageable, RecordMapper<R, E> mapper) {
        Result<Record> results = this.limit(
            this.dslContext.select(DSL.asterisk(), DSL.count().over().as("total_count"))
                .from(this

@@ -1,18 +1,11 @@
package io.kestra.repository.postgres;

import io.kestra.core.models.templates.Template;
import io.kestra.core.repositories.ArrayListTotal;
import io.kestra.core.repositories.TemplateRepositoryInterface;
import io.kestra.jdbc.repository.AbstractTemplateRepository;
import io.micronaut.context.ApplicationContext;
import io.micronaut.data.model.Pageable;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import org.jooq.Record1;
import org.jooq.SelectConditionStep;
import org.jooq.impl.DSL;

import java.util.Collections;

@Singleton
@PostgresRepositoryEnabled
@@ -21,20 +14,4 @@ public class PostgresTemplateRepository extends AbstractTemplateRepository imple
    public PostgresTemplateRepository(ApplicationContext applicationContext) {
        super(new PostgresRepository<>(Template.class, applicationContext), applicationContext);
    }

    public ArrayListTotal<Template> find(String query, Pageable pageable) {
        SelectConditionStep<Record1<Object>> select = this.jdbcRepository
            .getDslContext()
            .select(
                DSL.field("value")
            )
            .from(this.jdbcRepository.getTable())
            .where(this.defaultFilter());

        if (query != null) {
            select.and(this.jdbcRepository.fullTextCondition(Collections.singletonList("fulltext"), query));
        }

        return this.jdbcRepository.fetchPage(select, pageable);
    }
}

@@ -0,0 +1,17 @@
package io.kestra.repository.postgres;

import io.kestra.core.models.triggers.Trigger;
import io.kestra.core.repositories.TriggerRepositoryInterface;
import io.kestra.jdbc.repository.AbstractTriggerRepository;
import io.micronaut.context.ApplicationContext;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;

@Singleton
@PostgresRepositoryEnabled
public class PostgresTriggerRepository extends AbstractTriggerRepository implements TriggerRepositoryInterface {
    @Inject
    public PostgresTriggerRepository(ApplicationContext applicationContext) {
        super(new PostgresRepository<>(Trigger.class, applicationContext));
    }
}
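
To see how the new class above is meant to be consumed, here is a hedged usage sketch, not part of the commit: the repository is resolved through Micronaut DI and persists trigger state into the triggers table created by the migration further down. The Trigger.builder() fields and the save() method on TriggerRepositoryInterface are assumptions used for illustration; the interface itself is not shown in this diff.

// Usage sketch only; builder and method names below are assumptions, not taken from this diff.
import io.kestra.core.models.triggers.Trigger;
import io.kestra.core.repositories.TriggerRepositoryInterface;
import io.micronaut.context.ApplicationContext;

public class TriggerRepositoryUsageSketch {
    public static void main(String[] args) {
        try (ApplicationContext applicationContext = ApplicationContext.run()) {
            // With the Postgres repositories enabled, this should resolve to PostgresTriggerRepository.
            TriggerRepositoryInterface repository = applicationContext.getBean(TriggerRepositoryInterface.class);

            Trigger trigger = Trigger.builder()      // assumed builder
                .namespace("io.kestra.tests")
                .flowId("trigger-flow")
                .triggerId("schedule")
                .build();

            repository.save(trigger);                // assumed persistence method
        }
    }
}
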
@@ -1,3 +1,34 @@
CREATE TYPE state_type AS ENUM (
    'CREATED',
    'RUNNING',
    'PAUSED',
    'RESTARTED',
    'KILLING',
    'SUCCESS',
    'WARNING',
    'FAILED',
    'KILLED'
);

CREATE TYPE queue_consumers AS ENUM (
    'indexer',
    'executor',
    'worker'
);

CREATE TYPE queue_type AS ENUM (
    'io.kestra.core.models.executions.Execution',
    'io.kestra.core.models.flows.Flow',
    'io.kestra.core.models.templates.Template',
    'io.kestra.core.models.executions.ExecutionKilled',
    'io.kestra.core.runners.WorkerTask',
    'io.kestra.core.runners.WorkerTaskResult',
    'io.kestra.core.runners.WorkerInstance',
    'io.kestra.core.runners.WorkerTaskRunning',
    'io.kestra.core.models.executions.LogEntry',
    'io.kestra.core.models.triggers.Trigger'
);

CREATE OR REPLACE FUNCTION FULLTEXT_REPLACE(text, text) RETURNS text
AS 'SELECT REGEXP_REPLACE(COALESCE($1, ''''), ''[^a-zA-Z\d:]'', $2, ''g'');'
LANGUAGE SQL
@@ -16,39 +47,34 @@ AS 'SELECT TO_TSQUERY(''simple'', FULLTEXT_REPLACE($1, '':* & '') || '':*'');'
IMMUTABLE
RETURNS NULL ON NULL INPUT;

CREATE OR REPLACE FUNCTION STATE_FROMTEXT(text) RETURNS state_type
AS 'SELECT CAST($1 AS state_type);'
LANGUAGE SQL
IMMUTABLE;

CREATE TYPE ${prefix}queue_consumers AS ENUM (
    'indexer',
    'executor',
    'worker'
);
CREATE OR REPLACE FUNCTION PARSE_ISO8601_DATETIME(text) RETURNS timestamp
AS 'SELECT $1::timestamp;'
LANGUAGE SQL
IMMUTABLE;

CREATE TYPE ${prefix}queue_type AS ENUM (
    'io.kestra.core.models.executions.Execution',
    'io.kestra.core.models.flows.Flow',
    'io.kestra.core.models.templates.Template',
    'io.kestra.core.models.executions.ExecutionKilled',
    'io.kestra.core.runners.WorkerTask',
    'io.kestra.core.runners.WorkerTaskResult',
    'io.kestra.core.runners.WorkerInstance',
    'io.kestra.core.runners.WorkerTaskRunning',
    'io.kestra.core.models.executions.LogEntry',
    'io.kestra.core.models.triggers.Trigger'
);
CREATE OR REPLACE FUNCTION PARSE_ISO8601_DURATION(text) RETURNS interval
AS 'SELECT $1::interval;'
LANGUAGE SQL
IMMUTABLE;

CREATE TABLE ${prefix}queues (
CREATE TABLE queues (
    "offset" SERIAL PRIMARY KEY,
    type ${prefix}queue_type NOT NULL,
    type queue_type NOT NULL,
    key VARCHAR(250) NOT NULL,
    value JSONB NOT NULL,
    consumers ${prefix}queue_consumers[]
    consumers queue_consumers[]
);

CREATE INDEX ${prefix}queues_key ON ${prefix}queues (type, key);
CREATE INDEX ${prefix}queues_consumers ON ${prefix}queues (type, consumers);
CREATE INDEX queues_key ON queues (type, key);
CREATE INDEX queues_consumers ON queues (type, consumers);


CREATE TABLE ${prefix}flows (
CREATE TABLE flows (
    key VARCHAR(250) NOT NULL PRIMARY KEY,
    value JSONB NOT NULL,
    deleted BOOL NOT NULL GENERATED ALWAYS AS (CAST(value ->> 'deleted' AS BOOL)) STORED,
@@ -62,15 +88,15 @@ CREATE TABLE ${prefix}flows (
    source_code TEXT NOT NULL
);

CREATE INDEX ${prefix}flows_id ON ${prefix}flows (id);
CREATE INDEX ${prefix}flows_namespace ON ${prefix}flows (namespace);
CREATE INDEX ${prefix}flows_revision ON ${prefix}flows (revision);
CREATE INDEX ${prefix}flows_deleted ON ${prefix}flows (deleted);
CREATE INDEX ${prefix}flows_fulltext ON ${prefix}flows USING GIN (fulltext);
CREATE INDEX ${prefix}flows_source_code ON ${prefix}flows USING GIN (FULLTEXT_INDEX(source_code));
CREATE INDEX flows_id ON flows (id);
CREATE INDEX flows_namespace ON flows (namespace);
CREATE INDEX flows_revision ON flows (revision);
CREATE INDEX flows_deleted ON flows (deleted);
CREATE INDEX flows_fulltext ON flows USING GIN (fulltext);
CREATE INDEX flows_source_code ON flows USING GIN (FULLTEXT_INDEX(source_code));


CREATE TABLE ${prefix}templates (
CREATE TABLE templates (
    key VARCHAR(250) NOT NULL PRIMARY KEY,
    value JSONB NOT NULL,
    deleted BOOL NOT NULL GENERATED ALWAYS AS (CAST(value ->> 'deleted' AS BOOL)) STORED,
@@ -82,7 +108,45 @@ CREATE TABLE ${prefix}templates (
    )) STORED
);

CREATE INDEX ${prefix}templates_namespace ON ${prefix}flows (namespace);
CREATE INDEX ${prefix}templates_revision ON ${prefix}flows (revision);
CREATE INDEX ${prefix}templates_deleted ON ${prefix}flows (deleted);
CREATE INDEX ${prefix}templates_fulltext ON ${prefix}templates USING GIN (fulltext);
CREATE INDEX templates_namespace ON flows (namespace);
CREATE INDEX templates_revision ON flows (revision);
CREATE INDEX templates_deleted ON flows (deleted);
CREATE INDEX templates_fulltext ON templates USING GIN (fulltext);


CREATE TABLE executions (
    key VARCHAR(250) NOT NULL PRIMARY KEY,
    value JSONB NOT NULL,
    deleted BOOL NOT NULL GENERATED ALWAYS AS (CAST(value ->> 'deleted' AS bool)) STORED,
    id VARCHAR(100) NOT NULL GENERATED ALWAYS AS (value ->> 'id') STORED,
    namespace VARCHAR(150) NOT NULL GENERATED ALWAYS AS (value ->> 'namespace') STORED,
    flow_id VARCHAR(150) NOT NULL GENERATED ALWAYS AS (value ->> 'flowId') STORED,
    state_current state_type NOT NULL GENERATED ALWAYS AS (STATE_FROMTEXT(value #>> '{state, current}')) STORED,
    state_duration BIGINT NOT NULL GENERATED ALWAYS AS (EXTRACT(MILLISECONDS FROM PARSE_ISO8601_DURATION(value #>> '{state, duration}'))) STORED,
    start_date TIMESTAMP NOT NULL GENERATED ALWAYS AS (PARSE_ISO8601_DATETIME(value #>> '{state, startDate}')) STORED,
    fulltext TSVECTOR GENERATED ALWAYS AS (
        FULLTEXT_INDEX(CAST(value ->> 'namespace' AS varchar)) ||
        FULLTEXT_INDEX(CAST(value ->> 'flowId' AS varchar)) ||
        FULLTEXT_INDEX(CAST(value ->> 'id' AS varchar))
    ) STORED
);

CREATE INDEX executions_id ON executions (id);
CREATE INDEX executions_namespace ON executions (namespace);
CREATE INDEX executions_flowId ON executions (flow_id);
CREATE INDEX executions_state_current ON executions (state_current);
CREATE INDEX executions_start_date ON executions (start_date);
CREATE INDEX executions_state_duration ON executions (state_duration);
CREATE INDEX executions_deleted ON executions (deleted);
CREATE INDEX executions_fulltext ON executions USING GIN (fulltext);


CREATE TABLE triggers (
    key VARCHAR(250) NOT NULL PRIMARY KEY,
    value JSONB NOT NULL,
    namespace VARCHAR(150) NOT NULL GENERATED ALWAYS AS (value ->> 'namespace') STORED,
    flow_id VARCHAR(150) NOT NULL GENERATED ALWAYS AS (value ->> 'flowId') STORED,
    trigger_id VARCHAR(150) NOT NULL GENERATED ALWAYS AS (value ->> 'triggerId') STORED
);

CREATE INDEX triggers_namespace__flow_id__trigger_id ON triggers (namespace, flow_id, trigger_id);
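
As a quick, hedged illustration of why the triggers table exposes generated namespace, flow_id and trigger_id columns, a lookup of one trigger's persisted state could look like the query below. It is an example only, not part of the migration, and the identifier values are made up.

-- Example only (not part of the migration): read back the stored trigger state
-- for one flow trigger via the generated columns covered by the index above.
SELECT value
FROM triggers
WHERE namespace = 'io.kestra.tests'
  AND flow_id = 'trigger-flow'
  AND trigger_id = 'schedule';
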
@@ -0,0 +1,16 @@
package io.kestra.repository.postgres;

import io.kestra.jdbc.repository.AbstractJdbcExecutionRepositoryTest;
import org.junit.jupiter.api.Test;

public class PostgresExecutionRepositoryTest extends AbstractJdbcExecutionRepositoryTest {
    @Test
    protected void findTaskRun() {

    }

    @Test
    protected void taskRunsDailyStatistics() {

    }
}

@@ -0,0 +1,7 @@
package io.kestra.repository.postgres;

import io.kestra.jdbc.repository.AbstractJdbcTriggerRepositoryTest;

public class PostgresTriggerRepositoryTest extends AbstractJdbcTriggerRepositoryTest {

}

@@ -12,8 +12,6 @@ flyway:
      enabled: true
      locations:
        - classpath:migrations/postgres
      placeholders:
        prefix: ""

kestra:
  queue:
@@ -22,22 +20,21 @@ kestra:
    type: postgres

  jdbc:
    table-prefix: ""
    tables:
      queues:
        table: "${kestra.jdbc.table-prefix}queues"
        table: "queues"
      flows:
        table: "${kestra.jdbc.table-prefix}flows"
        table: "flows"
        cls: io.kestra.core.models.flows.Flow
      executions:
        table: "${kestra.jdbc.table-prefix}executions"
        table: "executions"
        cls: io.kestra.core.models.executions.Execution
      templates:
        table: "${kestra.jdbc.table-prefix}templates"
        table: "templates"
        cls: io.kestra.core.models.templates.Template
      triggers:
        table: "${kestra.jdbc.table-prefix}triggers"
        table: "triggers"
        cls: io.kestra.core.models.triggers.Trigger
      logs:
        table: "${kestra.jdbc.table-prefix}logs"
        table: "logs"
        cls: io.kestra.core.models.executions.LogEntry

@@ -0,0 +1 @@
mock-maker-inline
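
The single line above is the standard Mockito switch that enables the inline mock maker; the file conventionally lives at src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker, although the exact path is not visible in this extract. Together with the mockito-junit-jupiter dependency added in the build file above, it lets tests mock final classes and methods, as in this hedged example that is not part of the commit:

// Illustration only: mocking a final class requires the inline mock maker.
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

final class Clock {
    String now() {
        return "real";
    }
}

class ClockTest {
    @org.junit.jupiter.api.Test
    void mocksFinalClass() {
        Clock clock = mock(Clock.class);          // fails with the default subclassing mock maker
        when(clock.now()).thenReturn("mocked");
        org.junit.jupiter.api.Assertions.assertEquals("mocked", clock.now());
    }
}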