Mirror of https://github.com/kestra-io/kestra.git, synced 2025-12-19 18:05:41 -05:00
fix(webserver): export test with correct path + DateField for execution export (#13197)
@@ -238,8 +238,8 @@ public abstract class AbstractJdbcExecutionRepository extends AbstractJdbcCrudRe
     private Condition computeFindCondition(@Nullable List<QueryFilter> filters) {
         boolean hasKindFilter = filters != null && filters.stream()
             .anyMatch(f -> KIND.value().equalsIgnoreCase(f.field().name()) );
-        return hasKindFilter ? this.filter(filters, "start_date", Resource.EXECUTION) :
-            this.filter(filters, "start_date", Resource.EXECUTION).and(NORMAL_KIND_CONDITION);
+        return hasKindFilter ? this.filter(filters, fieldsMapping.get(dateFilterField()), Resource.EXECUTION) :
+            this.filter(filters, fieldsMapping.get(dateFilterField()), Resource.EXECUTION).and(NORMAL_KIND_CONDITION);
     }

     private SelectConditionStep<Record1<Object>> findSelect(
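
The hunks in this file replace the hard-coded "start_date" column with fieldsMapping.get(dateFilterField()). Neither member appears in the diff itself; as a purely illustrative sketch (the names and types below are assumptions, not Kestra's actual declarations), the shape those calls rely on could look like this:

import java.util.Map;

// Purely illustrative: these members are not shown in the diff, and the real
// declarations in the repository hierarchy may differ. The names mirror the
// calls made in the hunks above.
class DateFilterSketch {

    // Hypothetical mapping from API filter field names to SQL column names.
    protected final Map<String, String> fieldsMapping = Map.of(
        "START_DATE", "start_date",
        "END_DATE", "end_date"
    );

    // Which date field drives filtering and grouping; executions would
    // presumably default to the start date.
    protected String dateFilterField() {
        return "START_DATE";
    }

    // The pattern the hunks switch to: resolve the column instead of
    // hard-coding "start_date".
    protected String dateColumn() {
        return fieldsMapping.get(dateFilterField());
    }
}

Under that assumption, a repository that filters on a different date column would only need to override dateFilterField().
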
@@ -292,7 +292,7 @@ public abstract class AbstractJdbcExecutionRepository extends AbstractJdbcCrudRe
         if (filters == null || filters.isEmpty()) {
             return findAllAsync(tenantId);
         }
-        Condition condition = this.filter(filters, null, Resource.EXECUTION);
+        Condition condition = this.filter(filters, fieldsMapping.get(dateFilterField()) , Resource.EXECUTION);
         return findAsync(defaultFilter(tenantId), condition);
     }

@@ -468,7 +468,7 @@ public abstract class AbstractJdbcExecutionRepository extends AbstractJdbcCrudRe
         @Nullable DateUtils.GroupType groupBy,
         @Nullable List<State.Type> state
     ) {
-        List<Field<?>> dateFields = new ArrayList<>(groupByFields(Duration.between(startDate, endDate), "start_date", groupBy));
+        List<Field<?>> dateFields = new ArrayList<>(groupByFields(Duration.between(startDate, endDate), fieldsMapping.get(dateFilterField()), groupBy));
         List<Field<?>> selectFields = new ArrayList<>(fields);
         selectFields.addAll(List.of(
             DSL.count().as("count"),
@@ -476,7 +476,7 @@ public abstract class AbstractJdbcExecutionRepository extends AbstractJdbcCrudRe
             DSL.max(field("state_duration", Long.class)).as("duration_max"),
             DSL.sum(field("state_duration", Long.class)).as("duration_sum")
         ));
-        selectFields.addAll(groupByFields(Duration.between(startDate, endDate), "start_date", groupBy, true));
+        selectFields.addAll(groupByFields(Duration.between(startDate, endDate), fieldsMapping.get(dateFilterField()), groupBy, true));

         return jdbcRepository
             .getDslContextWrapper()
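
The two statistics hunks feed the same resolved column into groupByFields and the aggregated select fields. As a rough offline sketch of that aggregation shape, assuming a hypothetical executions table name and that fieldsMapping.get(dateFilterField()) resolves to start_date, a jOOQ query built without a database connection might render like this:

import org.jooq.DSLContext;
import org.jooq.SQLDialect;
import org.jooq.impl.DSL;

import static org.jooq.impl.DSL.field;
import static org.jooq.impl.DSL.table;

public class ExecutionStatsSketch {
    public static void main(String[] args) {
        // Build the query offline to inspect the generated SQL; no database connection is needed.
        DSLContext ctx = DSL.using(SQLDialect.POSTGRES);

        // Assumption: the column fieldsMapping.get(dateFilterField()) resolves to for executions.
        String dateColumn = "start_date";

        String sql = ctx
            .select(
                field(dateColumn),
                DSL.count().as("count"),
                DSL.max(field("state_duration", Long.class)).as("duration_max"),
                DSL.sum(field("state_duration", Long.class)).as("duration_sum"))
            .from(table("executions")) // hypothetical table name, for illustration only
            .groupBy(field(dateColumn))
            .getSQL();

        System.out.println(sql);
    }
}
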
@@ -520,7 +520,7 @@ class ExecutionControllerTest {
         createAndExecuteFlow();

         HttpResponse<byte[]> response = client.toBlocking().exchange(
-            HttpRequest.GET("/api/v1/main/executions/export"),
+            HttpRequest.GET("/api/v1/main/executions/export/by-query/csv"),
             byte[].class
         );

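
This and the following test hunks move the export tests to the /export/by-query/csv paths. A standalone call against the new executions export path, sketched with Micronaut's HttpClient (the same client API the tests use) and assuming a Kestra webserver on localhost:8080 with the main tenant, would be:

import io.micronaut.http.HttpRequest;
import io.micronaut.http.HttpResponse;
import io.micronaut.http.client.HttpClient;

import java.net.URL;
import java.nio.charset.StandardCharsets;

public class ExportCsvSketch {
    public static void main(String[] args) throws Exception {
        // Assumes a locally running webserver; adjust host, port, and tenant as needed.
        try (HttpClient client = HttpClient.create(new URL("http://localhost:8080"))) {
            HttpResponse<byte[]> response = client.toBlocking().exchange(
                HttpRequest.GET("/api/v1/main/executions/export/by-query/csv"),
                byte[].class
            );
            // The byte[] body is the CSV payload the tests assert on.
            System.out.println(new String(response.body(), StandardCharsets.UTF_8));
        }
    }
}
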
@@ -1115,12 +1115,13 @@ class FlowControllerTest {
         );

         HttpResponse<byte[]> response = client.toBlocking().exchange(
-            HttpRequest.GET(FLOW_PATH + "/export"),
+            HttpRequest.GET(FLOW_PATH + "/export/by-query/csv"),
             byte[].class
         );

         assertThat(response.getStatus().getCode()).isEqualTo(HttpStatus.OK.getCode());
         assertThat(response.getHeaders().get("Content-Disposition")).contains("attachment; filename=flows.csv");

         String csv = new String(response.body());
         assertThat(csv).contains("id");
         assertThat(csv).contains(f1.getId());
@@ -623,7 +623,7 @@ class TriggerControllerTest {
         jdbcTriggerRepository.save(t2);

         HttpResponse<byte[]> response = client.toBlocking().exchange(
-            HttpRequest.GET(TRIGGER_PATH + "/export"),
+            HttpRequest.GET(TRIGGER_PATH + "/export/by-query/csv"),
             byte[].class
         );
