Mirror of https://github.com/kestra-io/kestra.git (synced 2025-12-26 14:00:23 -05:00)

Compare commits: run-develo... → v1.0.1 (23 commits)
| Author | SHA1 | Date |
|---|---|---|
| | bbd0dda47e | |
| | 27a8e8b5a7 | |
| | d6620a34cd | |
| | 6f8b3c5cfd | |
| | 6da6cbab60 | |
| | a899e16178 | |
| | 568cd0b0c7 | |
| | 92e1dcb6eb | |
| | 499e040cd0 | |
| | 5916831d62 | |
| | 0b1b55957e | |
| | 7ee40d376a | |
| | e2c9b3e256 | |
| | 556730777b | |
| | c1a75a431f | |
| | 4a5b91667a | |
| | f7b2af16a1 | |
| | 9351cb22e0 | |
| | b1ecb82fdc | |
| | c6d56151eb | |
| | ed4398467a | |
| | c51947419a | |
| | ccb6a1f4a7 | |
.github/actions/plugins-list/action.yml (vendored, deleted): 29 lines

@@ -1,29 +0,0 @@
name: 'Load Kestra Plugin List'
description: 'Composite action to load list of plugins'
inputs:
  plugin-version:
    description: "Kestra version"
    default: 'LATEST'
    required: true
  plugin-file:
    description: "File of the plugins"
    default: './.plugins'
    required: true
outputs:
  plugins:
    description: "List of all Kestra plugins"
    value: ${{ steps.plugins.outputs.plugins }}
  repositories:
    description: "List of all Kestra repositories of plugins"
    value: ${{ steps.plugins.outputs.repositories }}
runs:
  using: composite
  steps:
    - name: Get Plugins List
      id: plugins
      shell: bash
      run: |
        PLUGINS=$([ -f ${{ inputs.plugin-file }} ] && cat ${{ inputs.plugin-file }} | grep "io\\.kestra\\." | sed -e '/#/s/^.//' | sed -e "s/LATEST/${{ inputs.plugin-version }}/g" | cut -d':' -f2- | xargs || echo '');
        REPOSITORIES=$([ -f ${{ inputs.plugin-file }} ] && cat ${{ inputs.plugin-file }} | grep "io\\.kestra\\." | sed -e '/#/s/^.//' | cut -d':' -f1 | uniq | sort | xargs || echo '')
        echo "plugins=$PLUGINS" >> $GITHUB_OUTPUT
        echo "repositories=$REPOSITORIES" >> $GITHUB_OUTPUT
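For context, the removed composite action boils down to two line-oriented transforms over a `.plugins` file. A rough Java sketch of the same parsing, assuming a hypothetical file layout of `repository-url:group:artifact:version` lines, optionally commented out with a leading `#`:

```java
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;

// Rough Java equivalent of the removed shell pipeline; the file layout is an assumption.
class PluginListParser {
    // Mirrors: grep "io\.kestra\." | sed -e '/#/s/^.//' | sed "s/LATEST/<version>/g" | cut -d':' -f2-
    static List<String> plugins(Path pluginFile, String version) throws Exception {
        return Files.readAllLines(pluginFile).stream()
            .filter(line -> line.contains("io.kestra."))
            .map(line -> line.startsWith("#") ? line.substring(1) : line) // drop the leading comment marker
            .map(line -> line.replace("LATEST", version))                 // pin the requested version
            .map(line -> line.substring(line.indexOf(':') + 1))           // keep everything after the first ':'
            .toList();
    }
}
```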
.github/workflows/main.yml (vendored): 4 changed lines

@@ -31,7 +31,7 @@ concurrency:
 jobs:
   tests:
     name: Execute tests
-    uses: ./.github/workflows/workflow-test.yml
+    uses: kestra-io/kestra/.github/workflows/workflow-test.yml@develop
     if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
     with:
       report-status: false
@@ -40,7 +40,7 @@ jobs:
     name: Release
     needs: [tests]
     if: "!failure() && !cancelled() && !startsWith(github.ref, 'refs/heads/releases')"
-    uses: ./.github/workflows/workflow-release.yml
+    uses: kestra-io/kestra/.github/workflows/workflow-release.yml@develop
     with:
       plugin-version: ${{ inputs.plugin-version != '' && inputs.plugin-version || (github.ref == 'refs/heads/develop' && 'LATEST-SNAPSHOT' || 'LATEST') }}
     secrets:
.github/workflows/workflow-backend-test.yml (vendored, deleted): 142 lines

@@ -1,142 +0,0 @@
name: Backend - Tests

on:
  workflow_call:
    secrets:
      GITHUB_AUTH_TOKEN:
        description: "The GitHub Token."
        required: true
      CODECOV_TOKEN:
        description: 'Codecov Token'
        required: true
      SONAR_TOKEN:
        description: 'Sonar Token'
        required: true
      GOOGLE_SERVICE_ACCOUNT:
        description: 'Google Service Account'
        required: true

permissions:
  contents: write
  checks: write
  actions: read

jobs:
  test:
    name: Backend - Tests
    runs-on: ubuntu-latest
    env:
      GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
      SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
    steps:
      - uses: actions/checkout@v5
        name: Checkout - Current ref
        with:
          fetch-depth: 0

      # Setup build
      - uses: kestra-io/actions/.github/actions/setup-build@main
        name: Setup - Build
        id: build
        with:
          java-enabled: true
          node-enabled: true
          python-enabled: true

      # Services
      - name: Setup - Start docker compose
        shell: bash
        run: docker compose -f docker-compose-ci.yml up -d

      # Gradle check
      - name: Gradle - Build
        if: ${{ github.event.inputs.skip-test == 'false' || github.event.inputs.skip-test == '' }}
        env:
          GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}
        shell: bash
        run: |
          echo $GOOGLE_SERVICE_ACCOUNT | base64 -d > ~/.gcp-service-account.json
          export GOOGLE_APPLICATION_CREDENTIALS=$HOME/.gcp-service-account.json
          ./gradlew check javadoc --parallel

      # report test
      - name: Test - Publish Test Results
        uses: dorny/test-reporter@v2
        if: always()
        with:
          name: Java Tests Report
          reporter: java-junit
          path: '**/build/test-results/test/TEST-*.xml'
          list-suites: 'failed'
          list-tests: 'failed'
          fail-on-error: 'false'
          token: ${{ secrets.GITHUB_AUTH_TOKEN }}

      # Sonar
      - name: Test - Analyze with Sonar
        if: env.SONAR_TOKEN != ''
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_AUTH_TOKEN }}
          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
        shell: bash
        run: ./gradlew sonar --info

      # GCP
      - name: GCP - Auth with unit test account
        id: auth
        if: always() && env.GOOGLE_SERVICE_ACCOUNT != ''
        continue-on-error: true
        uses: "google-github-actions/auth@v3"
        with:
          credentials_json: "${{ secrets.GOOGLE_SERVICE_ACCOUNT }}"

      - name: GCP - Setup Cloud SDK
        if: env.GOOGLE_SERVICE_ACCOUNT != ''
        uses: "google-github-actions/setup-gcloud@v3"

      # Allure check
      - uses: rlespinasse/github-slug-action@v5
        name: Allure - Generate slug variables

      - name: Allure - Publish report
        uses: andrcuns/allure-publish-action@v2.9.0
        if: always() && env.GOOGLE_SERVICE_ACCOUNT != ''
        continue-on-error: true
        env:
          GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_AUTH_TOKEN }}
          JAVA_HOME: /usr/lib/jvm/default-jvm/
        with:
          storageType: gcs
          resultsGlob: "**/build/allure-results"
          bucket: internal-kestra-host
          baseUrl: "https://internal.dev.kestra.io"
          prefix: ${{ format('{0}/{1}', github.repository, 'allure/java') }}
          copyLatest: true
          ignoreMissingResults: true

      # Jacoco
      - name: Jacoco - Copy reports
        if: env.GOOGLE_SERVICE_ACCOUNT != ''
        continue-on-error: true
        shell: bash
        run: |
          mv build/reports/jacoco/testCodeCoverageReport build/reports/jacoco/test/
          mv build/reports/jacoco/test/testCodeCoverageReport.xml build/reports/jacoco/test/jacocoTestReport.xml
          gsutil -m rsync -d -r build/reports/jacoco/test/ gs://internal-kestra-host/${{ format('{0}/{1}', github.repository, 'jacoco') }}

      # Codecov
      - name: Codecov - Upload coverage reports
        uses: codecov/codecov-action@v5
        if: ${{ !cancelled() }}
        continue-on-error: true
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          flags: backend

      - name: Codecov - Upload test results
        uses: codecov/test-results-action@v1
        if: ${{ !cancelled() }}
        continue-on-error: true
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          flags: backend
.github/workflows/workflow-build-artifacts.yml (vendored, deleted): 80 lines

@@ -1,80 +0,0 @@
name: Build Artifacts

on:
  workflow_call: {}

jobs:
  build:
    name: Build - Artifacts
    runs-on: ubuntu-latest
    outputs:
      docker-tag: ${{ steps.vars.outputs.tag }}
      docker-artifact-name: ${{ steps.vars.outputs.artifact }}
      plugins: ${{ steps.plugins.outputs.plugins }}
    env:
      PLUGIN_VERSION: ${{ github.event.inputs.plugin-version != null && github.event.inputs.plugin-version || 'LATEST' }}
    steps:
      - name: Checkout - Current ref
        uses: actions/checkout@v5
        with:
          fetch-depth: 0

      # Npm
      - name: Setup - Npm install
        shell: bash
        working-directory: ui
        run: npm ci

      # Setup build
      - uses: kestra-io/actions/.github/actions/setup-build@main
        name: Setup - Build
        id: build
        with:
          java-enabled: true
          node-enabled: true

      # Get Plugins List
      - name: Plugins - Get List
        uses: ./.github/actions/plugins-list
        if: "!startsWith(github.ref, 'refs/tags/v')"
        id: plugins-list
        with:
          plugin-version: ${{ env.PLUGIN_VERSION }}

      # Set Plugins List
      - name: Plugins - Set List
        id: plugins
        if: "!startsWith(github.ref, 'refs/tags/v')"
        shell: bash
        run: |
          PLUGINS="${{ steps.plugins-list.outputs.plugins }}"
          TAG=${GITHUB_REF#refs/*/}
          if [[ $TAG = "master" || $TAG == v* ]]; then
            echo "plugins=$PLUGINS" >> $GITHUB_OUTPUT
          else
            echo "plugins=--repositories=https://central.sonatype.com/repository/maven-snapshots/ $PLUGINS" >> $GITHUB_OUTPUT
          fi

      # Build
      - name: Gradle - Build
        shell: bash
        run: |
          ./gradlew executableJar

      - name: Artifacts - Copy exe to image
        shell: bash
        run: |
          cp build/executable/* docker/app/kestra && chmod +x docker/app/kestra

      # Upload artifacts
      - name: Artifacts - Upload JAR
        uses: actions/upload-artifact@v4
        with:
          name: jar
          path: build/libs/

      - name: Artifacts - Upload Executable
        uses: actions/upload-artifact@v4
        with:
          name: exe
          path: build/executable/
.github/workflows/workflow-frontend-test.yml (vendored, deleted): 70 lines

@@ -1,70 +0,0 @@
name: Frontend - Tests

on:
  workflow_call:
    secrets:
      GITHUB_AUTH_TOKEN:
        description: "The GitHub Token."
        required: true
      CODECOV_TOKEN:
        description: 'Codecov Token'
        required: true

env:
  # to save corepack from itself
  COREPACK_INTEGRITY_KEYS: 0

jobs:
  test:
    name: Frontend - Tests
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v5

      - name: Cache Node Modules
        id: cache-node-modules
        uses: actions/cache@v4
        with:
          path: |
            ui/node_modules
          key: modules-${{ hashFiles('ui/package-lock.json') }}

      - name: Cache Playwright Binaries
        id: cache-playwright
        uses: actions/cache@v4
        with:
          path: |
            ~/.cache/ms-playwright
          key: playwright-${{ hashFiles('ui/package-lock.json') }}

      - name: Npm - install
        if: steps.cache-node-modules.outputs.cache-hit != 'true'
        working-directory: ui
        run: npm ci

      - name: Npm - lint
        uses: reviewdog/action-eslint@v1
        with:
          github_token: ${{ secrets.GITHUB_AUTH_TOKEN }}
          reporter: github-pr-review
          workdir: ui

      - name: Npm - Run build
        working-directory: ui
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
        run: npm run build

      - name: Run front-end unit tests
        working-directory: ui
        run: npm run test:unit -- --coverage

      - name: Storybook - Install Playwright
        working-directory: ui
        if: steps.cache-playwright.outputs.cache-hit != 'true'
        run: npx playwright install --with-deps

      - name: Run storybook component tests
        working-directory: ui
        run: npm run test:storybook -- --coverage
.github/workflows/workflow-github-release.yml (vendored, deleted): 88 lines

@@ -1,88 +0,0 @@
name: Github - Release

on:
  workflow_dispatch:
  workflow_call:
    secrets:
      GH_PERSONAL_TOKEN:
        description: "The Github personal token."
        required: true
      SLACK_RELEASES_WEBHOOK_URL:
        description: "The Slack webhook URL."
        required: true

jobs:
  publish:
    name: Github - Release
    runs-on: ubuntu-latest
    steps:
      # Check out
      - name: Checkout - Repository
        uses: actions/checkout@v5
        with:
          fetch-depth: 0
          submodules: true

      # Checkout GitHub Actions
      - name: Checkout - Actions
        uses: actions/checkout@v5
        with:
          repository: kestra-io/actions
          sparse-checkout-cone-mode: true
          path: actions
          sparse-checkout: |
            .github/actions

      # Download Exec
      # Must be done after checkout actions
      - name: Artifacts - Download executable
        uses: actions/download-artifact@v5
        if: startsWith(github.ref, 'refs/tags/v')
        with:
          name: exe
          path: build/executable

      - name: Check if current tag is latest
        id: is_latest
        run: |
          latest_tag=$(git tag | grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' | sed 's/^v//' | sort -V | tail -n1)
          current_tag="${GITHUB_REF_NAME#v}"
          if [ "$current_tag" = "$latest_tag" ]; then
            echo "latest=true" >> $GITHUB_OUTPUT
          else
            echo "latest=false" >> $GITHUB_OUTPUT
          fi
        env:
          GITHUB_REF_NAME: ${{ github.ref_name }}

      # GitHub Release
      - name: Create GitHub release
        uses: ./actions/.github/actions/github-release
        if: ${{ startsWith(github.ref, 'refs/tags/v') }}
        env:
          MAKE_LATEST: ${{ steps.is_latest.outputs.latest }}
          GITHUB_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
          SLACK_RELEASES_WEBHOOK_URL: ${{ secrets.SLACK_RELEASES_WEBHOOK_URL }}

      # Trigger gha workflow to bump helm chart version
      - name: GitHub - Trigger the Helm chart version bump
        uses: peter-evans/repository-dispatch@v3
        with:
          token: ${{ secrets.GH_PERSONAL_TOKEN }}
          repository: kestra-io/helm-charts
          event-type: update-helm-chart-version
          client-payload: |-
            {
              "new_version": "${{ github.ref_name }}",
              "github_repository": "${{ github.repository }}",
              "github_actor": "${{ github.actor }}"
            }

      - name: Merge Release Notes
        if: ${{ startsWith(github.ref, 'refs/tags/v') }}
        uses: ./actions/.github/actions/github-release-note-merge
        env:
          GITHUB_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
          RELEASE_TAG: ${{ github.ref_name }}
.github/workflows/workflow-publish-docker.yml (vendored): 19 changed lines

@@ -11,6 +11,14 @@ on:
        options:
          - "true"
          - "false"
+      retag-lts:
+        description: 'Retag LTS Docker images'
+        required: true
+        type: choice
+        default: "false"
+        options:
+          - "true"
+          - "false"
       release-tag:
         description: 'Kestra Release Tag (by default, deduced with the ref)'
         required: false
@@ -62,7 +70,7 @@ jobs:

       # Get Plugins List
       - name: Get Plugins List
-        uses: ./.github/actions/plugins-list
+        uses: kestra-io/kestra/.github/actions/plugins-list@develop
        id: plugins
        with: # remap LATEST-SNAPSHOT to LATEST
          plugin-version: ${{ env.PLUGIN_VERSION == 'LATEST-SNAPSHOT' && 'LATEST' || env.PLUGIN_VERSION }}
@@ -73,7 +81,7 @@ jobs:
   build-artifacts:
     name: Build Artifacts
     if: ${{ inputs.force-download-artifact == 'true' }}
-    uses: ./.github/workflows/workflow-build-artifacts.yml
+    uses: kestra-io/kestra/.github/workflows/workflow-build-artifacts.yml@develop

   docker:
     name: Publish Docker
@@ -179,6 +187,11 @@ jobs:
         run: |
           regctl image copy ${{ format('kestra/kestra:{0}{1}', steps.vars.outputs.tag, matrix.image.name) }} ${{ format('kestra/kestra:latest{0}', matrix.image.name) }}

+      - name: Retag to LTS
+        if: startsWith(github.ref, 'refs/tags/v') && inputs.retag-lts == 'true'
+        run: |
+          regctl image copy ${{ format('kestra/kestra:{0}{1}', steps.vars.outputs.tag, matrix.image.name) }} ${{ format('kestra/kestra:latest-lts{0}', matrix.image.name) }}
+
   end:
     runs-on: ubuntu-latest
     needs:
@@ -197,4 +210,4 @@ jobs:
         slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }}
         name: GitHub Actions
         icon_emoji: ':github-actions:'
-        channel: 'C02DQ1A7JLR' # _int_git channel
+        channel: 'C02DQ1A7JLR' # _int_git channel
.github/workflows/workflow-publish-maven.yml (vendored, deleted): 57 lines

@@ -1,57 +0,0 @@
name: Publish - Maven

on:
  workflow_call:
    secrets:
      SONATYPE_USER:
        description: "The Sonatype username."
        required: true
      SONATYPE_PASSWORD:
        description: "The Sonatype password."
        required: true
      SONATYPE_GPG_KEYID:
        description: "The Sonatype GPG key id."
        required: true
      SONATYPE_GPG_PASSWORD:
        description: "The Sonatype GPG password."
        required: true
      SONATYPE_GPG_FILE:
        description: "The Sonatype GPG file."
        required: true

jobs:
  publish:
    name: Publish - Maven
    runs-on: ubuntu-latest
    steps:
      - name: Checkout - Current ref
        uses: actions/checkout@v5

      # Setup build
      - name: Setup - Build
        uses: kestra-io/actions/.github/actions/setup-build@main
        id: build
        with:
          java-enabled: true
          node-enabled: true

      # Publish
      - name: Publish - Release package to Maven Central
        shell: bash
        env:
          ORG_GRADLE_PROJECT_mavenCentralUsername: ${{ secrets.SONATYPE_USER }}
          ORG_GRADLE_PROJECT_mavenCentralPassword: ${{ secrets.SONATYPE_PASSWORD }}
          SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
          SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
          SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}
        run: |
          mkdir -p ~/.gradle/
          echo "signing.keyId=${SONATYPE_GPG_KEYID}" > ~/.gradle/gradle.properties
          echo "signing.password=${SONATYPE_GPG_PASSWORD}" >> ~/.gradle/gradle.properties
          echo "signing.secretKeyRingFile=${HOME}/.gradle/secring.gpg" >> ~/.gradle/gradle.properties
          echo ${SONATYPE_GPG_FILE} | base64 -d > ~/.gradle/secring.gpg
          ./gradlew publishToMavenCentral

      # Gradle dependency
      - name: Java - Gradle dependency graph
        uses: gradle/actions/dependency-submission@v4
.github/workflows/workflow-release.yml (vendored, deleted): 85 lines

@@ -1,85 +0,0 @@
name: Release

on:
  workflow_dispatch:
    inputs:
      plugin-version:
        description: "plugins version"
        default: 'LATEST'
        required: false
        type: string
      publish-docker:
        description: "Publish Docker image"
        default: 'false'
        required: false
        type: string
  workflow_call:
    inputs:
      plugin-version:
        description: "plugins version"
        default: 'LATEST'
        required: false
        type: string
    secrets:
      DOCKERHUB_USERNAME:
        description: "The Dockerhub username."
        required: true
      DOCKERHUB_PASSWORD:
        description: "The Dockerhub password."
        required: true
      SONATYPE_USER:
        description: "The Sonatype username."
        required: true
      SONATYPE_PASSWORD:
        description: "The Sonatype password."
        required: true
      SONATYPE_GPG_KEYID:
        description: "The Sonatype GPG key id."
        required: true
      SONATYPE_GPG_PASSWORD:
        description: "The Sonatype GPG password."
        required: true
      SONATYPE_GPG_FILE:
        description: "The Sonatype GPG file."
        required: true
      GH_PERSONAL_TOKEN:
        description: "GH personnal Token."
        required: true
      SLACK_RELEASES_WEBHOOK_URL:
        description: "Slack webhook for releases channel."
        required: true

jobs:
  build-artifacts:
    name: Build - Artifacts
    uses: ./.github/workflows/workflow-build-artifacts.yml

  Docker:
    name: Publish Docker
    needs: build-artifacts
    uses: ./.github/workflows/workflow-publish-docker.yml
    if: github.ref == 'refs/heads/develop' || inputs.publish-docker == 'true'
    with:
      force-download-artifact: 'false'
      plugin-version: ${{ inputs.plugin-version != null && inputs.plugin-version || 'LATEST' }}
    secrets:
      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
      DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }}

  Maven:
    name: Publish Maven
    uses: ./.github/workflows/workflow-publish-maven.yml
    secrets:
      SONATYPE_USER: ${{ secrets.SONATYPE_USER }}
      SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }}
      SONATYPE_GPG_KEYID: ${{ secrets.SONATYPE_GPG_KEYID }}
      SONATYPE_GPG_PASSWORD: ${{ secrets.SONATYPE_GPG_PASSWORD }}
      SONATYPE_GPG_FILE: ${{ secrets.SONATYPE_GPG_FILE }}

  Github:
    name: Github Release
    needs: build-artifacts
    if: startsWith(github.ref, 'refs/tags/v')
    uses: ./.github/workflows/workflow-github-release.yml
    secrets:
      GH_PERSONAL_TOKEN: ${{ secrets.GH_PERSONAL_TOKEN }}
      SLACK_RELEASES_WEBHOOK_URL: ${{ secrets.SLACK_RELEASES_WEBHOOK_URL }}
.github/workflows/workflow-test.yml (vendored, deleted): 97 lines

@@ -1,97 +0,0 @@
name: Tests

on:
  schedule:
    - cron: '0 4 * * 1,2,3,4,5'
  workflow_call:
    inputs:
      report-status:
        description: "Report status of the jobs in outputs"
        type: string
        required: false
        default: false
    outputs:
      frontend_status:
        description: "Status of the frontend job"
        value: ${{ jobs.set-frontend-status.outputs.frontend_status }}
      backend_status:
        description: "Status of the backend job"
        value: ${{ jobs.set-backend-status.outputs.backend_status }}

jobs:
  file-changes:
    name: File changes detection
    runs-on: ubuntu-latest
    timeout-minutes: 60
    outputs:
      ui: ${{ steps.changes.outputs.ui }}
      backend: ${{ steps.changes.outputs.backend }}
    steps:
      - uses: actions/checkout@v5
        if: "!startsWith(github.ref, 'refs/tags/v')"
      - uses: dorny/paths-filter@v3
        if: "!startsWith(github.ref, 'refs/tags/v')"
        id: changes
        with:
          filters: |
            ui:
              - 'ui/**'
            backend:
              - '!{ui,.github}/**'
          token: ${{ secrets.GITHUB_TOKEN }}

  frontend:
    name: Frontend - Tests
    needs: file-changes
    if: "needs.file-changes.outputs.ui == 'true' || startsWith(github.ref, 'refs/tags/v')"
    uses: ./.github/workflows/workflow-frontend-test.yml
    secrets:
      GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

  backend:
    name: Backend - Tests
    needs: file-changes
    if: "needs.file-changes.outputs.backend == 'true' || startsWith(github.ref, 'refs/tags/v')"
    uses: ./.github/workflows/workflow-backend-test.yml
    secrets:
      GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
      SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
      GOOGLE_SERVICE_ACCOUNT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT }}

  # Output every job status
  # To be used in other workflows
  report-status:
    name: Report Status
    runs-on: ubuntu-latest
    needs: [ frontend, backend ]
    if: always() && (inputs.report-status == 'true')
    outputs:
      frontend_status: ${{ steps.set-frontend-status.outputs.frontend_status }}
      backend_status: ${{ steps.set-backend-status.outputs.backend_status }}
    steps:
      - id: set-frontend-status
        name: Set frontend job status
        run: echo "::set-output name=frontend_status::${{ needs.frontend.result }}"

      - id: set-backend-status
        name: Set backend job status
        run: echo "::set-output name=backend_status::${{ needs.backend.result }}"

  notify:
    name: Notify - Slack
    runs-on: ubuntu-latest
    needs: [ frontend, backend ]
    if: github.event_name == 'schedule'
    steps:
      - name: Notify failed CI
        id: send-ci-failed
        if: |
          always() && (needs.frontend.result != 'success' ||
          needs.backend.result != 'success')
        uses: kestra-io/actions/.github/actions/send-ci-failed@main
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
@@ -3,30 +3,88 @@ package io.kestra.core.events;

 import io.micronaut.core.annotation.Nullable;
 import io.micronaut.http.HttpRequest;
 import io.micronaut.http.context.ServerRequestContext;
-import lombok.AllArgsConstructor;
 import lombok.Getter;

-@AllArgsConstructor
+import java.util.Objects;
+
 @Getter
 public class CrudEvent<T> {
-    T model;
+    private final T model;
     @Nullable
-    T previousModel;
-    CrudEventType type;
-    HttpRequest<?> request;
+    private final T previousModel;
+    private final CrudEventType type;
+    private final HttpRequest<?> request;

+    /**
+     * Static helper method for creating a new {@link CrudEventType#UPDATE} CrudEvent.
+     *
+     * @param model the new created model.
+     * @param <T>   type of the model.
+     * @return the new {@link CrudEvent}.
+     */
+    public static <T> CrudEvent<T> create(T model) {
+        Objects.requireNonNull(model, "Can't create CREATE event with a null model");
+        return new CrudEvent<>(model, null, CrudEventType.CREATE);
+    }
+
+    /**
+     * Static helper method for creating a new {@link CrudEventType#DELETE} CrudEvent.
+     *
+     * @param model the deleted model.
+     * @param <T>   type of the model.
+     * @return the new {@link CrudEvent}.
+     */
+    public static <T> CrudEvent<T> delete(T model) {
+        Objects.requireNonNull(model, "Can't create DELETE event with a null model");
+        return new CrudEvent<>(null, model, CrudEventType.DELETE);
+    }
+
+    /**
+     * Static helper method for creating a new CrudEvent.
+     *
+     * @param before the model before the update.
+     * @param after  the model after the update.
+     * @param <T>    type of the model.
+     * @return the new {@link CrudEvent}.
+     */
+    public static <T> CrudEvent<T> of(T before, T after) {
+        if (before == null && after == null) {
+            throw new IllegalArgumentException("Both before and after cannot be null");
+        }
+
+        if (before == null) {
+            return create(after);
+        }
+
+        if (after == null) {
+            return delete(before);
+        }
+
+        return new CrudEvent<>(after, before, CrudEventType.UPDATE);
+    }
+
+    /**
+     * @deprecated use the static factory methods.
+     */
+    @Deprecated
     public CrudEvent(T model, CrudEventType type) {
-        this.model = model;
-        this.type = type;
-        this.previousModel = null;
-        this.request = ServerRequestContext.currentRequest().orElse(null);
+        this(
+            CrudEventType.DELETE.equals(type) ? null : model,
+            CrudEventType.DELETE.equals(type) ? model : null,
+            type,
+            ServerRequestContext.currentRequest().orElse(null)
+        );
     }

     public CrudEvent(T model, T previousModel, CrudEventType type) {
         this(model, previousModel, type, ServerRequestContext.currentRequest().orElse(null));
     }

     public CrudEvent(T model, T previousModel, CrudEventType type, HttpRequest<?> request) {
         this.model = model;
         this.previousModel = previousModel;
         this.type = type;
-        this.request = ServerRequestContext.currentRequest().orElse(null);
+        this.request = request;
     }
 }
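Taken together, the factory methods let call sites construct events without reasoning about which field holds the model for a DELETE. A minimal usage sketch, with `String` standing in for any model type:

```java
// Usage sketch of the new factory methods; String stands in for any model type.
CrudEvent<String> created = CrudEvent.create("new");     // CREATE: model set, previousModel null
CrudEvent<String> deleted = CrudEvent.delete("old");     // DELETE: model null, previousModel set
CrudEvent<String> updated = CrudEvent.of("old", "new");  // UPDATE: previousModel="old", model="new"
CrudEvent<String> inferred = CrudEvent.of(null, "new");  // of() falls back to create() when before is null
```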
@@ -6,6 +6,12 @@ import lombok.Getter;
 import lombok.ToString;

 import java.net.URL;
+import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
+import java.util.Enumeration;
+import java.util.jar.JarEntry;
+import java.util.jar.JarFile;
+import java.util.zip.CRC32;

 @AllArgsConstructor
 @Getter
@@ -14,5 +20,59 @@ public class ExternalPlugin {
     private final URL location;
     private final URL[] resources;
-    private final long crc32;
+    private volatile Long crc32; // lazy-val
+
+    public ExternalPlugin(URL location, URL[] resources) {
+        this.location = location;
+        this.resources = resources;
+    }
+
+    public Long getCrc32() {
+        if (this.crc32 == null) {
+            synchronized (this) {
+                if (this.crc32 == null) {
+                    this.crc32 = computeJarCrc32(location);
+                }
+            }
+        }
+        return crc32;
+    }
+
+    /**
+     * Compute a CRC32 of the JAR File without reading the whole file
+     *
+     * @param location of the JAR File.
+     * @return the CRC32, or {@code -1} if the checksum can't be computed.
+     */
+    private static long computeJarCrc32(final URL location) {
+        CRC32 crc = new CRC32();
+        try (JarFile jar = new JarFile(location.toURI().getPath(), false)) {
+            Enumeration<JarEntry> entries = jar.entries();
+            byte[] buffer = new byte[Long.BYTES]; // reusable buffer to avoid re-allocation
+
+            while (entries.hasMoreElements()) {
+                JarEntry entry = entries.nextElement();
+                crc.update(entry.getName().getBytes(StandardCharsets.UTF_8));
+                updateCrc32WithLong(crc, buffer, entry.getSize());
+                updateCrc32WithLong(crc, buffer, entry.getCrc());
+            }
+
+            return crc.getValue();
+        } catch (Exception e) {
+            return -1;
+        }
+    }
+
+    private static void updateCrc32WithLong(CRC32 crc32, byte[] reusable, long val) {
+        // fast long -> byte conversion
+        reusable[0] = (byte) (val >>> 56);
+        reusable[1] = (byte) (val >>> 48);
+        reusable[2] = (byte) (val >>> 40);
+        reusable[3] = (byte) (val >>> 32);
+        reusable[4] = (byte) (val >>> 24);
+        reusable[5] = (byte) (val >>> 16);
+        reusable[6] = (byte) (val >>> 8);
+        reusable[7] = (byte) val;
+        crc32.update(reusable);
+    }
 }
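The `getCrc32()` getter above is the classic double-checked locking idiom: the field is `volatile` so a fully computed value is published safely, the first null check avoids taking the lock on the hot path, and the second re-checks under the lock so the JAR is hashed only once. The same idiom in isolation, as a generic sketch:

```java
import java.util.function.Supplier;

// Generic double-checked lazy initializer, the same idiom as ExternalPlugin.getCrc32().
final class Lazy<T> {
    private volatile T value;              // volatile: safe publication across threads
    private final Supplier<T> supplier;

    Lazy(Supplier<T> supplier) {
        this.supplier = supplier;
    }

    T get() {
        T local = value;                   // one volatile read on the fast path
        if (local == null) {
            synchronized (this) {
                local = value;             // re-check: another thread may have won the race
                if (local == null) {
                    value = local = supplier.get();
                }
            }
        }
        return local;
    }
}
```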
@@ -46,6 +46,7 @@ public class PluginClassLoader extends URLClassLoader {
         + "|dev.failsafe"
         + "|reactor"
         + "|io.opentelemetry"
+        + "|io.netty"
         + ")\\..*$");

     private final ClassLoader parent;
@@ -51,8 +51,7 @@ public class PluginResolver {
                 final List<URL> resources = resolveUrlsForPluginPath(path);
                 plugins.add(new ExternalPlugin(
                     path.toUri().toURL(),
-                    resources.toArray(new URL[0]),
-                    computeJarCrc32(path)
+                    resources.toArray(new URL[0])
                 ));
             }
         } catch (final InvalidPathException | MalformedURLException e) {
@@ -124,33 +123,5 @@ public class PluginResolver {

         return urls;
     }
-
-    /**
-     * Compute a CRC32 of the JAR File without reading the whole file
-     *
-     * @param location of the JAR File.
-     * @return the CRC32 of {@code -1} if the checksum can't be computed.
-     */
-    private static long computeJarCrc32(final Path location) {
-        CRC32 crc = new CRC32();
-        try (JarFile jar = new JarFile(location.toFile(), false)) {
-            Enumeration<JarEntry> entries = jar.entries();
-            while (entries.hasMoreElements()) {
-                JarEntry entry = entries.nextElement();
-                crc.update(entry.getName().getBytes());
-                crc.update(longToBytes(entry.getSize()));
-                crc.update(longToBytes(entry.getCrc()));
-            }
-        } catch (Exception e) {
-            return -1;
-        }
-        return crc.getValue();
-    }
-
-    private static byte[] longToBytes(long x) {
-        ByteBuffer buffer = ByteBuffer.allocate(Long.BYTES);
-        buffer.putLong(x);
-        return buffer.array();
-    }
-
 }
@@ -10,6 +10,7 @@ import io.kestra.core.models.flows.FlowInterface;
 import io.kestra.core.models.flows.Input;
 import io.kestra.core.models.flows.State;
 import io.kestra.core.models.flows.input.SecretInput;
 import io.kestra.core.models.property.Property;
 import io.kestra.core.models.property.PropertyContext;
 import io.kestra.core.models.tasks.Task;
 import io.kestra.core.models.triggers.AbstractTrigger;
@@ -282,15 +283,15 @@ public final class RunVariables {

         if (flow != null && flow.getInputs() != null) {
             // we add default inputs value from the flow if not already set, this will be useful for triggers
             flow.getInputs().stream()
                 .filter(input -> input.getDefaults() != null && !inputs.containsKey(input.getId()))
                 .forEach(input -> {
                     try {
                         inputs.put(input.getId(), FlowInputOutput.resolveDefaultValue(input, propertyContext));
                     } catch (IllegalVariableEvaluationException e) {
-                        throw new RuntimeException("Unable to inject default value for input '" + input.getId() + "'", e);
+                        // Silent catch, if an input depends on another input, or a variable that is populated at runtime / input filling time, we can't resolve it here.
                     }
                 });
         }

         if (!inputs.isEmpty()) {
@@ -172,22 +172,19 @@ public final class JacksonMapper {

         return Pair.of(patchPrevToNew, patchNewToPrev);
     }

-    public static String applyPatches(Object object, List<JsonNode> patches) throws JsonProcessingException {
+    public static JsonNode applyPatchesOnJsonNode(JsonNode jsonObject, List<JsonNode> patches) {
         for (JsonNode patch : patches) {
             try {
                 // Required for ES
                 if (patch.findValue("value") == null) {
-                    ((ObjectNode) patch.get(0)).set("value", (JsonNode) null);
+                    ((ObjectNode) patch.get(0)).set("value", null);
                 }
-                JsonNode current = MAPPER.valueToTree(object);
-                object = JsonPatch.fromJson(patch).apply(current);
+                jsonObject = JsonPatch.fromJson(patch).apply(jsonObject);
             } catch (IOException | JsonPatchException e) {
                 throw new RuntimeException(e);
             }
         }
-        return MAPPER.writeValueAsString(object);
+        return jsonObject;
     }
 }
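The reworked method applies each RFC 6902 patch directly to the `JsonNode`, skipping the old value-to-tree round trip. A small standalone sketch of the same `JsonPatch` API; the package name is an assumption based on the `JsonPatchException` import visible in the diff (the java-json-tools json-patch library):

```java
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.fge.jsonpatch.JsonPatch;

// Applying one RFC 6902 patch to a JsonNode, as applyPatchesOnJsonNode does per patch.
public class JsonPatchDemo {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        JsonNode document = mapper.readTree("{\"value\":\"before\"}");
        JsonNode patch = mapper.readTree("[{\"op\":\"replace\",\"path\":\"/value\",\"value\":\"after\"}]");
        JsonNode result = JsonPatch.fromJson(patch).apply(document);
        System.out.println(result); // {"value":"after"}
    }
}
```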
@@ -3,12 +3,7 @@ package io.kestra.core.services;

 import com.fasterxml.jackson.core.JsonProcessingException;
 import io.kestra.core.exceptions.FlowProcessingException;
 import io.kestra.core.models.executions.Execution;
-import io.kestra.core.models.flows.Flow;
-import io.kestra.core.models.flows.FlowId;
-import io.kestra.core.models.flows.FlowInterface;
-import io.kestra.core.models.flows.FlowWithException;
-import io.kestra.core.models.flows.FlowWithSource;
-import io.kestra.core.models.flows.GenericFlow;
+import io.kestra.core.models.flows.*;
 import io.kestra.core.models.tasks.RunnableTask;
 import io.kestra.core.models.topologies.FlowTopology;
 import io.kestra.core.models.triggers.AbstractTrigger;
@@ -30,16 +25,7 @@ import org.apache.commons.lang3.builder.EqualsBuilder;

 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.lang.reflect.Modifier;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.NoSuchElementException;
-import java.util.Objects;
-import java.util.Optional;
+import java.util.*;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
@@ -551,23 +537,24 @@ public class FlowService {
         return expandAll ? recursiveFlowTopology(new ArrayList<>(), tenant, namespace, id, destinationOnly) : flowTopologyRepository.get().findByFlow(tenant, namespace, id, destinationOnly).stream();
     }

-    private Stream<FlowTopology> recursiveFlowTopology(List<FlowId> flowIds, String tenantId, String namespace, String id, boolean destinationOnly) {
+    private Stream<FlowTopology> recursiveFlowTopology(List<String> visitedTopologies, String tenantId, String namespace, String id, boolean destinationOnly) {
         if (flowTopologyRepository.isEmpty()) {
             throw noRepositoryException();
         }

-        List<FlowTopology> flowTopologies = flowTopologyRepository.get().findByFlow(tenantId, namespace, id, destinationOnly);
-
-        FlowId flowId = FlowId.of(tenantId, namespace, id, null);
-        if (flowIds.contains(flowId)) {
-            return flowTopologies.stream();
-        }
-        flowIds.add(flowId);
+        var flowTopologies = flowTopologyRepository.get().findByFlow(tenantId, namespace, id, destinationOnly);

         return flowTopologies.stream()
-            .flatMap(topology -> Stream.of(topology.getDestination(), topology.getSource()))
-            // recursively fetch child nodes
-            .flatMap(node -> recursiveFlowTopology(flowIds, node.getTenantId(), node.getNamespace(), node.getId(), destinationOnly));
+            // ignore already visited topologies
+            .filter(x -> !visitedTopologies.contains(x.uid()))
+            .flatMap(topology -> {
+                visitedTopologies.add(topology.uid());
+                Stream<FlowTopology> subTopologies = Stream
+                    .of(topology.getDestination(), topology.getSource())
+                    // recursively visit children and parents nodes
+                    .flatMap(relationNode -> recursiveFlowTopology(visitedTopologies, relationNode.getTenantId(), relationNode.getNamespace(), relationNode.getId(), destinationOnly));
+                return Stream.concat(Stream.of(topology), subTopologies);
+            });
     }

     private IllegalStateException noRepositoryException() {
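The rewrite tracks visited topologies by `uid()` and records each one before recursing into both endpoints, which is what guarantees termination on cyclic flow graphs and lets the traversal emit every edge exactly once. The same idea in miniature, on a plain adjacency map (a hypothetical helper, not Kestra API):

```java
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Stream;

// Cycle-safe recursive traversal: a shared visited set ensures each node is emitted at most once.
class GraphWalk {
    static Stream<String> walk(Map<String, List<String>> graph, String node, Set<String> visited) {
        return graph.getOrDefault(node, List.of()).stream()
            .filter(visited::add) // Set.add returns false for already-visited nodes, pruning cycles
            .flatMap(next -> Stream.concat(Stream.of(next), walk(graph, next, visited)));
    }

    public static void main(String[] args) {
        // a -> b -> c -> a is a cycle; the traversal still terminates
        Map<String, List<String>> graph = Map.of("a", List.of("b"), "b", List.of("c"), "c", List.of("a"));
        walk(graph, "a", new HashSet<>(Set.of("a"))).forEach(System.out::println); // prints b, c
    }
}
```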
@@ -18,6 +18,7 @@ import io.kestra.core.repositories.FlowRepositoryInterface;
 import io.kestra.core.repositories.FlowTopologyRepositoryInterface;
 import io.kestra.core.services.ConditionService;
 import io.kestra.core.utils.ListUtils;
+import io.kestra.core.utils.MapUtils;
 import io.kestra.plugin.core.condition.*;
 import io.micronaut.core.annotation.Nullable;
 import jakarta.inject.Inject;
@@ -175,9 +176,6 @@ public class FlowTopologyService {
     protected boolean isTriggerChild(Flow parent, Flow child) {
         List<AbstractTrigger> triggers = ListUtils.emptyOnNull(child.getTriggers());

-        // simulated execution: we add a "simulated" label so conditions can know that the evaluation is for a simulated execution
-        Execution execution = Execution.newExecution(parent, (f, e) -> null, List.of(SIMULATED_EXECUTION), Optional.empty());
-
         // keep only flow trigger
         List<io.kestra.plugin.core.trigger.Flow> flowTriggers = triggers
             .stream()
@@ -189,13 +187,16 @@ public class FlowTopologyService {
             return false;
         }

+        // simulated execution: we add a "simulated" label so conditions can know that the evaluation is for a simulated execution
+        Execution execution = Execution.newExecution(parent, (f, e) -> null, List.of(SIMULATED_EXECUTION), Optional.empty());
+
         boolean conditionMatch = flowTriggers
             .stream()
             .flatMap(flow -> ListUtils.emptyOnNull(flow.getConditions()).stream())
             .allMatch(condition -> validateCondition(condition, parent, execution));

         boolean preconditionMatch = flowTriggers.stream()
-            .anyMatch(flow -> flow.getPreconditions() == null || validateMultipleConditions(flow.getPreconditions().getConditions(), parent, execution));
+            .anyMatch(flow -> flow.getPreconditions() == null || validatePreconditions(flow.getPreconditions(), parent, execution));

         return conditionMatch && preconditionMatch;
     }
@@ -239,11 +240,24 @@ public class FlowTopologyService {
     }

     private boolean isMandatoryMultipleCondition(Condition condition) {
-        return Stream
-            .of(
-                Expression.class
-            )
-            .anyMatch(aClass -> condition.getClass().isAssignableFrom(aClass));
+        return condition.getClass().isAssignableFrom(Expression.class);
     }

+    private boolean validatePreconditions(io.kestra.plugin.core.trigger.Flow.Preconditions preconditions, FlowInterface child, Execution execution) {
+        boolean upstreamFlowMatched = MapUtils.emptyOnNull(preconditions.getUpstreamFlowsConditions())
+            .values()
+            .stream()
+            .filter(c -> !isFilterCondition(c))
+            .anyMatch(c -> validateCondition(c, child, execution));
+
+        boolean whereMatched = MapUtils.emptyOnNull(preconditions.getWhereConditions())
+            .values()
+            .stream()
+            .filter(c -> !isFilterCondition(c))
+            .allMatch(c -> validateCondition(c, child, execution));
+
+        // to be a dependency, if upstream flow is set it must be either inside it so it's a AND between upstream flow and where
+        return upstreamFlowMatched && whereMatched;
+    }
+
     private boolean isFilterCondition(Condition condition) {
@@ -206,22 +206,17 @@ public class MapUtils {

     /**
      * Utility method that flatten a nested map.
      * <p>
-     * NOTE: for simplicity, this method didn't allow to flatten maps with conflicting keys that would end up in different flatten keys,
-     * this could be related later if needed by flattening {k1: k2: {k3: v1}, k1: {k4: v2}} to {k1.k2.k3: v1, k1.k4: v2} is prohibited for now.
      *
      * @param nestedMap the nested map.
      * @return the flattened map.
-     *
-     * @throws IllegalArgumentException if any entry contains a map of more than one element.
      */
     public static Map<String, Object> nestedToFlattenMap(@NotNull Map<String, Object> nestedMap) {
         Map<String, Object> result = new TreeMap<>();

         for (Map.Entry<String, Object> entry : nestedMap.entrySet()) {
             if (entry.getValue() instanceof Map<?, ?> map) {
-                Map.Entry<String, Object> flatten = flattenEntry(entry.getKey(), (Map<String, Object>) map);
-                result.put(flatten.getKey(), flatten.getValue());
+                Map<String, Object> flatten = flattenEntry(entry.getKey(), (Map<String, Object>) map);
+                result.putAll(flatten);
             } else {
                 result.put(entry.getKey(), entry.getValue());
             }
@@ -229,18 +224,19 @@ public class MapUtils {
         return result;
     }

-    private static Map.Entry<String, Object> flattenEntry(String key, Map<String, Object> value) {
-        if (value.size() > 1) {
-            throw new IllegalArgumentException("You cannot flatten a map with an entry that is a map of more than one element, conflicting key: " + key);
-        }
-
-        Map.Entry<String, Object> entry = value.entrySet().iterator().next();
-        String newKey = key + "." + entry.getKey();
-        Object newValue = entry.getValue();
-        if (newValue instanceof Map<?, ?> map) {
-            return flattenEntry(newKey, (Map<String, Object>) map);
-        } else {
-            return Map.entry(newKey, newValue);
-        }
+    private static Map<String, Object> flattenEntry(String key, Map<String, Object> value) {
+        Map<String, Object> result = new TreeMap<>();
+
+        for (Map.Entry<String, Object> entry : value.entrySet()) {
+            String newKey = key + "." + entry.getKey();
+            Object newValue = entry.getValue();
+            if (newValue instanceof Map<?, ?> map) {
+                result.putAll(flattenEntry(newKey, (Map<String, Object>) map));
+            } else {
+                result.put(newKey, newValue);
+            }
+        }
+
+        return result;
     }
 }
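With multi-entry maps now handled, sibling keys under a common prefix flatten into separate dotted keys instead of throwing. Expected behavior, sketched with made-up values:

```java
import io.kestra.core.utils.MapUtils;

import java.util.Map;

// Expected behavior of the reworked nestedToFlattenMap; the input values are made up.
public class FlattenDemo {
    public static void main(String[] args) {
        Map<String, Object> nested = Map.of(
            "k1", Map.of(
                "k2", Map.of("k3", "v1"),
                "k4", "v2"
            )
        );
        // Previously this threw IllegalArgumentException because "k1" holds a two-entry map.
        System.out.println(MapUtils.nestedToFlattenMap(nested));
        // -> {k1.k2.k3=v1, k1.k4=v2}
    }
}
```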
@@ -202,7 +202,7 @@ import static io.kestra.core.utils.Rethrow.throwPredicate;
     code = """
         id: sentry_execution_example
         namespace: company.team

         tasks:
           - id: send_alert
             type: io.kestra.plugin.notifications.sentry.SentryExecution
@@ -221,7 +221,7 @@ import static io.kestra.core.utils.Rethrow.throwPredicate;
           - WARNING
           - type: io.kestra.plugin.core.condition.ExecutionNamespace
             namespace: company.payroll
             prefix: false"""
 )
 },
@@ -405,6 +405,28 @@ public class Flow extends AbstractTrigger implements TriggerOutput<Flow.Output>
         return conditions;
     }

+    @JsonIgnore
+    public Map<String, Condition> getUpstreamFlowsConditions() {
+        AtomicInteger conditionId = new AtomicInteger();
+        return ListUtils.emptyOnNull(flows).stream()
+            .map(upstreamFlow -> Map.entry(
+                "condition_" + conditionId.incrementAndGet(),
+                new UpstreamFlowCondition(upstreamFlow)
+            ))
+            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
+    }
+
+    @JsonIgnore
+    public Map<String, Condition> getWhereConditions() {
+        AtomicInteger conditionId = new AtomicInteger();
+        return ListUtils.emptyOnNull(where).stream()
+            .map(filter -> Map.entry(
+                "condition_" + conditionId.incrementAndGet() + "_" + filter.getId(),
+                new FilterCondition(filter)
+            ))
+            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
+    }
+
     @Override
     public Logger logger() {
         return log;
core/src/test/java/io/kestra/core/events/CrudEventTest.java (new file): 121 lines

@@ -0,0 +1,121 @@
package io.kestra.core.events;

import org.junit.jupiter.api.Test;

import static org.assertj.core.api.AssertionsForClassTypes.assertThat;
import static org.assertj.core.api.AssertionsForClassTypes.assertThatThrownBy;

class CrudEventTest {

    @Test
    void shouldReturnCreateEventWhenModelIsProvided() {
        // Given
        String model = "testModel";

        // When
        CrudEvent<String> event = CrudEvent.create(model);

        // Then
        assertThat(event.getModel()).isEqualTo(model);
        assertThat(event.getPreviousModel()).isNull();
        assertThat(event.getType()).isEqualTo(CrudEventType.CREATE);
        assertThat(event.getRequest()).isNull();
    }

    @Test
    void shouldThrowExceptionWhenCreateEventWithNullModel() {
        // Given
        String model = null;

        // When / Then
        assertThatThrownBy(() -> CrudEvent.create(model))
            .isInstanceOf(NullPointerException.class)
            .hasMessage("Can't create CREATE event with a null model");
    }

    @Test
    void shouldReturnDeleteEventWhenModelIsProvided() {
        // Given
        String model = "testModel";

        // When
        CrudEvent<String> event = CrudEvent.delete(model);

        // Then
        assertThat(event.getModel()).isNull();
        assertThat(event.getPreviousModel()).isEqualTo(model);
        assertThat(event.getType()).isEqualTo(CrudEventType.DELETE);
        assertThat(event.getRequest()).isNull();
    }

    @Test
    void shouldThrowExceptionWhenDeleteEventWithNullModel() {
        // Given
        String model = null;

        // When / Then
        assertThatThrownBy(() -> CrudEvent.delete(model))
            .isInstanceOf(NullPointerException.class)
            .hasMessage("Can't create DELETE event with a null model");
    }

    @Test
    void shouldReturnUpdateEventWhenBeforeAndAfterAreProvided() {
        // Given
        String before = "oldModel";
        String after = "newModel";

        // When
        CrudEvent<String> event = CrudEvent.of(before, after);

        // Then
        assertThat(event.getModel()).isEqualTo(after);
        assertThat(event.getPreviousModel()).isEqualTo(before);
        assertThat(event.getType()).isEqualTo(CrudEventType.UPDATE);
        assertThat(event.getRequest()).isNull();
    }

    @Test
    void shouldReturnCreateEventWhenBeforeIsNullAndAfterIsProvided() {
        // Given
        String before = null;
        String after = "newModel";

        // When
        CrudEvent<String> event = CrudEvent.of(before, after);

        // Then
        assertThat(event.getModel()).isEqualTo(after);
        assertThat(event.getPreviousModel()).isNull();
        assertThat(event.getType()).isEqualTo(CrudEventType.CREATE);
        assertThat(event.getRequest()).isNull();
    }

    @Test
    void shouldReturnDeleteEventWhenAfterIsNullAndBeforeIsProvided() {
        // Given
        String before = "oldModel";
        String after = null;

        // When
        CrudEvent<String> event = CrudEvent.of(before, after);

        // Then
        assertThat(event.getModel()).isNull();
        assertThat(event.getPreviousModel()).isEqualTo(before);
        assertThat(event.getType()).isEqualTo(CrudEventType.DELETE);
        assertThat(event.getRequest()).isNull();
    }

    @Test
    void shouldThrowExceptionWhenBothBeforeAndAfterAreNull() {
        // Given
        String before = null;
        String after = null;

        // When / Then
        assertThatThrownBy(() -> CrudEvent.of(before, after))
            .isInstanceOf(IllegalArgumentException.class)
            .hasMessage("Both before and after cannot be null");
    }
}
@@ -1,13 +1,24 @@
 package io.kestra.core.runners;

 import io.kestra.core.exceptions.IllegalVariableEvaluationException;
 import io.kestra.core.models.executions.Execution;
 import io.kestra.core.models.flows.DependsOn;
 import io.kestra.core.models.flows.Flow;
 import io.kestra.core.models.flows.Type;
 import io.kestra.core.models.flows.input.BoolInput;
 import io.kestra.core.models.property.Property;
 import io.kestra.core.models.property.PropertyContext;
 import io.kestra.core.models.tasks.Task;
 import io.kestra.core.models.triggers.AbstractTrigger;
 import io.kestra.core.runners.pebble.functions.SecretFunction;
 import io.kestra.core.utils.IdUtils;
 import io.micronaut.context.ApplicationContext;
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;

 import java.util.Collections;
 import java.util.List;
 import java.util.Map;

 import static org.assertj.core.api.Assertions.assertThat;
@@ -112,4 +123,25 @@ class RunVariablesTest {
         assertThat(kestra.get("environment")).isEqualTo("test");
         assertThat(kestra.get("url")).isEqualTo("http://localhost:8080");
     }
+
+    @Test
+    void nonResolvableDynamicInputsShouldBeSkipped() throws IllegalVariableEvaluationException {
+        Map<String, Object> variables = new RunVariables.DefaultBuilder()
+            .withFlow(Flow
+                .builder()
+                .namespace("a.b")
+                .id("c")
+                .inputs(List.of(
+                    BoolInput.builder().id("a").type(Type.BOOL).defaults(Property.ofValue(true)).build(),
+                    BoolInput.builder().id("b").type(Type.BOOL).dependsOn(new DependsOn(List.of("a"), null)).defaults(Property.ofExpression("{{inputs.a == true}}")).build()
+                ))
+                .build()
+            )
+            .withExecution(Execution.builder().id(IdUtils.create()).build())
+            .build(new RunContextLogger(), PropertyContext.create(new VariableRenderer(Mockito.mock(ApplicationContext.class), Mockito.mock(VariableRenderer.VariableConfiguration.class), Collections.emptyList())));
+
+        Assertions.assertEquals(Map.of(
+            "a", true
+        ), variables.get("inputs"));
+    }
 }
@@ -1,13 +1,14 @@
 package io.kestra.core.serializers;

 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import lombok.AllArgsConstructor;
 import lombok.Getter;
 import lombok.NoArgsConstructor;
 import org.apache.commons.lang3.tuple.Pair;
 import org.junit.jupiter.api.Test;
 import org.junitpioneer.jupiter.DefaultTimeZone;
 import org.junitpioneer.jupiter.RetryingTest;

 import java.io.IOException;
 import java.time.Instant;
@@ -86,6 +87,36 @@ class JacksonMapperTest {
         assertThat(deserialize.getZonedDateTime().toEpochSecond()).isEqualTo(original.getZonedDateTime().toEpochSecond());
         assertThat(deserialize.getZonedDateTime().getOffset()).isEqualTo(original.getZonedDateTime().getOffset());
     }
+
+    @Test
+    void shouldComputeDiffGivenCreatedObject() {
+        Pair<JsonNode, JsonNode> value = JacksonMapper.getBiDirectionalDiffs(null, new DummyObject("value"));
+        // patch
+        assertThat(value.getLeft().toString()).isEqualTo("[{\"op\":\"replace\",\"path\":\"\",\"value\":{\"value\":\"value\"}}]");
+        // Revert
+        assertThat(value.getRight().toString()).isEqualTo("[{\"op\":\"replace\",\"path\":\"\",\"value\":null}]");
+    }
+
+    @Test
+    void shouldComputeDiffGivenUpdatedObject() {
+        Pair<JsonNode, JsonNode> value = JacksonMapper.getBiDirectionalDiffs(new DummyObject("before"), new DummyObject("after"));
+        // patch
+        assertThat(value.getLeft().toString()).isEqualTo("[{\"op\":\"replace\",\"path\":\"/value\",\"value\":\"after\"}]");
+        // Revert
+        assertThat(value.getRight().toString()).isEqualTo("[{\"op\":\"replace\",\"path\":\"/value\",\"value\":\"before\"}]");
+    }
+
+    @Test
+    void shouldComputeDiffGivenDeletedObject() {
+        Pair<JsonNode, JsonNode> value = JacksonMapper.getBiDirectionalDiffs(new DummyObject("value"), null);
+        // Patch
+        assertThat(value.getLeft().toString()).isEqualTo("[{\"op\":\"replace\",\"path\":\"\",\"value\":null}]");
+        // Revert
+        assertThat(value.getRight().toString()).isEqualTo("[{\"op\":\"replace\",\"path\":\"\",\"value\":{\"value\":\"value\"}}]");
+    }
+
+    private record DummyObject(String value){}

 @Getter
 @NoArgsConstructor
@@ -204,6 +204,10 @@ class FlowTopologyServiceTest {
                     io.kestra.plugin.core.trigger.Flow.UpstreamFlow.builder().namespace("io.kestra.ee").flowId("parent").build(),
                     io.kestra.plugin.core.trigger.Flow.UpstreamFlow.builder().namespace("io.kestra.others").flowId("invalid").build()
                 ))
+                // add an always true condition to validate that it's an AND between 'flows' and 'where'
+                .where(List.of(io.kestra.plugin.core.trigger.Flow.ExecutionFilter.builder()
+                    .filters(List.of(io.kestra.plugin.core.trigger.Flow.Filter.builder().field(io.kestra.plugin.core.trigger.Flow.Field.EXPRESSION).type(io.kestra.plugin.core.trigger.Flow.Type.IS_NOT_NULL).value("something").build()))
+                    .build()))
                 .build()
         )
         .build()
@@ -0,0 +1,304 @@
package io.kestra.core.topologies;

import io.kestra.core.exceptions.FlowProcessingException;
import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.models.flows.FlowWithSource;
import io.kestra.core.models.topologies.FlowTopology;
import io.kestra.core.repositories.FlowRepositoryInterface;
import io.kestra.core.repositories.FlowTopologyRepositoryInterface;
import io.kestra.core.services.FlowService;
import io.kestra.core.utils.IdUtils;
import jakarta.inject.Inject;
import org.jetbrains.annotations.NotNull;
import org.junit.jupiter.api.Test;

import java.util.List;

import static org.assertj.core.api.Assertions.assertThat;

@KestraTest
public class FlowTopologyTest {
    @Inject
    private FlowService flowService;
    @Inject
    private FlowTopologyService flowTopologyService;
    @Inject
    private FlowTopologyRepositoryInterface flowTopologyRepository;
    @Test
    void should_findDependencies_simpleCase() throws FlowProcessingException {
        // Given
        var tenantId = randomTenantId();
        var child = flowService.importFlow(tenantId,
            """
            id: child
            namespace: io.kestra.unittest
            tasks:
              - id: download
                type: io.kestra.plugin.core.http.Download
            """);
        var parent = flowService.importFlow(tenantId, """
            id: parent
            namespace: io.kestra.unittest
            tasks:
              - id: subflow
                type: io.kestra.core.tasks.flows.Flow
                flowId: child
                namespace: io.kestra.unittest
            """);
        var unrelatedFlow = flowService.importFlow(tenantId, """
            id: unrelated_flow
            namespace: io.kestra.unittest
            tasks:
              - id: download
                type: io.kestra.plugin.core.http.Download
            """);

        // When
        computeAndSaveTopologies(List.of(child, parent, unrelatedFlow));
        System.out.println();
        flowTopologyRepository.findAll(tenantId).forEach(topology -> {
            System.out.println(FlowTopologyTestData.of(topology));
        });

        var dependencies = flowService.findDependencies(tenantId, "io.kestra.unittest", parent.getId(), false, true);
        flowTopologyRepository.findAll(tenantId).forEach(topology -> {
            System.out.println(FlowTopologyTestData.of(topology));
        });

        // Then
        assertThat(dependencies.map(FlowTopologyTestData::of))
            .containsExactlyInAnyOrder(
                new FlowTopologyTestData(parent, child)
            );
    }
    @Test
    void should_findDependencies_subchildAndSuperParent() throws FlowProcessingException {
        // Given
        var tenantId = randomTenantId();
        var subChild = flowService.importFlow(tenantId,
            """
            id: sub_child
            namespace: io.kestra.unittest
            tasks:
              - id: download
                type: io.kestra.plugin.core.http.Download
            """);
        var child = flowService.importFlow(tenantId,
            """
            id: child
            namespace: io.kestra.unittest
            tasks:
              - id: subflow
                type: io.kestra.core.tasks.flows.Flow
                flowId: sub_child
                namespace: io.kestra.unittest
            """);
        var superParent = flowService.importFlow(tenantId, """
            id: super_parent
            namespace: io.kestra.unittest
            tasks:
              - id: subflow
                type: io.kestra.core.tasks.flows.Flow
                flowId: parent
                namespace: io.kestra.unittest
            """);
        var parent = flowService.importFlow(tenantId, """
            id: parent
            namespace: io.kestra.unittest
            tasks:
              - id: subflow
                type: io.kestra.core.tasks.flows.Flow
                flowId: child
                namespace: io.kestra.unittest
            """);
        var unrelatedFlow = flowService.importFlow(tenantId, """
            id: unrelated_flow
            namespace: io.kestra.unittest
            tasks:
              - id: download
                type: io.kestra.plugin.core.http.Download
            """);

        // When
        computeAndSaveTopologies(List.of(subChild, child, superParent, parent, unrelatedFlow));
        System.out.println();
        flowTopologyRepository.findAll(tenantId).forEach(topology -> {
            System.out.println(FlowTopologyTestData.of(topology));
        });
        System.out.println();

        var dependencies = flowService.findDependencies(tenantId, "io.kestra.unittest", parent.getId(), false, true);
        flowTopologyRepository.findAll(tenantId).forEach(topology -> {
            System.out.println(FlowTopologyTestData.of(topology));
        });

        // Then
        assertThat(dependencies.map(FlowTopologyTestData::of))
            .containsExactlyInAnyOrder(
                new FlowTopologyTestData(superParent, parent),
                new FlowTopologyTestData(parent, child),
                new FlowTopologyTestData(child, subChild)
            );
    }
    @Test
    void should_findDependencies_cyclicTriggers() throws FlowProcessingException {
        // Given
        var tenantId = randomTenantId();
        var triggeredFlowOne = flowService.importFlow(tenantId,
            """
            id: triggered_flow_one
            namespace: io.kestra.unittest
            tasks:
              - id: download
                type: io.kestra.plugin.core.http.Download
            triggers:
              - id: listen
                type: io.kestra.plugin.core.trigger.Flow
                conditions:
                  - type: io.kestra.plugin.core.condition.ExecutionStatus
                    in:
                      - FAILED
            """);
        var triggeredFlowTwo = flowService.importFlow(tenantId, """
            id: triggered_flow_two
            namespace: io.kestra.unittest
            tasks:
              - id: download
                type: io.kestra.plugin.core.http.Download
            triggers:
              - id: listen
                type: io.kestra.plugin.core.trigger.Flow
                conditions:
                  - type: io.kestra.plugin.core.condition.ExecutionStatus
                    in:
                      - FAILED
            """);

        // When
        computeAndSaveTopologies(List.of(triggeredFlowOne, triggeredFlowTwo));

        flowTopologyRepository.findAll(tenantId).forEach(topology -> {
            System.out.println(FlowTopologyTestData.of(topology));
        });

        var dependencies = flowService.findDependencies(tenantId, "io.kestra.unittest", triggeredFlowTwo.getId(), false, true).toList();

        flowTopologyRepository.findAll(tenantId).forEach(topology -> {
            System.out.println(FlowTopologyTestData.of(topology));
        });

        // Then
        assertThat(dependencies.stream().map(FlowTopologyTestData::of))
            .containsExactlyInAnyOrder(
                new FlowTopologyTestData(triggeredFlowTwo, triggeredFlowOne),
                new FlowTopologyTestData(triggeredFlowOne, triggeredFlowTwo)
            );
    }
    @Test
    void flowTriggerWithTargetFlow() throws FlowProcessingException {
        // Given
        var tenantId = randomTenantId();
        var parent = flowService.importFlow(tenantId,
            """
            id: parent
            namespace: io.kestra.unittest
            inputs:
              - id: a
                type: BOOL
                defaults: true

              - id: b
                type: BOOL
                defaults: "{{ inputs.a == true }}"
                dependsOn:
                  inputs:
                    - a
            tasks:
              - id: helloA
                type: io.kestra.plugin.core.log.Log
                message: Hello A
            """);
        var child = flowService.importFlow(tenantId, """
            id: child
            namespace: io.kestra.unittest
            tasks:
              - id: helloB
                type: io.kestra.plugin.core.log.Log
                message: Hello B
            triggers:
              - id: release
                type: io.kestra.plugin.core.trigger.Flow
                states:
                  - SUCCESS
                preconditions:
                  id: flows
                  flows:
                    - namespace: io.kestra.unittest
                      flowId: parent
            """);
        var unrelatedFlow = flowService.importFlow(tenantId, """
            id: unrelated_flow
            namespace: io.kestra.unittest
            tasks:
              - id: download
                type: io.kestra.plugin.core.http.Download
            """);

        // When
        computeAndSaveTopologies(List.of(child, parent, unrelatedFlow));
        System.out.println();
        flowTopologyRepository.findAll(tenantId).forEach(topology -> {
            System.out.println(FlowTopologyTestData.of(topology));
        });

        var dependencies = flowService.findDependencies(tenantId, "io.kestra.unittest", parent.getId(), false, true);
        flowTopologyRepository.findAll(tenantId).forEach(topology -> {
            System.out.println(FlowTopologyTestData.of(topology));
        });

        // Then
        assertThat(dependencies.map(FlowTopologyTestData::of))
            .containsExactlyInAnyOrder(
                new FlowTopologyTestData(parent, child)
            );
    }
    /**
     * This function mimics the production behaviour.
     */
    private void computeAndSaveTopologies(List<@NotNull FlowWithSource> flows) {
        flows.forEach(flow ->
            flowTopologyService
                .topology(
                    flow,
                    flows
                ).distinct()
                .forEach(topology -> flowTopologyRepository.save(topology))
        );
    }

    private static String randomTenantId() {
        return FlowTopologyTest.class + IdUtils.create();
    }

    record FlowTopologyTestData(String sourceUid, String destinationUid) {
        public FlowTopologyTestData(FlowWithSource parent, FlowWithSource child) {
            this(parent.uidWithoutRevision(), child.uidWithoutRevision());
        }

        public static FlowTopologyTestData of(FlowTopology flowTopology) {
            return new FlowTopologyTestData(flowTopology.getSource().getUid(), flowTopology.getDestination().getUid());
        }

        @Override
        public String toString() {
            return sourceUid + " -> " + destinationUid;
        }
    }
}
@@ -9,7 +9,6 @@ import java.util.List;
import java.util.Map;

import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertThrows;

class MapUtilsTest {
    @SuppressWarnings("unchecked")
@@ -208,10 +207,13 @@ class MapUtilsTest {
    }

    @Test
    void shouldThrowIfNestedMapContainsMultipleEntries() {
        var exception = assertThrows(IllegalArgumentException.class,
            () -> MapUtils.nestedToFlattenMap(Map.of("k1", Map.of("k2", Map.of("k3", "v1"), "k4", "v2")))
        );
        assertThat(exception.getMessage()).isEqualTo("You cannot flatten a map with an entry that is a map of more than one element, conflicting key: k1");
    void shouldFlattenANestedMapWithDuplicateKeys() {
        Map<String, Object> results = MapUtils.nestedToFlattenMap(Map.of("k1", Map.of("k2", Map.of("k3", "v1"), "k4", "v2")));

        assertThat(results).hasSize(2);
        assertThat(results).containsAllEntriesOf(Map.of(
            "k1.k2.k3", "v1",
            "k1.k4", "v2"
        ));
    }
}
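The replaced test documents a behaviour change: sibling keys inside nested maps are now flattened to dotted paths instead of being rejected with an IllegalArgumentException. A minimal sketch of that flattening rule (a hypothetical helper, not Kestra's actual MapUtils implementation):

import java.util.HashMap;
import java.util.Map;

// Hypothetical helper mirroring the behaviour asserted above:
// {"k1": {"k2": {"k3": "v1"}, "k4": "v2"}} -> {"k1.k2.k3": "v1", "k1.k4": "v2"}
class FlattenSketch {
    static Map<String, Object> flatten(String prefix, Map<String, Object> nested) {
        Map<String, Object> flat = new HashMap<>();
        nested.forEach((key, value) -> {
            String path = prefix.isEmpty() ? key : prefix + "." + key;
            if (value instanceof Map<?, ?> map) {
                @SuppressWarnings("unchecked")
                Map<String, Object> child = (Map<String, Object>) map;
                flat.putAll(flatten(path, child)); // recurse, extending the dotted path
            } else {
                flat.put(path, value);
            }
        });
        return flat;
    }

    public static void main(String[] args) {
        // Prints {k1.k2.k3=v1, k1.k4=v2} (entry order may vary)
        System.out.println(flatten("", Map.of("k1", Map.of("k2", Map.of("k3", "v1"), "k4", "v2"))));
    }
}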
@@ -1,4 +1,4 @@
version=1.0.0-SNAPSHOT
version=1.0.1

org.gradle.jvmargs=-Xmx2g -XX:MaxMetaspaceSize=512m -XX:+HeapDumpOnOutOfMemoryError
org.gradle.parallel=true
@@ -0,0 +1,2 @@
-- We must truncate the table because a bug in 0.24 led to records not being purged from this table
truncate table execution_running;
@@ -0,0 +1,2 @@
-- We must truncate the table because a bug in 0.24 led to records not being purged from this table
truncate table execution_running;
@@ -0,0 +1,2 @@
-- We must truncate the table because a bug in 0.24 led to records not being purged from this table
truncate table execution_running;
@@ -150,13 +150,8 @@ public abstract class AbstractJdbcDashboardRepository extends AbstractJdbcReposi
        fields.put(field("source_code"), source);

        this.jdbcRepository.persist(dashboard, fields);

        if (previousDashboard == null) {
            eventPublisher.publishEvent(new CrudEvent<>(dashboard, CrudEventType.CREATE));
        } else {
            eventPublisher.publishEvent(new CrudEvent<>(dashboard, previousDashboard, CrudEventType.UPDATE));
        }

        this.eventPublisher.publishEvent(CrudEvent.of(previousDashboard, dashboard));

        return dashboard;
    }

@@ -174,8 +169,7 @@ public abstract class AbstractJdbcDashboardRepository extends AbstractJdbcReposi
        fields.put(field("source_code"), deleted.getSourceCode());

        this.jdbcRepository.persist(deleted, fields);

        eventPublisher.publishEvent(new CrudEvent<>(dashboard.get(), CrudEventType.DELETE));
        this.eventPublisher.publishEvent(CrudEvent.delete(dashboard.get()));

        return deleted;
    }

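The repository hunks above (and the ones that follow) all migrate from the CrudEvent constructors to static factories. A sketch of what those factories plausibly encapsulate, inferred only from the call sites in this diff; the real class lives in kestra core, and the names and fields here are assumptions:

// Assumed shape of the CrudEvent factories (of/create/delete) used above,
// inferred from the call sites; not the actual io.kestra.core class.
class CrudEventSketch<T> {
    enum Type { CREATE, UPDATE, DELETE }

    final T entity;
    final T previous; // null for CREATE and DELETE
    final Type type;

    CrudEventSketch(T entity, T previous, Type type) {
        this.entity = entity;
        this.previous = previous;
        this.type = type;
    }

    // of(previous, entity) picks CREATE when there is no previous state and
    // UPDATE otherwise, which is exactly the if/else the dashboard save() dropped.
    static <T> CrudEventSketch<T> of(T previous, T entity) {
        return previous == null
            ? new CrudEventSketch<>(entity, null, Type.CREATE)
            : new CrudEventSketch<>(entity, previous, Type.UPDATE);
    }

    static <T> CrudEventSketch<T> create(T entity) {
        return new CrudEventSketch<>(entity, null, Type.CREATE);
    }

    static <T> CrudEventSketch<T> delete(T entity) {
        return new CrudEventSketch<>(entity, null, Type.DELETE);
    }
}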
@@ -969,14 +969,16 @@ public abstract class AbstractJdbcExecutionRepository extends AbstractJdbcReposi

        executionQueue().emit(deleted);

        eventPublisher.publishEvent(new CrudEvent<>(deleted, CrudEventType.DELETE));
        eventPublisher.publishEvent(CrudEvent.delete(deleted));

        return deleted;
    }

    @Override
    public Integer purge(Execution execution) {
        return this.jdbcRepository.delete(execution);
        int delete = this.jdbcRepository.delete(execution);
        eventPublisher.publishEvent(CrudEvent.delete(execution));
        return delete;
    }

    public Executor lock(String executionId, Function<Pair<Execution, ExecutorState>, Pair<Executor, ExecutorState>> function) {
@@ -700,12 +700,7 @@ public abstract class AbstractJdbcFlowRepository extends AbstractJdbcRepository
        this.jdbcRepository.persist(flow, fields);

        flowQueue.emit(flow);

        if (nullOrExisting != null) {
            eventPublisher.publishEvent(new CrudEvent<>(flow, nullOrExisting, crudEventType));
        } else {
            eventPublisher.publishEvent(new CrudEvent<>(flow, crudEventType));
        }
        eventPublisher.publishEvent(new CrudEvent<>(flow, nullOrExisting, crudEventType));

        return flowWithSource.toBuilder().revision(revision).build();
    }
@@ -735,8 +730,7 @@ public abstract class AbstractJdbcFlowRepository extends AbstractJdbcRepository
        this.jdbcRepository.persist(deleted, fields);

        flowQueue.emit(deleted);

        eventPublisher.publishEvent(new CrudEvent<>(flow, CrudEventType.DELETE));
        eventPublisher.publishEvent(CrudEvent.delete(flow));

        return deleted;
    }

@@ -67,8 +67,7 @@ public abstract class AbstractJdbcSettingRepository extends AbstractJdbcReposito
    public Setting save(Setting setting) {
        Map<Field<Object>, Object> fields = this.jdbcRepository.persistFields(setting);
        this.jdbcRepository.persist(setting, fields);

        eventPublisher.publishEvent(new CrudEvent<>(setting, CrudEventType.UPDATE));
        this.eventPublisher.publishEvent(new CrudEvent<>(setting, CrudEventType.UPDATE));

        return setting;
    }
@@ -82,8 +81,7 @@ public abstract class AbstractJdbcSettingRepository extends AbstractJdbcReposito
        }

        this.jdbcRepository.delete(setting);

        eventPublisher.publishEvent(new CrudEvent<>(setting, CrudEventType.DELETE));
        this.eventPublisher.publishEvent(CrudEvent.delete(setting));

        return setting;
    }

@@ -174,7 +174,7 @@ public abstract class AbstractJdbcTemplateRepository extends AbstractJdbcReposit

        try {
            templateQueue.emit(template);
            eventPublisher.publishEvent(new CrudEvent<>(template, CrudEventType.CREATE));
            eventPublisher.publishEvent(CrudEvent.create(template));

            return template;
        } catch (QueueException e) {
@@ -217,7 +217,7 @@ public abstract class AbstractJdbcTemplateRepository extends AbstractJdbcReposit

        try {
            templateQueue.emit(deleted);
            eventPublisher.publishEvent(new CrudEvent<>(deleted, CrudEventType.DELETE));
            eventPublisher.publishEvent(CrudEvent.delete(deleted));
        } catch (QueueException e) {
            throw new RuntimeException(e);
        }

@@ -663,6 +663,14 @@ public abstract class StorageTestSuite {
        put(tenantId, prefix);
    }

    @Test
    void put_PathWithTenantStringInIt() throws Exception {
        String tenantId = IdUtils.create();
        String prefix = tenantId + "/" + IdUtils.create();

        put(tenantId, prefix);
    }

    @Test
    void putFromAnotherFile() throws Exception {
        String prefix = IdUtils.create();
@@ -982,6 +990,14 @@ public abstract class StorageTestSuite {
        deleteByPrefix(prefix, tenantId);
    }

    @Test
    void deleteByPrefix_PathWithTenantStringInIt() throws Exception {
        String tenantId = IdUtils.create();
        String prefix = tenantId + "/" + IdUtils.create();

        deleteByPrefix(prefix, tenantId);
    }

    @Test
    void deleteByPrefixNotFound() throws URISyntaxException, IOException {
        String prefix = IdUtils.create();

@@ -45,6 +45,10 @@

function removeItem(yaml: string, index: number){
    flowStore.flowYaml = yaml;
    if(items.value.length <= 1 && index === 0){
        emits("update:modelValue", undefined);
        return;
    }

    let localItems = [...items.value]
    localItems.splice(index, 1)

@@ -482,6 +482,8 @@
import {useAuthStore} from "override/stores/auth.ts";
import {useFlowStore} from "../../stores/flow.ts";

import {defaultNamespace} from "../../composables/useNamespaces";

export default {
    mixins: [RouteContext, RestoreUrl, DataTableActions, SelectTableActions],
    components: {
@@ -705,15 +707,12 @@
        }
    },
    beforeRouteEnter(to, _, next) {
        const defaultNamespace = localStorage.getItem(
            storageKeys.DEFAULT_NAMESPACE,
        );
        const query = {...to.query};
        let queryHasChanged = false;

        const queryKeys = Object.keys(query);
        if (this?.namespace === undefined && defaultNamespace && !queryKeys.some(key => key.startsWith("filters[namespace]"))) {
            query["filters[namespace][PREFIX]"] = defaultNamespace;
        if (this?.namespace === undefined && defaultNamespace() && !queryKeys.some(key => key.startsWith("filters[namespace]"))) {
            query["filters[namespace][PREFIX]"] = defaultNamespace();
            queryHasChanged = true;
        }

@@ -19,6 +19,7 @@
<el-button-group v-else-if="isURI(value)">
    <el-button
        type="primary"
        tag="a"
        size="small"
        :href="value"
        target="_blank"

@@ -11,7 +11,6 @@
import RouteContext from "../../mixins/routeContext";
import TopNavBar from "../../components/layout/TopNavBar.vue";
import MultiPanelFlowEditorView from "./MultiPanelFlowEditorView.vue";
import {storageKeys} from "../../utils/constants";
import {useBlueprintsStore} from "../../stores/blueprints";
import {useCoreStore} from "../../stores/core";
import {editorViewTypes} from "../../utils/constants";
@@ -19,6 +18,7 @@
import {getRandomID} from "../../../scripts/id";
import {useEditorStore} from "../../stores/editor";
import {useFlowStore} from "../../stores/flow";
import {defaultNamespace} from "../../composables/useNamespaces";

export default {
    mixins: [RouteContext],
@@ -50,8 +50,7 @@
        } else if (blueprintId && blueprintSource) {
            flowYaml = await this.blueprintsStore.getBlueprintSource({type: blueprintSource, kind: "flow", id: blueprintId});
        } else {
            const defaultNamespace = localStorage.getItem(storageKeys.DEFAULT_NAMESPACE);
            const selectedNamespace = this.$route.query.namespace || defaultNamespace || "company.team";
            const selectedNamespace = this.$route.query.namespace || defaultNamespace() || "company.team";
            flowYaml = `id: ${getRandomID()}
namespace: ${selectedNamespace}

@@ -75,7 +75,7 @@
        setTimeout(() => {
            this.flowStore
                .loadDependencies({namespace: flow.namespace, id: flow.id}, true)
                .then(({count}) => this.dependenciesCount = count);
                .then(({count}) => this.dependenciesCount = count > 0 ? (count - 1) : 0);
        }, 1000);
    }
},

@@ -295,7 +295,6 @@
</script>

<script>
import {mapState} from "vuex";
import {mapStores} from "pinia";
import {useExecutionsStore} from "../../stores/executions";
import _merge from "lodash/merge";
@@ -314,7 +313,7 @@
import MarkdownTooltip from "../layout/MarkdownTooltip.vue";
import Kicon from "../Kicon.vue";
import Labels from "../layout/Labels.vue";
import {storageKeys} from "../../utils/constants";
import {defaultNamespace} from "../../composables/useNamespaces";
import * as YAML_UTILS from "@kestra-io/ui-libs/flow-yaml-utils";
import YAML_CHART from "../dashboard/assets/executions_timeseries_chart.yaml?raw";
import {useAuthStore} from "override/stores/auth.ts";
@@ -431,7 +430,6 @@
        };
    },
    computed: {
        ...mapState("auth", ["user"]),
        ...mapStores(useExecutionsStore, useFlowStore, useAuthStore),
        user() {
            return this.authStore.user;
@@ -486,14 +484,11 @@
        }
    },
    beforeRouteEnter(to, _, next) {
        const defaultNamespace = localStorage.getItem(
            storageKeys.DEFAULT_NAMESPACE,
        );
        const query = {...to.query};
        let queryHasChanged = false;
        const queryKeys = Object.keys(query);
        if (defaultNamespace && !queryKeys.some(key => key.startsWith("filters[namespace]"))) {
            query["filters[namespace][PREFIX]"] = defaultNamespace;
        if (defaultNamespace() && !queryKeys.some(key => key.startsWith("filters[namespace]"))) {
            query["filters[namespace][PREFIX]"] = defaultNamespace();
            queryHasChanged = true;
        }

@@ -57,6 +57,7 @@
import * as YAML_UTILS from "@kestra-io/ui-libs/flow-yaml-utils";
import YAML_CHART from "../dashboard/assets/logs_timeseries_chart.yaml?raw";
import {useLogsStore} from "../../stores/logs";
import {defaultNamespace} from "../../composables/useNamespaces";

export default {
    mixins: [RouteContext, RestoreUrl, DataTableActions],
@@ -147,15 +148,12 @@
        }
    },
    beforeRouteEnter(to, _, next) {
        const defaultNamespace = localStorage.getItem(
            storageKeys.DEFAULT_NAMESPACE,
        );
        const query = {...to.query};
        let queryHasChanged = false;

        const queryKeys = Object.keys(query);
        if (defaultNamespace && !queryKeys.some(key => key.startsWith("filters[namespace]"))) {
            query["filters[namespace][PREFIX]"] = defaultNamespace;
        if (defaultNamespace() && !queryKeys.some(key => key.startsWith("filters[namespace]"))) {
            query["filters[namespace][PREFIX]"] = defaultNamespace();
            queryHasChanged = true;
        }

@@ -170,12 +168,6 @@
        }
    },
    methods: {
        LogFilterLanguage() {
            return LogFilterLanguage
        },
        onDateFilterTypeChange(event) {
            this.canAutoRefresh = event;
        },
        showStatChart() {
            return this.showChart;
        },

@@ -41,7 +41,7 @@
import {useNamespacesStore} from "override/stores/namespaces"
import DotsSquare from "vue-material-design-icons/DotsSquare.vue"
import Lock from "vue-material-design-icons/Lock.vue";
import {storageKeys} from "../../../utils/constants";
import {defaultNamespace} from "../../../composables/useNamespaces";

const {t} = useI18n();

@@ -79,13 +79,13 @@

onMounted(() => {
    if (modelValue.value === undefined || modelValue.value.length === 0) {
        const defaultNamespace = localStorage.getItem(storageKeys.DEFAULT_NAMESPACE);
        const defaultNamespaceVal = defaultNamespace();
        if (Array.isArray(modelValue.value)) {
            if (defaultNamespace != null) {
                modelValue.value = [defaultNamespace];
            if (defaultNamespaceVal != null) {
                modelValue.value = [defaultNamespaceVal];
            }
        } else {
            modelValue.value = defaultNamespace ?? modelValue.value;
            modelValue.value = defaultNamespaceVal ?? modelValue.value;
        }
    }
})

@@ -277,6 +277,7 @@
import Column from "./components/block/Column.vue"
import {useAuthStore} from "override/stores/auth"
import {useFlowStore} from "../../stores/flow"
import {defaultNamespace} from "../../composables/useNamespaces";

export const DATE_FORMAT_STORAGE_KEY = "dateFormat";
export const TIMEZONE_STORAGE_KEY = "timezone";
@@ -342,7 +343,7 @@
    };
},
created() {
    this.pendingSettings.defaultNamespace = localStorage.getItem("defaultNamespace") || "company.team";
    this.pendingSettings.defaultNamespace = defaultNamespace();
    this.pendingSettings.editorType = localStorage.getItem(storageKeys.EDITOR_VIEW_TYPE) || "YAML";
    this.pendingSettings.defaultLogLevel = localStorage.getItem("defaultLogLevel") || "INFO";
    this.pendingSettings.lang = Utils.getLang();

@@ -173,9 +173,6 @@
        },
    },
    methods: {
        onDateFilterTypeChange(event) {
            this.canAutoRefresh = event;
        },
        isRunning(item){
            return State.isRunning(item.state.current);
        },

@@ -47,14 +47,14 @@ export function useDataTableActions(options: DataTableActionsOptions = {}) {
    const embed = computed(() => options.embed);
    const dataTableRef = computed(() => options.dataTableRef?.value);

    const sortString = (sortItem: SortItem): string | undefined => {
    const sortString = (sortItem: SortItem, sortKeyMapper: (k: string) => string): string | undefined => {
        if (sortItem && sortItem.prop && sortItem.order) {
            return `${sortItem.prop}:${sortItem.order === "descending" ? "desc" : "asc"}`;
            return `${sortKeyMapper(sortItem.prop)}:${sortItem.order === "descending" ? "desc" : "asc"}`;
        }
    };

    const onSort = (sortItem: SortItem) => {
        internalSort.value = sortString(sortItem);
    const onSort = (sortItem: SortItem, sortKeyMapper = (k: string) => k) => {
        internalSort.value = sortString(sortItem, sortKeyMapper);

        if (internalSort.value) {
            const sort = internalSort.value;

@@ -1,6 +1,7 @@
import {Store} from "vuex";
import {EntityIterator} from "./entityIterator.ts";
import {useNamespacesStore} from "override/stores/namespaces.ts";
import {storageKeys} from "../utils/constants.ts";

export interface Namespace {
    id: string;
@@ -22,6 +23,10 @@ export class NamespaceIterator extends EntityIterator<Namespace>{
    }
}

export function defaultNamespace() {
    return localStorage.getItem(storageKeys.DEFAULT_NAMESPACE);
}

export default function useNamespaces(store: Store<any>, fetchSize: number, options?: any): NamespaceIterator {
    return new NamespaceIterator(store, fetchSize, options);
}

@@ -1,5 +1,6 @@
import {computed, nextTick, onMounted, ref} from "vue";
import {useRoute, useRouter} from "vue-router";
import {defaultNamespace} from "./useNamespaces.ts";

interface UseRestoreUrlOptions {
    restoreUrl?: boolean;
@@ -57,8 +58,8 @@ export default function useRestoreUrl(options: UseRestoreUrlOptions = {}) {

    let change = false;

    if (!localExist && isDefaultNamespaceAllow && localStorage.getItem("defaultNamespace")) {
        local["namespace"] = localStorage.getItem("defaultNamespace");
    if (!localExist && isDefaultNamespaceAllow && defaultNamespace()) {
        local["namespace"] = defaultNamespace();
    }

    for (const key in local) {

@@ -44,13 +44,13 @@ export default {
        }
    },
    methods: {
        sortString(sortItem) {
        sortString(sortItem, sortKeyMapper) {
            if (sortItem && sortItem.prop && sortItem.order) {
                return `${sortItem.prop}:${sortItem.order === "descending" ? "desc" : "asc"}`;
                return `${sortKeyMapper(sortItem.prop)}:${sortItem.order === "descending" ? "desc" : "asc"}`;
            }
        },
        onSort(sortItem) {
            this.internalSort = this.sortString(sortItem);
        onSort(sortItem, sortKeyMapper = (k) => k) {
            this.internalSort = this.sortString(sortItem, sortKeyMapper);

            if (this.internalSort) {
                const sort = this.internalSort;

@@ -165,10 +165,9 @@ export default {
        .then(message => {
            this.$toast()
                .confirm(message, () => {
                    // TODO: When flow store is migrated to Pinia, this will be simplified:
                    const deletePromise = this.dataType === "template"
                        ? this.templateStore.deleteTemplate(item)
                        : this.$store.dispatch(`${this.dataType}/delete${this.dataType.capitalize()}`, item);
                        : this.flowStore.deleteFlow(item);

                    return deletePromise
                        .then(() => {
@@ -249,7 +248,7 @@ export default {
            // TODO: When flow store is migrated to Pinia, this will be simplified:
            const createPromise = this.dataType === "template"
                ? this.templateStore.createTemplate({template: this.content})
                : this.$store.dispatch(`${this.dataType}/create${this.dataType.capitalize()}`, {[this.dataType]: this.content});
                : this.flowStore.createFlow({flow: this.content});

            createPromise
                .then((data) => {

@@ -1,3 +1,5 @@
import {defaultNamespace} from "../composables/useNamespaces.js";

export default {
    props: {
        restoreUrl: {
@@ -55,8 +57,8 @@ export default {

        let change = false

        if (!localExist && this.isDefaultNamespaceAllow && localStorage.getItem("defaultNamespace")) {
            local["namespace"] = localStorage.getItem("defaultNamespace");
        if (!localExist && this.isDefaultNamespaceAllow && defaultNamespace()) {
            local["namespace"] = defaultNamespace();
        }

        for (const key in local) {

@@ -69,7 +69,7 @@
import {useRoute} from "vue-router";
import useRouteContext from "../../../mixins/useRouteContext.ts";
import {useStore} from "vuex";
import useNamespaces, {Namespace} from "../../../composables/useNamespaces.ts";
import useNamespaces, {Namespace} from "../../../composables/useNamespaces";
import {useI18n} from "vue-i18n";
import {useMiscStore} from "override/stores/misc";

@@ -1,5 +1,6 @@
import {defineStore} from "pinia"
import {trackFileOpen} from "../utils/tabTracking";
import {useNamespacesStore} from "../override/stores/namespaces";

export interface EditorTabProps {
    name: string;
@@ -24,14 +25,15 @@ export const useEditorStore = defineStore("editor", {
    }),
    actions: {
        saveAllTabs({namespace}: {namespace: string}) {
            const namespaceStore = useNamespacesStore();
            return Promise.all(
                this.tabs.map(async (tab) => {
                    if(tab.flow) return;
                    await this.vuexStore.dispatch("namespace/createFile", {
                    if(tab.flow || !tab.content) return;
                    await namespaceStore.createFile( {
                        namespace,
                        path: tab.path ?? tab.name,
                        content: tab.content,
                    }, {root: true});
                    });
                    this.setTabDirty({
                        name: tab.name,
                        path: tab.path,
@@ -55,7 +57,7 @@ export const useEditorStore = defineStore("editor", {
            if (index === -1) {
                this.tabs.push({name, extension, persistent, path, flow});
                isDirty = false;

                if (path && !flow) {
                    const fileName = name || path.split("/").pop() || "";
                    trackFileOpen(fileName);

@@ -4,7 +4,7 @@ import permission from "../models/permission";
import action from "../models/action";
import * as YAML_UTILS from "@kestra-io/ui-libs/flow-yaml-utils";
import Utils from "../utils/utils";
import {editorViewTypes, storageKeys} from "../utils/constants";
import {editorViewTypes} from "../utils/constants";
import {apiUrl} from "override/utils/route";
import {useCoreStore} from "./core";
import {useEditorStore} from "./editor";
@@ -18,6 +18,7 @@ import {transformResponse} from "../components/dependencies/composables/useDepen
import {useNamespacesStore} from "override/stores/namespaces";
import {useAuthStore} from "override/stores/auth";
import {useRoute} from "vue-router";
import {defaultNamespace} from "../composables/useNamespaces.ts";

const textYamlHeader = {
    headers: {
@@ -134,8 +135,7 @@ export const useFlowStore = defineStore("flow", () => {
    const route = useRoute();

    const getNamespace = () => {
        const defaultNamespace = localStorage.getItem(storageKeys.DEFAULT_NAMESPACE);
        return route.query.namespace || defaultNamespace || "company.team";
        return route.query.namespace || defaultNamespace();
    }

    async function save({content, namespace}: { content?: string, namespace?: string }) {

@@ -157,7 +157,7 @@ public class MiscController {
    @ExecuteOn(TaskExecutors.IO)
    @Operation(tags = {"Misc"}, summary = "Configure basic authentication for the instance.", description = "Sets up basic authentication credentials.")
    public HttpResponse<Void> createBasicAuth(
        @RequestBody(description = "") @Body BasicAuthCredentials basicAuthCredentials
        @RequestBody @Body BasicAuthCredentials basicAuthCredentials
    ) {
        basicAuthService.save(basicAuthCredentials.getUid(), new BasicAuthService.BasicAuthConfiguration(basicAuthCredentials.getUsername(), basicAuthCredentials.getPassword()));

@@ -18,14 +18,13 @@ import io.micronaut.web.router.RouteMatch;
import io.micronaut.web.router.RouteMatchUtils;
import jakarta.inject.Inject;
import org.reactivestreams.Publisher;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Schedulers;

import java.util.Base64;
import java.util.Collection;
import java.util.Optional;

import reactor.core.publisher.Mono;
import reactor.core.scheduler.Schedulers;

// We want to authenticate only Kestra endpoints
@Filter("/api/v1/**")
@Requires(property = "kestra.server-type", pattern = "(WEBSERVER|STANDALONE)")
@@ -48,7 +47,7 @@ public class AuthenticationFilter implements HttpServerFilter {
    public Publisher<MutableHttpResponse<?>> doFilter(HttpRequest<?> request, ServerFilterChain chain) {
        return Mono.fromCallable(() -> {
            SaltedBasicAuthConfiguration configuration = basicAuthService.configuration();
            if (configuration == null ){
            if (configuration == null) {
                configuration = new SaltedBasicAuthConfiguration();
            }
            return configuration;
@@ -57,8 +56,10 @@
            .flux()
            .flatMap(basicAuthConfiguration -> {
                boolean isConfigEndpoint = request.getPath().endsWith("/configs")
                    || request.getPath().endsWith("/basicAuth")
                    || request.getPath().endsWith("/basicAuthValidationErrors");
                    || (
                        (request.getPath().endsWith("/basicAuth") || request.getPath().endsWith("/basicAuthValidationErrors"))
                        && !basicAuthService.isBasicAuthInitialized()
                    );

                boolean isOpenUrl = Optional.ofNullable(basicAuthConfiguration.getOpenUrls())
                    .map(Collection::stream)

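The reworked predicate keeps /configs always public but exposes the basic-auth setup endpoints only until credentials have been initialized. The same truth table, extracted into a standalone function for readability (hypothetical names; the filter above embeds this logic inline):

// Hypothetical extraction of the gating rule shown in AuthenticationFilter above.
final class ConfigEndpointGate {
    static boolean isOpen(String path, boolean basicAuthInitialized) {
        if (path.endsWith("/configs")) {
            return true; // always public
        }
        boolean setupEndpoint = path.endsWith("/basicAuth") || path.endsWith("/basicAuthValidationErrors");
        return setupEndpoint && !basicAuthInitialized; // public only before setup
    }

    public static void main(String[] args) {
        System.out.println(isOpen("/api/v1/configs", true));                    // true
        System.out.println(isOpen("/api/v1/basicAuth", false));                 // true (before setup)
        System.out.println(isOpen("/api/v1/basicAuth", true));                  // false (after setup)
        System.out.println(isOpen("/api/v1/basicAuthValidationErrors", false)); // true
    }
}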
@@ -16,7 +16,7 @@ public record GeminiConfiguration (
    Double topP,
    @Nullable
    Integer topK,
    @Bindable(defaultValue = "4000")
    @Bindable(defaultValue = "8000")
    int maxOutputTokens,
    @Bindable(defaultValue = "false")
    boolean logRequests,

@@ -1,15 +1,18 @@
package io.kestra.webserver.filter;

import io.kestra.core.junit.annotations.KestraTest;
import io.kestra.core.models.Setting;
import io.kestra.core.repositories.SettingRepositoryInterface;
import io.kestra.core.utils.IdUtils;
import io.kestra.webserver.controllers.api.MiscController;
import io.kestra.webserver.services.BasicAuthService;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.HttpResponse;
import io.micronaut.http.HttpStatus;
import io.micronaut.http.MutableHttpResponse;
import io.micronaut.http.client.annotation.Client;
import io.micronaut.http.client.exceptions.HttpClientResponseException;
import io.micronaut.reactor.http.client.ReactorHttpClient;
import io.kestra.core.junit.annotations.KestraTest;
import jakarta.inject.Inject;
import org.junit.jupiter.api.Test;
import reactor.core.publisher.Mono;
@@ -24,6 +27,9 @@ class AuthenticationFilterTest {
    @Client("/")
    private ReactorHttpClient client;

    @Inject
    private BasicAuthService basicAuthService;

    @Inject
    private BasicAuthService.BasicAuthConfiguration basicAuthConfiguration;

@@ -35,19 +41,69 @@ class AuthenticationFilterTest {

    @Test
    void testConfigEndpointAlwaysOpen() {
        var response = client.toBlocking()
        var response = client.toBlocking()
            .exchange(HttpRequest.GET("/api/v1/configs").basicAuth("anonymous", "hacker"));
        assertThat(response.getStatus().getCode()).isEqualTo(HttpStatus.OK.getCode());
    }

        response = client.toBlocking()
            .exchange(HttpRequest.GET("/api/v1/basicAuthValidationErrors").basicAuth("anonymous", "hacker"));
    @Test
    void testBasicAuthOpenedBeforeSetupOnly() {
        TestAuthFilter.ENABLED = false;

        HttpClientResponseException httpClientResponseException = assertThrows(HttpClientResponseException.class, () -> client.toBlocking()
            .exchange(HttpRequest.GET("/api/v1/basicAuthValidationErrors")));
        assertThat(httpClientResponseException.getStatus().getCode()).isEqualTo(HttpStatus.UNAUTHORIZED.getCode());

        httpClientResponseException = assertThrows(HttpClientResponseException.class, () -> client.toBlocking()
            .exchange(HttpRequest.POST("/api/v1/basicAuth", new MiscController.BasicAuthCredentials(
                IdUtils.create(),
                "anonymous",
                "hacker"
            ))));
        assertThat(httpClientResponseException.getStatus().getCode()).isEqualTo(HttpStatus.UNAUTHORIZED.getCode());

        HttpResponse<?> response = client.toBlocking()
            .exchange(HttpRequest.POST("/api/v1/basicAuth", new MiscController.BasicAuthCredentials(
                IdUtils.create(),
                "anonymous@hacker",
                "hackerPassword1"
            )).basicAuth(basicAuthConfiguration.getUsername(), basicAuthConfiguration.getPassword()));
        assertThat(response.getStatus().getCode()).isEqualTo(HttpStatus.NO_CONTENT.getCode());

        response = client.toBlocking()
            .exchange(HttpRequest.GET("/api/v1/basicAuthValidationErrors").basicAuth("anonymous@hacker", "hackerPassword1"));
        assertThat(response.getStatus().getCode()).isEqualTo(HttpStatus.OK.getCode());

        // Only one basic-auth user is allowed, so the previous one is overridden
        httpClientResponseException = assertThrows(HttpClientResponseException.class, () -> client.toBlocking()
            .exchange(HttpRequest.GET("/api/v1/basicAuthValidationErrors").basicAuth(basicAuthConfiguration.getUsername(), basicAuthConfiguration.getPassword())));
        assertThat(httpClientResponseException.getStatus().getCode()).isEqualTo(HttpStatus.UNAUTHORIZED.getCode());

        assertThat(basicAuthService.isBasicAuthInitialized()).isTrue();
        settingRepository.delete(Setting.builder().key(BASIC_AUTH_SETTINGS_KEY).build());
        assertThat(basicAuthService.isBasicAuthInitialized()).isFalse();

        response = client.toBlocking()
            .exchange(HttpRequest.GET("/api/v1/basicAuthValidationErrors"));
        assertThat(response.getStatus().getCode()).isEqualTo(HttpStatus.OK.getCode());

        response = client.toBlocking()
            .exchange(HttpRequest.POST("/api/v1/basicAuth", new MiscController.BasicAuthCredentials(
                IdUtils.create(),
                basicAuthConfiguration.getUsername(),
                basicAuthConfiguration.getPassword()
            )));
        assertThat(response.getStatus().getCode()).isEqualTo(HttpStatus.NO_CONTENT.getCode());

        assertThat(basicAuthService.isBasicAuthInitialized()).isTrue();

        TestAuthFilter.ENABLED = true;
    }

    @Test
    void shouldWorkWithoutPersistedConfiguration() {
        settingRepository.delete(Setting.builder().key(BASIC_AUTH_SETTINGS_KEY).build());
        var response = client.toBlocking()
        var response = client.toBlocking()
            .exchange(HttpRequest.GET("/api/v1/configs").basicAuth("anonymous", "hacker"));
        assertThat(response.getStatus().getCode()).isEqualTo(HttpStatus.OK.getCode());
    }
@@ -117,7 +173,7 @@ class AuthenticationFilterTest {
    }

    @Test
    void should_unauthorized_without_token(){
    void should_unauthorized_without_token() {
        MutableHttpResponse<?> response = Mono.from(filter.doFilter(
            HttpRequest.GET("/api/v1/main/dashboards"), null)).block();
        assertThat(response.getStatus().getCode()).isEqualTo(HttpStatus.UNAUTHORIZED.getCode());

@@ -18,6 +18,7 @@ import org.reactivestreams.Publisher;
@Filter("/**")
@Requires(env = Environment.TEST)
public class TestAuthFilter implements HttpClientFilter {
    public static boolean ENABLED = true;

    @Inject
    private BasicAuthConfiguration basicAuthConfiguration;
@@ -28,16 +29,18 @@ public class TestAuthFilter implements HttpClientFilter {
    @Override
    public Publisher<? extends HttpResponse<?>> doFilter(MutableHttpRequest<?> request,
                                                         ClientFilterChain chain) {
        // Basic auth may be removed from the database by jdbcTestUtils.drop(); / jdbcTestUtils.migrate();
        // We need it back to be able to run the tests and avoid an NPE while checking the basic authorization
        if (basicAuthService.configuration() == null){
            basicAuthService.save(basicAuthConfiguration);
        }
        // Add a basic authorization header if no Authorization header is present on the request
        if (request.getHeaders().getAuthorization().isEmpty()) {
            String token = "Basic " + Base64.getEncoder().encodeToString(
                (basicAuthConfiguration.getUsername() + ":" + basicAuthConfiguration.getPassword()).getBytes());
            request.getHeaders().add(HttpHeaders.AUTHORIZATION, token);
        if (ENABLED) {
            // Basic auth may be removed from the database by jdbcTestUtils.drop(); / jdbcTestUtils.migrate();
            // We need it back to be able to run the tests and avoid an NPE while checking the basic authorization
            if (basicAuthService.configuration() == null) {
                basicAuthService.save(basicAuthConfiguration);
            }
            // Add a basic authorization header if no Authorization header is present on the request
            if (request.getHeaders().getAuthorization().isEmpty()) {
                String token = "Basic " + Base64.getEncoder().encodeToString(
                    (basicAuthConfiguration.getUsername() + ":" + basicAuthConfiguration.getPassword()).getBytes());
                request.getHeaders().add(HttpHeaders.AUTHORIZATION, token);
            }
        }
        return chain.proceed(request);
    }
@@ -46,4 +49,4 @@ public class TestAuthFilter implements HttpClientFilter {
    public int getOrder() {
        return ServerFilterPhase.SECURITY.order() - 1;
    }
}