Mirror of https://github.com/getredash/redash.git — synced 2025-12-25 10:00:45 -05:00

Compare commits: v25.1.0 ... 24.02.0-de (1 commit)

| Author | SHA1 | Date |
|---|---|---|
|  | 75bd3288cc |  |

In the hunks below, where a changed line appears in two variants back to back, the pair shows the two sides of the change.
@@ -1,11 +1,11 @@
FROM cypress/browsers:node18.12.0-chrome106-ff106
FROM cypress/browsers:node16.18.0-chrome90-ff88

ENV APP /usr/src/app

WORKDIR $APP

COPY package.json yarn.lock .yarnrc $APP/
COPY viz-lib $APP/viz-lib
RUN npm install yarn@1.22.22 -g && yarn --frozen-lockfile --network-concurrency 1 > /dev/null
RUN npm install yarn@1.22.19 -g && yarn --frozen-lockfile --network-concurrency 1 > /dev/null

COPY . $APP
@@ -1,3 +1,4 @@
version: '2.2'
services:
redash:
build: ../
@@ -18,7 +19,7 @@ services:
image: redis:7-alpine
restart: unless-stopped
postgres:
image: pgautoupgrade/pgautoupgrade:latest
image: pgautoupgrade/pgautoupgrade:15-alpine3.8
command: "postgres -c fsync=off -c full_page_writes=off -c synchronous_commit=OFF"
restart: unless-stopped
environment:
@@ -1,3 +1,4 @@
version: "2.2"
x-redash-service: &redash-service
build:
context: ../
@@ -66,7 +67,7 @@ services:
image: redis:7-alpine
restart: unless-stopped
postgres:
image: pgautoupgrade/pgautoupgrade:latest
image: pgautoupgrade/pgautoupgrade:15-alpine3.8
command: "postgres -c fsync=off -c full_page_writes=off -c synchronous_commit=OFF"
restart: unless-stopped
environment:
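Both compose files run the CI Postgres with durability features switched off, which trades crash safety for speed — a sensible trade only for throwaway test databases. A minimal sketch of running the same image with the same flags outside compose (the container name and trust-auth setting are illustrative, not from the compose files):

```bash
# CI-style throwaway Postgres: fsync and friends disabled for speed.
docker run --rm -d --name redash-ci-pg \
  -e POSTGRES_HOST_AUTH_METHOD=trust \
  pgautoupgrade/pgautoupgrade:latest \
  postgres -c fsync=off -c full_page_writes=off -c synchronous_commit=OFF
```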
@@ -1,4 +1,5 @@
client/.tmp/
client/dist/
node_modules/
viz-lib/node_modules/
.tmp/
161  .github/workflows/ci.yml  vendored
@@ -3,24 +3,17 @@ on:
push:
branches:
- master
pull_request_target:
branches:
- master
pull_request:
env:
NODE_VERSION: 18
YARN_VERSION: 1.22.22
NODE_VERSION: 16.20.1
jobs:
backend-lint:
runs-on: ubuntu-22.04
steps:
- if: github.event.pull_request.mergeable == 'false'
name: Exit if PR is not mergeable
run: exit 1
- uses: actions/checkout@v4
- uses: actions/checkout@v3
with:
fetch-depth: 1
ref: ${{ github.event.pull_request.head.sha }}
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: '3.8'
- run: sudo pip install black==23.1.0 ruff==0.0.287
@@ -31,18 +24,14 @@ jobs:
runs-on: ubuntu-22.04
needs: backend-lint
env:
COMPOSE_FILE: .ci/compose.ci.yaml
COMPOSE_FILE: .ci/docker-compose.ci.yml
COMPOSE_PROJECT_NAME: redash
COMPOSE_DOCKER_CLI_BUILD: 1
DOCKER_BUILDKIT: 1
steps:
- if: github.event.pull_request.mergeable == 'false'
name: Exit if PR is not mergeable
run: exit 1
- uses: actions/checkout@v4
- uses: actions/checkout@v3
with:
fetch-depth: 1
ref: ${{ github.event.pull_request.head.sha }}
- name: Build Docker Images
run: |
set -x
@@ -62,15 +51,13 @@ jobs:
docker cp tests:/app/junit.xml /tmp/test-results/unit-tests/results.xml
- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
- name: Store Test Results
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: backend-test-results
name: test-results
path: /tmp/test-results
- name: Store Coverage Results
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: coverage
path: coverage.xml
@@ -78,47 +65,39 @@ jobs:
frontend-lint:
runs-on: ubuntu-22.04
steps:
- if: github.event.pull_request.mergeable == 'false'
name: Exit if PR is not mergeable
run: exit 1
- uses: actions/checkout@v4
- uses: actions/checkout@v3
with:
fetch-depth: 1
ref: ${{ github.event.pull_request.head.sha }}
- uses: actions/setup-node@v4
- uses: actions/setup-node@v3
with:
node-version: ${{ env.NODE_VERSION }}
cache: 'yarn'
- name: Install Dependencies
run: |
npm install --global --force yarn@$YARN_VERSION
npm install --global --force yarn@1.22.19
yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
- name: Run Lint
run: yarn lint:ci
- name: Store Test Results
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: frontend-test-results
name: test-results
path: /tmp/test-results

frontend-unit-tests:
runs-on: ubuntu-22.04
needs: frontend-lint
steps:
- if: github.event.pull_request.mergeable == 'false'
name: Exit if PR is not mergeable
run: exit 1
- uses: actions/checkout@v4
- uses: actions/checkout@v3
with:
fetch-depth: 1
ref: ${{ github.event.pull_request.head.sha }}
- uses: actions/setup-node@v4
- uses: actions/setup-node@v3
with:
node-version: ${{ env.NODE_VERSION }}
cache: 'yarn'
- name: Install Dependencies
run: |
npm install --global --force yarn@$YARN_VERSION
npm install --global --force yarn@1.22.19
yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
- name: Run App Tests
run: yarn test
@@ -130,22 +109,18 @@ jobs:
runs-on: ubuntu-22.04
needs: frontend-lint
env:
COMPOSE_FILE: .ci/compose.cypress.yaml
COMPOSE_FILE: .ci/docker-compose.cypress.yml
COMPOSE_PROJECT_NAME: cypress
PERCY_TOKEN_ENCODED: ZGRiY2ZmZDQ0OTdjMzM5ZWE0ZGQzNTZiOWNkMDRjOTk4Zjg0ZjMxMWRmMDZiM2RjOTYxNDZhOGExMjI4ZDE3MA==
CYPRESS_PROJECT_ID_ENCODED: OTI0Y2th
CYPRESS_RECORD_KEY_ENCODED: YzA1OTIxMTUtYTA1Yy00NzQ2LWEyMDMtZmZjMDgwZGI2ODgx
CYPRESS_INSTALL_BINARY: 0
PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: 1
PERCY_TOKEN: ${{ secrets.PERCY_TOKEN }}
CYPRESS_PROJECT_ID: ${{ secrets.CYPRESS_PROJECT_ID }}
CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
steps:
- if: github.event.pull_request.mergeable == 'false'
name: Exit if PR is not mergeable
run: exit 1
- uses: actions/checkout@v4
- uses: actions/checkout@v3
with:
fetch-depth: 1
ref: ${{ github.event.pull_request.head.sha }}
- uses: actions/setup-node@v4
- uses: actions/setup-node@v3
with:
node-version: ${{ env.NODE_VERSION }}
cache: 'yarn'
@@ -155,7 +130,7 @@ jobs:
echo "CODE_COVERAGE=true" >> "$GITHUB_ENV"
- name: Install Dependencies
run: |
npm install --global --force yarn@$YARN_VERSION
npm install --global --force yarn@1.22.19
yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
- name: Setup Redash Server
run: |
@@ -171,7 +146,93 @@ jobs:
- name: Copy Code Coverage Results
run: docker cp cypress:/usr/src/app/coverage ./coverage || true
- name: Store Coverage Results
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: coverage
path: coverage

build-skip-check:
runs-on: ubuntu-22.04
outputs:
skip: ${{ steps.skip-check.outputs.skip }}
steps:
- name: Skip?
id: skip-check
run: |
if [[ "${{ vars.DOCKER_USER }}" == '' ]]; then
echo 'Docker user is empty. Skipping build+push'
echo skip=true >> "$GITHUB_OUTPUT"
elif [[ "${{ secrets.DOCKER_PASS }}" == '' ]]; then
echo 'Docker password is empty. Skipping build+push'
echo skip=true >> "$GITHUB_OUTPUT"
elif [[ "${{ github.ref_name }}" != 'master' ]]; then
echo 'Ref name is not `master`. Skipping build+push'
echo skip=true >> "$GITHUB_OUTPUT"
else
echo 'Docker user and password are set and branch is `master`.'
echo 'Building + pushing `preview` image.'
echo skip=false >> "$GITHUB_OUTPUT"
fi

build-docker-image:
runs-on: ubuntu-22.04
needs:
- backend-unit-tests
- frontend-unit-tests
- frontend-e2e-tests
- build-skip-check
if: needs.build-skip-check.outputs.skip == 'false'
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 1
- uses: actions/setup-node@v3
with:
node-version: ${{ env.NODE_VERSION }}
cache: 'yarn'
- name: Install Dependencies
run: |
npm install --global --force yarn@1.22.19
yarn cache clean && yarn --frozen-lockfile --network-concurrency 1

- name: Set up QEMU
timeout-minutes: 1
uses: docker/setup-qemu-action@v2.2.0

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2

- name: Login to DockerHub
uses: docker/login-action@v2
with:
username: ${{ vars.DOCKER_USER }}
password: ${{ secrets.DOCKER_PASS }}

- name: Bump version
id: version
run: |
set -x
.ci/update_version
VERSION=$(jq -r .version package.json)
VERSION_TAG="${VERSION}.b${GITHUB_RUN_ID}.${GITHUB_RUN_NUMBER}"
echo "VERSION_TAG=$VERSION_TAG" >> "$GITHUB_OUTPUT"

- name: Build and push preview image to Docker Hub
uses: docker/build-push-action@v4
with:
push: true
tags: |
redash/redash:preview
redash/preview:${{ steps.version.outputs.VERSION_TAG }}
context: .
build-args: |
test_all_deps=true
cache-from: type=gha
cache-to: type=gha,mode=max
platforms: linux/amd64
env:
DOCKER_CONTENT_TRUST: true

- name: "Failure: output container logs to console"
if: failure()
run: docker compose logs
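The "Bump version" step above composes the preview tag from the `package.json` version plus GitHub's run identifiers. A minimal local sketch of the same construction — the run-id values below are made-up stand-ins for what Actions injects into the job environment:

```bash
#!/usr/bin/env bash
# Sketch of the tag construction used by the "Bump version" step.
# GITHUB_RUN_ID / GITHUB_RUN_NUMBER are illustrative stand-ins.
GITHUB_RUN_ID=9000000001
GITHUB_RUN_NUMBER=42

VERSION=$(jq -r .version package.json)   # e.g. "24.02.0-dev"
VERSION_TAG="${VERSION}.b${GITHUB_RUN_ID}.${GITHUB_RUN_NUMBER}"
echo "$VERSION_TAG"                      # e.g. "24.02.0-dev.b9000000001.42"
```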
81  .github/workflows/periodic-snapshot.yml  vendored
@@ -1,24 +1,11 @@
name: Periodic Snapshot

# 10 minutes after midnight on the first of every month
on:
schedule:
- cron: '10 0 1 * *' # 10 minutes after midnight on the first of every month
workflow_dispatch:
inputs:
bump:
description: 'Bump the last digit of the version'
required: false
type: boolean
version:
description: 'Specific version to set'
required: false
default: ''

env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- cron: "10 0 1 * *"

permissions:
actions: write
contents: write

jobs:
@@ -26,60 +13,14 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
ssh-key: ${{ secrets.ACTION_PUSH_KEY }}

- run: |
git config user.name 'github-actions[bot]'
git config user.email '41898282+github-actions[bot]@users.noreply.github.com'

# Function to bump the version
bump_version() {
local version="$1"
local IFS=.
read -r major minor patch <<< "$version"
patch=$((patch + 1))
echo "$major.$minor.$patch-dev"
}

# Determine the new version tag
if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
BUMP_INPUT="${{ github.event.inputs.bump }}"
SPECIFIC_VERSION="${{ github.event.inputs.version }}"

# Check if both bump and specific version are provided
if [ "$BUMP_INPUT" = "true" ] && [ -n "$SPECIFIC_VERSION" ]; then
echo "::error::Error: Cannot specify both bump and specific version."
exit 1
fi

if [ -n "$SPECIFIC_VERSION" ]; then
TAG_NAME="$SPECIFIC_VERSION-dev"
elif [ "$BUMP_INPUT" = "true" ]; then
CURRENT_VERSION=$(grep '"version":' package.json | awk -F\" '{print $4}')
TAG_NAME=$(bump_version "$CURRENT_VERSION")
else
echo "No version bump or specific version provided for manual dispatch."
exit 1
fi
else
TAG_NAME="$(date +%y.%m).0-dev"
fi

echo "New version tag: $TAG_NAME"

# Update version in files
gawk -i inplace -F: -v q=\" -v tag=${TAG_NAME} '/^ "version": / { print $1 FS, q tag q ","; next} { print }' package.json
gawk -i inplace -F= -v q=\" -v tag=${TAG_NAME} '/^__version__ =/ { print $1 FS, q tag q; next} { print }' redash/__init__.py
gawk -i inplace -F= -v q=\" -v tag=${TAG_NAME} '/^version =/ { print $1 FS, q tag q; next} { print }' pyproject.toml

date="$(date +%y.%m).0-dev"
gawk -i inplace -F: -v q=\" -v tag=$date '/^ "version": / { print $1 FS, q tag q ","; next} { print }' package.json
gawk -i inplace -F= -v q=\" -v tag=$date '/^__version__ =/ { print $1 FS, q tag q; next} { print }' redash/__init__.py
gawk -i inplace -F= -v q=\" -v tag=$date '/^version =/ { print $1 FS, q tag q; next} { print }' pyproject.toml
git config user.name github-actions
git config user.email github-actions@github.com
git add package.json redash/__init__.py pyproject.toml
git commit -m "Snapshot: ${TAG_NAME}"
git tag ${TAG_NAME}
git push --atomic origin master refs/tags/${TAG_NAME}

# Run the 'preview-image' workflow if run this workflow manually
# For more information, please see the: https://docs.github.com/en/actions/security-guides/automatic-token-authentication
if [ "$BUMP_INPUT" = "true" ] || [ -n "$SPECIFIC_VERSION" ]; then
gh workflow run preview-image.yml --ref $TAG_NAME
fi
git commit -m "Snapshot: ${date}"
git tag $date
git push --atomic origin master refs/tags/$date
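Two pieces of the snapshot script above are worth unpacking: `bump_version` splits the version on dots, increments the patch component, and re-appends the `-dev` suffix; and the `gawk -i inplace` calls rewrite the version fields in `package.json`, `redash/__init__.py`, and `pyproject.toml`. A runnable sketch of both, exercised against a scratch file — the sample version strings and the `/tmp` path are illustrative:

```bash
#!/usr/bin/env bash
# Same bump logic as the workflow: split on dots, increment patch, add -dev.
bump_version() {
  local version="$1"
  local IFS=.
  read -r major minor patch <<< "$version"
  patch=$((patch + 1))
  echo "$major.$minor.$patch-dev"
}

bump_version "25.1.0"   # prints: 25.1.1-dev

# The workflow's gawk in-place rewrite, demonstrated on a scratch copy of a
# package.json. `-v q=\"` passes a literal double quote so the rewritten
# line keeps valid JSON quoting.
TAG_NAME="25.1.1-dev"
printf '{\n  "version": "25.1.0",\n  "name": "redash-client"\n}\n' > /tmp/package.json
gawk -i inplace -F: -v q=\" -v tag=${TAG_NAME} \
  '/^  "version": / { print $1 FS, q tag q ","; next } { print }' /tmp/package.json
cat /tmp/package.json   # now contains: "version": "25.1.1-dev",
```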
120  .github/workflows/preview-image.yml  vendored
@@ -1,120 +0,0 @@
name: Preview Image
on:
push:
tags:
- '*-dev'
workflow_dispatch:
inputs:
dockerRepository:
description: 'Docker repository'
required: true
default: 'preview'
type: choice
options:
- preview
- redash

env:
NODE_VERSION: 18

jobs:
build-skip-check:
runs-on: ubuntu-22.04
outputs:
skip: ${{ steps.skip-check.outputs.skip }}
steps:
- name: Skip?
id: skip-check
run: |
if [[ "${{ vars.DOCKER_USER }}" == '' ]]; then
echo 'Docker user is empty. Skipping build+push'
echo skip=true >> "$GITHUB_OUTPUT"
elif [[ "${{ secrets.DOCKER_PASS }}" == '' ]]; then
echo 'Docker password is empty. Skipping build+push'
echo skip=true >> "$GITHUB_OUTPUT"
else
echo 'Docker user and password are set and branch is `master`.'
echo 'Building + pushing `preview` image.'
echo skip=false >> "$GITHUB_OUTPUT"
fi

build-docker-image:
runs-on: ubuntu-22.04
needs:
- build-skip-check
if: needs.build-skip-check.outputs.skip == 'false'
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 1
ref: ${{ github.event.push.after }}

- uses: actions/setup-node@v4
with:
node-version: ${{ env.NODE_VERSION }}
cache: 'yarn'

- name: Set up QEMU
uses: docker/setup-qemu-action@v3
with:
platforms: arm64

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3

- name: Login to DockerHub
uses: docker/login-action@v3
with:
username: ${{ vars.DOCKER_USER }}
password: ${{ secrets.DOCKER_PASS }}

- name: Install Dependencies
run: |
npm install --global --force yarn@1.22.22
yarn cache clean && yarn --frozen-lockfile --network-concurrency 1

- name: Set version
id: version
run: |
set -x
.ci/update_version
VERSION_TAG=$(jq -r .version package.json)
echo "VERSION_TAG=$VERSION_TAG" >> "$GITHUB_OUTPUT"

# TODO: We can use GitHub Actions's matrix option to reduce the build time.
- name: Build and push preview image to Docker Hub
uses: docker/build-push-action@v4
if: ${{ github.event.inputs.dockerRepository == 'preview' || !github.event.workflow_run }}
with:
push: true
tags: |
redash/redash:preview
redash/preview:${{ steps.version.outputs.VERSION_TAG }}
context: .
build-args: |
test_all_deps=true
cache-from: type=gha,scope=multi-platform
cache-to: type=gha,mode=max,scope=multi-platform
platforms: linux/amd64,linux/arm64
env:
DOCKER_CONTENT_TRUST: true

- name: Build and push release image to Docker Hub
uses: docker/build-push-action@v4
if: ${{ github.event.inputs.dockerRepository == 'redash' }}
with:
push: true
tags: |
redash/redash:${{ steps.version.outputs.VERSION_TAG }}
context: .
build-args: |
test_all_deps=true
cache-from: type=gha,scope=multi-platform
cache-to: type=gha,mode=max,scope=multi-platform
platforms: linux/amd64,linux/arm64
env:
DOCKER_CONTENT_TRUST: true

- name: "Failure: output container logs to console"
if: failure()
run: docker compose logs
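As removed here, the Preview Image workflow fired on any pushed tag matching `*-dev`, or on manual dispatch with a choice of Docker repository. Under those triggers, publishing a preview amounted to pushing a matching tag — a sketch with an illustrative tag name:

```bash
# Pushing a *-dev tag is what fired this workflow's `push: tags:` trigger.
# The tag name below is illustrative.
git tag 25.2.0-dev
git push origin refs/tags/25.2.0-dev

# Manual runs could instead target the release repository via the
# dockerRepository input defined above:
gh workflow run preview-image.yml --ref master -f dockerRepository=redash
```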
36  .github/workflows/restyled.yml  vendored
@@ -1,36 +0,0 @@
name: Restyled

on:
pull_request:

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

jobs:
restyled:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }}

- uses: restyled-io/actions/setup@v4
- id: restyler
uses: restyled-io/actions/run@v4
with:
fail-on-differences: true

- if: |
!cancelled() &&
steps.restyler.outputs.success == 'true' &&
github.event.pull_request.head.repo.full_name == github.repository
uses: peter-evans/create-pull-request@v6
with:
base: ${{ steps.restyler.outputs.restyled-base }}
branch: ${{ steps.restyler.outputs.restyled-head }}
title: ${{ steps.restyler.outputs.restyled-title }}
body: ${{ steps.restyler.outputs.restyled-body }}
labels: "restyled"
reviewers: ${{ github.event.pull_request.user.login }}
delete-branch: true
1  .gitignore  vendored
@@ -17,7 +17,6 @@ client/dist
_build
.vscode
.env
.tool-versions

dump.rdb
@@ -38,9 +38,7 @@ request_review: author
#
# These can be used to tell other automation to avoid our PRs.
#
labels:
- restyled
- "Skip CI"
labels: ["Skip CI"]

# Labels to ignore
#
@@ -52,13 +50,13 @@ labels:
# Restylers to run, and how
restylers:
- name: black
image: restyled/restyler-black:v24.4.2
image: restyled/restyler-black:v19.10b0
include:
- redash
- tests
- migrations/versions
- name: prettier
image: restyled/restyler-prettier:v3.3.2-2
image: restyled/restyler-prettier:v1.19.1-2
command:
- prettier
- --write
56  Dockerfile
@@ -1,6 +1,6 @@
FROM node:18-bookworm AS frontend-builder
FROM node:16.20.1-bookworm as frontend-builder

RUN npm install --global --force yarn@1.22.22
RUN npm install --global --force yarn@1.22.19

# Controls whether to build the frontend assets
ARG skip_frontend_build
@@ -14,30 +14,18 @@ USER redash
WORKDIR /frontend
COPY --chown=redash package.json yarn.lock .yarnrc /frontend/
COPY --chown=redash viz-lib /frontend/viz-lib
COPY --chown=redash scripts /frontend/scripts

# Controls whether to instrument code for coverage information
ARG code_coverage
ENV BABEL_ENV=${code_coverage:+test}

# Avoid issues caused by lags in disk and network I/O speeds when working on top of QEMU emulation for multi-platform image building.
RUN yarn config set network-timeout 300000

RUN if [ "x$skip_frontend_build" = "x" ] ; then yarn --frozen-lockfile --network-concurrency 1; fi

COPY --chown=redash client /frontend/client
COPY --chown=redash webpack.config.js /frontend/
RUN <<EOF
if [ "x$skip_frontend_build" = "x" ]; then
yarn build
else
mkdir -p /frontend/client/dist
touch /frontend/client/dist/multi_org.html
touch /frontend/client/dist/index.html
fi
EOF
RUN if [ "x$skip_frontend_build" = "x" ] ; then yarn build; else mkdir -p /frontend/client/dist && touch /frontend/client/dist/multi_org.html && touch /frontend/client/dist/index.html; fi

FROM python:3.10-slim-bookworm
FROM python:3.8-slim-bookworm

EXPOSE 5000

@@ -75,34 +63,28 @@ RUN apt-get update && \

ARG TARGETPLATFORM
ARG databricks_odbc_driver_url=https://databricks-bi-artifacts.s3.us-east-2.amazonaws.com/simbaspark-drivers/odbc/2.6.26/SimbaSparkODBC-2.6.26.1045-Debian-64bit.zip
RUN <<EOF
if [ "$TARGETPLATFORM" = "linux/amd64" ]; then
curl https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor -o /usr/share/keyrings/microsoft-prod.gpg
curl https://packages.microsoft.com/config/debian/12/prod.list > /etc/apt/sources.list.d/mssql-release.list
apt-get update
ACCEPT_EULA=Y apt-get install -y --no-install-recommends msodbcsql18
apt-get clean
rm -rf /var/lib/apt/lists/*
curl "$databricks_odbc_driver_url" --location --output /tmp/simba_odbc.zip
chmod 600 /tmp/simba_odbc.zip
unzip /tmp/simba_odbc.zip -d /tmp/simba
dpkg -i /tmp/simba/*.deb
printf "[Simba]\nDriver = /opt/simba/spark/lib/64/libsparkodbc_sb64.so" >> /etc/odbcinst.ini
rm /tmp/simba_odbc.zip
rm -rf /tmp/simba
fi
EOF
RUN if [ "$TARGETPLATFORM" = "linux/amd64" ]; then \
curl https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor -o /usr/share/keyrings/microsoft-prod.gpg \
&& curl https://packages.microsoft.com/config/debian/12/prod.list > /etc/apt/sources.list.d/mssql-release.list \
&& apt-get update \
&& ACCEPT_EULA=Y apt-get install -y --no-install-recommends msodbcsql17 \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* \
&& curl "$databricks_odbc_driver_url" --location --output /tmp/simba_odbc.zip \
&& chmod 600 /tmp/simba_odbc.zip \
&& unzip /tmp/simba_odbc.zip -d /tmp/simba \
&& dpkg -i /tmp/simba/*.deb \
&& printf "[Simba]\nDriver = /opt/simba/spark/lib/64/libsparkodbc_sb64.so" >> /etc/odbcinst.ini \
&& rm /tmp/simba_odbc.zip \
&& rm -rf /tmp/simba; fi

WORKDIR /app

ENV POETRY_VERSION=1.8.3
ENV POETRY_VERSION=1.6.1
ENV POETRY_HOME=/etc/poetry
ENV POETRY_VIRTUALENVS_CREATE=false
RUN curl -sSL https://install.python-poetry.org | python3 -

# Avoid crashes, including corrupted cache artifacts, when building multi-platform images with GitHub Actions.
RUN /etc/poetry/bin/poetry cache clear pypi --all

COPY pyproject.toml poetry.lock ./

ARG POETRY_OPTIONS="--no-root --no-interaction --no-ansi"
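Both sides of this Dockerfile gate work behind build arguments: `skip_frontend_build` uses the classic `x$var = x` emptiness test to decide whether to run `yarn build`, and the MSSQL/Simba ODBC install only runs when `TARGETPLATFORM` is `linux/amd64`. A hedged sketch of how those knobs are typically passed at build time — the image tag names are made up:

```bash
# Full build, frontend assets included (skip_frontend_build left empty).
docker build -t redash-local:full .

# Skip the frontend build: any non-empty value makes
# [ "x$skip_frontend_build" = "x" ] evaluate false inside the Dockerfile.
docker build --build-arg skip_frontend_build=1 -t redash-local:backend-only .

# TARGETPLATFORM is populated automatically by BuildKit/buildx per platform.
docker buildx build --platform linux/amd64,linux/arm64 -t redash-local:multi .
```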
24  Makefile
@@ -1,14 +1,10 @@
.PHONY: compose_build up test_db create_database clean clean-all down tests lint backend-unit-tests frontend-unit-tests test build watch start redis-cli bash
.PHONY: compose_build up test_db create_database clean down tests lint backend-unit-tests frontend-unit-tests test build watch start redis-cli bash

compose_build: .env
COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker compose build

up:
docker compose up -d redis postgres --remove-orphans
docker compose exec -u postgres postgres psql postgres --csv \
-1tqc "SELECT table_name FROM information_schema.tables WHERE table_name = 'organizations'" 2> /dev/null \
| grep -q "organizations" || make create_database
COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker compose up -d --build --remove-orphans
COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker compose up -d --build

test_db:
@for i in `seq 1 5`; do \
@@ -21,21 +17,7 @@ create_database: .env
docker compose run server create_db

clean:
docker compose down
docker compose --project-name cypress down
docker compose rm --stop --force
docker compose --project-name cypress rm --stop --force
docker image rm --force \
cypress-server:latest cypress-worker:latest cypress-scheduler:latest \
redash-server:latest redash-worker:latest redash-scheduler:latest
docker container prune --force
docker image prune --force
docker volume prune --force

clean-all: clean
docker image rm --force \
redash/redash:10.1.0.b50633 redis:7-alpine maildev/maildev:latest \
pgautoupgrade/pgautoupgrade:15-alpine3.8 pgautoupgrade/pgautoupgrade:latest
docker compose down && docker compose rm

down:
docker compose down
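The reworked `up` target bootstraps the database only when it is missing: it asks Postgres, via `psql --csv -1tqc`, whether Redash's `organizations` table exists, and runs `create_database` otherwise. The same probe can be run by hand — a sketch assuming the compose service names used above:

```bash
# Prints the table name if Redash's schema has been created, else nothing.
docker compose exec -u postgres postgres psql postgres --csv \
  -1tqc "SELECT table_name FROM information_schema.tables WHERE table_name = 'organizations'" \
  | grep -q organizations \
  && echo "schema present" \
  || echo "schema missing - run: make create_database"
```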
@@ -84,7 +84,6 @@ Redash supports more than 35 SQL and NoSQL [data sources](https://redash.io/help
- Python
- Qubole
- Rockset
- RisingWave
- Salesforce
- ScyllaDB
- Shell Scripts
@@ -67,7 +67,7 @@ help() {
echo ""
echo "shell -- open shell"
echo "dev_server -- start Flask development server with debugger and auto reload"
echo "debug -- start Flask development server with remote debugger via debugpy"
echo "debug -- start Flask development server with remote debugger via ptvsd"
echo "create_db -- create database tables"
echo "manage -- CLI to manage redash"
echo "tests -- run tests"
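The entrypoint commands listed by `help()` are invoked as the container command; a sketch of running two of them through compose, assuming the `server` service name used elsewhere in this compare:

```bash
# Run entrypoint commands from the help text above via compose.
docker compose run --rm server create_db   # create database tables
docker compose run --rm server tests       # run tests
```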
BIN  client/app/assets/images/db-logos/qubole.png  (binary file not shown; after: 2.4 KiB, before: 9.7 KiB)
@@ -22,7 +22,7 @@ function BeaconConsent() {
setHide(true);
};

const confirmConsent = (confirm) => {
const confirmConsent = confirm => {
let message = "🙏 Thank you.";

if (!confirm) {
@@ -47,8 +47,7 @@ function BeaconConsent() {
<HelpTrigger type="USAGE_DATA_SHARING" />
</>
}
bordered={false}
>
bordered={false}>
<Text>Help Redash improve by automatically sending anonymous usage data:</Text>
<div className="m-t-5">
<ul>
@@ -67,7 +66,8 @@ function BeaconConsent() {
</div>
<div className="m-t-15">
<Text type="secondary">
You can change this setting anytime from the <Link href="settings/general">Settings</Link> page.
You can change this setting anytime from the{" "}
<Link href="settings/organization">Organization Settings</Link> page.
</Text>
</div>
</Card>
@@ -12,7 +12,6 @@ import { wrap as wrapDialog, DialogPropType } from "@/components/DialogWrapper";
import QuerySelector from "@/components/QuerySelector";
import { Query } from "@/services/query";
import { useUniqueId } from "@/lib/hooks/useUniqueId";
import "./EditParameterSettingsDialog.less";

const { Option } = Select;
const formItemProps = { labelCol: { span: 6 }, wrapperCol: { span: 16 } };
@@ -27,7 +26,7 @@ function isTypeDateRange(type) {

function joinExampleList(multiValuesOptions) {
const { prefix, suffix } = multiValuesOptions;
return ["value1", "value2", "value3"].map((value) => `${prefix}${value}${suffix}`).join(",");
return ["value1", "value2", "value3"].map(value => `${prefix}${value}${suffix}`).join(",");
}

function NameInput({ name, type, onChange, existingNames, setValidation }) {
@@ -55,7 +54,7 @@ function NameInput({ name, type, onChange, existingNames, setValidation }) {

return (
<Form.Item required label="Keyword" help={helpText} validateStatus={validateStatus} {...formItemProps}>
<Input onChange={(e) => onChange(e.target.value)} autoFocus />
<Input onChange={e => onChange(e.target.value)} autoFocus />
</Form.Item>
);
}
@@ -72,8 +71,6 @@ function EditParameterSettingsDialog(props) {
const [param, setParam] = useState(clone(props.parameter));
const [isNameValid, setIsNameValid] = useState(true);
const [initialQuery, setInitialQuery] = useState();
const [userInput, setUserInput] = useState(param.regex || "");
const [isValidRegex, setIsValidRegex] = useState(true);

const isNew = !props.parameter.name;

@@ -117,17 +114,6 @@ function EditParameterSettingsDialog(props) {

const paramFormId = useUniqueId("paramForm");

const handleRegexChange = (e) => {
setUserInput(e.target.value);
try {
new RegExp(e.target.value);
setParam({ ...param, regex: e.target.value });
setIsValidRegex(true);
} catch (error) {
setIsValidRegex(false);
}
};

return (
<Modal
{...props.dialog.props}
@@ -143,17 +129,15 @@ function EditParameterSettingsDialog(props) {
disabled={!isFulfilled()}
type="primary"
form={paramFormId}
data-test="SaveParameterSettings"
>
data-test="SaveParameterSettings">
{isNew ? "Add Parameter" : "OK"}
</Button>,
]}
>
]}>
<Form layout="horizontal" onFinish={onConfirm} id={paramFormId}>
{isNew && (
<NameInput
name={param.name}
onChange={(name) => setParam({ ...param, name })}
onChange={name => setParam({ ...param, name })}
setValidation={setIsNameValid}
existingNames={props.existingParams}
type={param.type}
@@ -162,16 +146,15 @@ function EditParameterSettingsDialog(props) {
<Form.Item required label="Title" {...formItemProps}>
<Input
value={isNull(param.title) ? getDefaultTitle(param.name) : param.title}
onChange={(e) => setParam({ ...param, title: e.target.value })}
onChange={e => setParam({ ...param, title: e.target.value })}
data-test="ParameterTitleInput"
/>
</Form.Item>
<Form.Item label="Type" {...formItemProps}>
<Select value={param.type} onChange={(type) => setParam({ ...param, type })} data-test="ParameterTypeSelect">
<Select value={param.type} onChange={type => setParam({ ...param, type })} data-test="ParameterTypeSelect">
<Option value="text" data-test="TextParameterTypeOption">
Text
</Option>
<Option value="text-pattern">Text Pattern</Option>
<Option value="number" data-test="NumberParameterTypeOption">
Number
</Option>
@@ -197,26 +180,12 @@ function EditParameterSettingsDialog(props) {
<Option value="datetime-range-with-seconds">Date and Time Range (with seconds)</Option>
</Select>
</Form.Item>
{param.type === "text-pattern" && (
<Form.Item
label="Regex"
help={!isValidRegex ? "Invalid Regex Pattern" : "Valid Regex Pattern"}
{...formItemProps}
>
<Input
value={userInput}
onChange={handleRegexChange}
className={!isValidRegex ? "input-error" : ""}
data-test="RegexPatternInput"
/>
</Form.Item>
)}
{param.type === "enum" && (
<Form.Item label="Values" help="Dropdown list values (newline delimited)" {...formItemProps}>
<Input.TextArea
rows={3}
value={param.enumOptions}
onChange={(e) => setParam({ ...param, enumOptions: e.target.value })}
onChange={e => setParam({ ...param, enumOptions: e.target.value })}
/>
</Form.Item>
)}
@@ -224,7 +193,7 @@ function EditParameterSettingsDialog(props) {
<Form.Item label="Query" help="Select query to load dropdown values from" {...formItemProps}>
<QuerySelector
selectedQuery={initialQuery}
onChange={(q) => setParam({ ...param, queryId: q && q.id })}
onChange={q => setParam({ ...param, queryId: q && q.id })}
type="select"
/>
</Form.Item>
@@ -233,7 +202,7 @@ function EditParameterSettingsDialog(props) {
<Form.Item className="m-b-0" label=" " colon={false} {...formItemProps}>
<Checkbox
defaultChecked={!!param.multiValuesOptions}
onChange={(e) =>
onChange={e =>
setParam({
...param,
multiValuesOptions: e.target.checked
@@ -245,8 +214,7 @@ function EditParameterSettingsDialog(props) {
: null,
})
}
data-test="AllowMultipleValuesCheckbox"
>
data-test="AllowMultipleValuesCheckbox">
Allow multiple values
</Checkbox>
</Form.Item>
@@ -259,11 +227,10 @@ function EditParameterSettingsDialog(props) {
Placed in query as: <code>{joinExampleList(param.multiValuesOptions)}</code>
</React.Fragment>
}
{...formItemProps}
>
{...formItemProps}>
<Select
value={param.multiValuesOptions.prefix}
onChange={(quoteOption) =>
onChange={quoteOption =>
setParam({
...param,
multiValuesOptions: {
@@ -273,8 +240,7 @@ function EditParameterSettingsDialog(props) {
},
})
}
data-test="QuotationSelect"
>
data-test="QuotationSelect">
<Option value="">None (default)</Option>
<Option value="'">Single Quotation Mark</Option>
<Option value={'"'} data-test="DoubleQuotationMarkOption">
@@ -1,3 +0,0 @@
.input-error {
border-color: red !important;
}
@@ -101,7 +101,7 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
clearTimeout(this.iframeLoadingTimeout);
}

loadIframe = (url) => {
loadIframe = url => {
clearTimeout(this.iframeLoadingTimeout);
this.setState({ loading: true, error: false });

@@ -116,8 +116,8 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
clearTimeout(this.iframeLoadingTimeout);
};

onPostMessageReceived = (event) => {
if (!some(allowedDomains, (domain) => startsWith(event.origin, domain))) {
onPostMessageReceived = event => {
if (!some(allowedDomains, domain => startsWith(event.origin, domain))) {
return;
}

@@ -134,7 +134,7 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
return helpTriggerType ? helpTriggerType[0] : this.props.href;
};

openDrawer = (e) => {
openDrawer = e => {
// keep "open in new tab" behavior
if (!e.shiftKey && !e.ctrlKey && !e.metaKey) {
e.preventDefault();
@@ -144,7 +144,7 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
}
};

closeDrawer = (event) => {
closeDrawer = event => {
if (event) {
event.preventDefault();
}
@@ -161,7 +161,7 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
const tooltip = get(types, `${this.props.type}[1]`, this.props.title);
const className = cx("help-trigger", this.props.className);
const url = this.state.currentUrl;
const isAllowedDomain = some(allowedDomains, (domain) => startsWith(url || targetUrl, domain));
const isAllowedDomain = some(allowedDomains, domain => startsWith(url || targetUrl, domain));
const shouldRenderAsLink = this.props.renderAsLink || !isAllowedDomain;

return (
@@ -180,15 +180,13 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
)}
</>
) : null
}
>
}>
<Link
href={url || this.getUrl()}
className={className}
rel="noopener noreferrer"
target="_blank"
onClick={shouldRenderAsLink ? () => {} : this.openDrawer}
>
onClick={shouldRenderAsLink ? () => {} : this.openDrawer}>
{this.props.children}
</Link>
</Tooltip>
@@ -199,8 +197,7 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
visible={this.state.visible}
className={cx("help-drawer", drawerClassName)}
destroyOnClose
width={400}
>
width={400}>
<div className="drawer-wrapper">
<div className="drawer-menu">
{url && (
@@ -33,10 +33,10 @@ export const MappingType = {
};

export function parameterMappingsToEditableMappings(mappings, parameters, existingParameterNames = []) {
return map(mappings, (mapping) => {
return map(mappings, mapping => {
const result = extend({}, mapping);
const alreadyExists = includes(existingParameterNames, mapping.mapTo);
result.param = find(parameters, (p) => p.name === mapping.name);
result.param = find(parameters, p => p.name === mapping.name);
switch (mapping.type) {
case ParameterMappingType.DashboardLevel:
result.type = alreadyExists ? MappingType.DashboardMapToExisting : MappingType.DashboardAddNew;
@@ -62,7 +62,7 @@ export function editableMappingsToParameterMappings(mappings) {
map(
// convert to map
mappings,
(mapping) => {
mapping => {
const result = extend({}, mapping);
switch (mapping.type) {
case MappingType.DashboardAddNew:
@@ -95,11 +95,11 @@ export function editableMappingsToParameterMappings(mappings) {
export function synchronizeWidgetTitles(sourceMappings, widgets) {
const affectedWidgets = [];

each(sourceMappings, (sourceMapping) => {
each(sourceMappings, sourceMapping => {
if (sourceMapping.type === ParameterMappingType.DashboardLevel) {
each(widgets, (widget) => {
each(widgets, widget => {
const widgetMappings = widget.options.parameterMappings;
each(widgetMappings, (widgetMapping) => {
each(widgetMappings, widgetMapping => {
// check if mapped to the same dashboard-level parameter
if (
widgetMapping.type === ParameterMappingType.DashboardLevel &&
@@ -140,7 +140,7 @@ export class ParameterMappingInput extends React.Component {
className: "form-item",
};

updateSourceType = (type) => {
updateSourceType = type => {
let {
mapping: { mapTo },
} = this.props;
@@ -155,7 +155,7 @@ export class ParameterMappingInput extends React.Component {
this.updateParamMapping({ type, mapTo });
};

updateParamMapping = (update) => {
updateParamMapping = update => {
const { onChange, mapping } = this.props;
const newMapping = extend({}, mapping, update);
if (newMapping.value !== mapping.value) {
@@ -175,7 +175,7 @@ export class ParameterMappingInput extends React.Component {
renderMappingTypeSelector() {
const noExisting = isEmpty(this.props.existingParamNames);
return (
<Radio.Group value={this.props.mapping.type} onChange={(e) => this.updateSourceType(e.target.value)}>
<Radio.Group value={this.props.mapping.type} onChange={e => this.updateSourceType(e.target.value)}>
<Radio className="radio" value={MappingType.DashboardAddNew} data-test="NewDashboardParameterOption">
New dashboard parameter
</Radio>
@@ -205,16 +205,16 @@ export class ParameterMappingInput extends React.Component {
<Input
value={mapTo}
aria-label="Parameter name (key)"
onChange={(e) => this.updateParamMapping({ mapTo: e.target.value })}
onChange={e => this.updateParamMapping({ mapTo: e.target.value })}
/>
);
}

renderDashboardMapToExisting() {
const { mapping, existingParamNames } = this.props;
const options = map(existingParamNames, (paramName) => ({ label: paramName, value: paramName }));
const options = map(existingParamNames, paramName => ({ label: paramName, value: paramName }));

return <Select value={mapping.mapTo} onChange={(mapTo) => this.updateParamMapping({ mapTo })} options={options} />;
return <Select value={mapping.mapTo} onChange={mapTo => this.updateParamMapping({ mapTo })} options={options} />;
}

renderStaticValue() {
@@ -226,8 +226,7 @@ export class ParameterMappingInput extends React.Component {
enumOptions={mapping.param.enumOptions}
queryId={mapping.param.queryId}
parameter={mapping.param}
onSelect={(value) => this.updateParamMapping({ value })}
regex={mapping.param.regex}
onSelect={value => this.updateParamMapping({ value })}
/>
);
}
@@ -285,12 +284,12 @@ class MappingEditor extends React.Component {
};
}

onVisibleChange = (visible) => {
onVisibleChange = visible => {
if (visible) this.show();
else this.hide();
};

onChange = (mapping) => {
onChange = mapping => {
let inputError = null;

if (mapping.type === MappingType.DashboardAddNew) {
@@ -352,8 +351,7 @@ class MappingEditor extends React.Component {
trigger="click"
content={this.renderContent()}
visible={visible}
onVisibleChange={this.onVisibleChange}
>
onVisibleChange={this.onVisibleChange}>
<Button size="small" type="dashed" data-test={`EditParamMappingButton-${mapping.param.name}`}>
<EditOutlinedIcon />
</Button>
@@ -378,14 +376,14 @@ class TitleEditor extends React.Component {
title: "", // will be set on editing
};

onPopupVisibleChange = (showPopup) => {
onPopupVisibleChange = showPopup => {
this.setState({
showPopup,
title: showPopup ? this.getMappingTitle() : "",
});
};

onEditingTitleChange = (event) => {
onEditingTitleChange = event => {
this.setState({ title: event.target.value });
};

@@ -462,8 +460,7 @@ class TitleEditor extends React.Component {
trigger="click"
content={this.renderPopover()}
visible={this.state.showPopup}
onVisibleChange={this.onPopupVisibleChange}
>
onVisibleChange={this.onPopupVisibleChange}>
<Button size="small" type="dashed">
<EditOutlinedIcon />
</Button>
@@ -511,7 +508,7 @@ export class ParameterMappingListInput extends React.Component {

// just to be safe, array or object
if (typeof value === "object") {
return map(value, (v) => this.getStringValue(v)).join(", ");
return map(value, v => this.getStringValue(v)).join(", ");
}

// rest
@@ -577,7 +574,7 @@ export class ParameterMappingListInput extends React.Component {

render() {
const { existingParams } = this.props; // eslint-disable-line react/prop-types
const dataSource = this.props.mappings.map((mapping) => ({ mapping }));
const dataSource = this.props.mappings.map(mapping => ({ mapping }));

return (
<div className="parameters-mapping-list">
@@ -586,11 +583,11 @@ export class ParameterMappingListInput extends React.Component {
title="Title"
dataIndex="mapping"
key="title"
render={(mapping) => (
render={mapping => (
<TitleEditor
existingParams={existingParams}
mapping={mapping}
onChange={(newMapping) => this.updateParamMapping(mapping, newMapping)}
onChange={newMapping => this.updateParamMapping(mapping, newMapping)}
/>
)}
/>
@@ -599,19 +596,19 @@ export class ParameterMappingListInput extends React.Component {
dataIndex="mapping"
key="keyword"
className="keyword"
render={(mapping) => <code>{`{{ ${mapping.name} }}`}</code>}
render={mapping => <code>{`{{ ${mapping.name} }}`}</code>}
/>
<Table.Column
title="Default Value"
dataIndex="mapping"
key="value"
render={(mapping) => this.constructor.getDefaultValue(mapping, this.props.existingParams)}
render={mapping => this.constructor.getDefaultValue(mapping, this.props.existingParams)}
/>
<Table.Column
title="Value Source"
dataIndex="mapping"
key="source"
render={(mapping) => {
render={mapping => {
const existingParamsNames = existingParams
.filter(({ type }) => type === mapping.param.type) // exclude mismatching param types
.map(({ name }) => name); // keep names only
@@ -9,12 +9,11 @@ import DateRangeParameter from "@/components/dynamic-parameters/DateRangeParamet
import QueryBasedParameterInput from "./QueryBasedParameterInput";

import "./ParameterValueInput.less";
import Tooltip from "./Tooltip";

const multipleValuesProps = {
maxTagCount: 3,
maxTagTextLength: 10,
maxTagPlaceholder: (num) => `+${num.length} more`,
maxTagPlaceholder: num => `+${num.length} more`,
};

class ParameterValueInput extends React.Component {
@@ -26,7 +25,6 @@ class ParameterValueInput extends React.Component {
parameter: PropTypes.any, // eslint-disable-line react/forbid-prop-types
onSelect: PropTypes.func,
className: PropTypes.string,
regex: PropTypes.string,
};

static defaultProps = {
@@ -37,7 +35,6 @@ class ParameterValueInput extends React.Component {
parameter: null,
onSelect: () => {},
className: "",
regex: "",
};

constructor(props) {
@@ -48,7 +45,7 @@ class ParameterValueInput extends React.Component {
};
}

componentDidUpdate = (prevProps) => {
componentDidUpdate = prevProps => {
const { value, parameter } = this.props;
// if value prop updated, reset dirty state
if (prevProps.value !== value || prevProps.parameter !== parameter) {
@@ -59,7 +56,7 @@ class ParameterValueInput extends React.Component {
}
};

onSelect = (value) => {
onSelect = value => {
const isDirty = !isEqual(value, this.props.value);
this.setState({ value, isDirty });
this.props.onSelect(value, isDirty);
@@ -96,9 +93,9 @@ class ParameterValueInput extends React.Component {
renderEnumInput() {
const { enumOptions, parameter } = this.props;
const { value } = this.state;
const enumOptionsArray = enumOptions.split("\n").filter((v) => v !== "");
const enumOptionsArray = enumOptions.split("\n").filter(v => v !== "");
// Antd Select doesn't handle null in multiple mode
const normalize = (val) => (parameter.multiValuesOptions && val === null ? [] : val);
const normalize = val => (parameter.multiValuesOptions && val === null ? [] : val);

return (
<SelectWithVirtualScroll
@@ -106,7 +103,7 @@ class ParameterValueInput extends React.Component {
mode={parameter.multiValuesOptions ? "multiple" : "default"}
value={normalize(value)}
onChange={this.onSelect}
options={map(enumOptionsArray, (opt) => ({ label: String(opt), value: opt }))}
options={map(enumOptionsArray, opt => ({ label: String(opt), value: opt }))}
showSearch
showArrow
notFoundContent={isEmpty(enumOptionsArray) ? "No options available" : null}
@@ -136,36 +133,18 @@ class ParameterValueInput extends React.Component {
const { className } = this.props;
const { value } = this.state;

const normalize = (val) => (isNaN(val) ? undefined : val);
const normalize = val => (isNaN(val) ? undefined : val);

return (
<InputNumber
className={className}
value={normalize(value)}
aria-label="Parameter number value"
onChange={(val) => this.onSelect(normalize(val))}
onChange={val => this.onSelect(normalize(val))}
/>
);
}

renderTextPatternInput() {
const { className } = this.props;
const { value } = this.state;

return (
<React.Fragment>
<Tooltip title={`Regex to match: ${this.props.regex}`} placement="right">
<Input
className={className}
value={value}
aria-label="Parameter text pattern value"
onChange={(e) => this.onSelect(e.target.value)}
/>
</Tooltip>
</React.Fragment>
);
}

renderTextInput() {
const { className } = this.props;
const { value } = this.state;
@@ -176,7 +155,7 @@ class ParameterValueInput extends React.Component {
value={value}
aria-label="Parameter text value"
data-test="TextParamInput"
onChange={(e) => this.onSelect(e.target.value)}
onChange={e => this.onSelect(e.target.value)}
/>
);
}
@@ -198,8 +177,6 @@ class ParameterValueInput extends React.Component {
return this.renderQueryBasedInput();
case "number":
return this.renderNumberInput();
case "text-pattern":
return this.renderTextPatternInput();
default:
return this.renderTextInput();
}
@@ -14,7 +14,7 @@ import "./Parameters.less";

function updateUrl(parameters) {
const params = extend({}, location.search);
parameters.forEach((param) => {
parameters.forEach(param => {
extend(params, param.toUrlParams());
});
location.setSearch(params, true);
@@ -43,7 +43,7 @@ export default class Parameters extends React.Component {
appendSortableToParent: true,
};

toCamelCase = (str) => {
toCamelCase = str => {
if (isEmpty(str)) {
return "";
}
@@ -59,10 +59,10 @@ export default class Parameters extends React.Component {
}
const hideRegex = /hide_filter=([^&]+)/g;
const matches = window.location.search.matchAll(hideRegex);
this.hideValues = Array.from(matches, (match) => match[1]);
this.hideValues = Array.from(matches, match => match[1]);
}

componentDidUpdate = (prevProps) => {
componentDidUpdate = prevProps => {
const { parameters, disableUrlUpdate } = this.props;
const parametersChanged = prevProps.parameters !== parameters;
const disableUrlUpdateChanged = prevProps.disableUrlUpdate !== disableUrlUpdate;
@@ -74,7 +74,7 @@ export default class Parameters extends React.Component {
}
};

handleKeyDown = (e) => {
handleKeyDown = e => {
// Cmd/Ctrl/Alt + Enter
if (e.keyCode === 13 && (e.ctrlKey || e.metaKey || e.altKey)) {
e.stopPropagation();
@@ -109,8 +109,8 @@ export default class Parameters extends React.Component {
applyChanges = () => {
const { onValuesChange, disableUrlUpdate } = this.props;
this.setState(({ parameters }) => {
const parametersWithPendingValues = parameters.filter((p) => p.hasPendingValue);
forEach(parameters, (p) => p.applyPendingValue());
const parametersWithPendingValues = parameters.filter(p => p.hasPendingValue);
forEach(parameters, p => p.applyPendingValue());
if (!disableUrlUpdate) {
updateUrl(parameters);
}
@@ -121,7 +121,7 @@ export default class Parameters extends React.Component {

showParameterSettings = (parameter, index) => {
const { onParametersEdit } = this.props;
EditParameterSettingsDialog.showModal({ parameter }).onClose((updated) => {
EditParameterSettingsDialog.showModal({ parameter }).onClose(updated => {
this.setState(({ parameters }) => {
const updatedParameter = extend(parameter, updated);
parameters[index] = createParameter(updatedParameter, updatedParameter.parentQueryId);
@@ -132,7 +132,7 @@ export default class Parameters extends React.Component {
};

renderParameter(param, index) {
if (this.hideValues.some((value) => this.toCamelCase(value) === this.toCamelCase(param.name))) {
if (this.hideValues.some(value => this.toCamelCase(value) === this.toCamelCase(param.name))) {
return null;
}
const { editable } = this.props;
@@ -149,8 +149,7 @@ export default class Parameters extends React.Component {
aria-label="Edit"
onClick={() => this.showParameterSettings(param, index)}
data-test={`ParameterSettings-${param.name}`}
type="button"
>
type="button">
<i className="fa fa-cog" aria-hidden="true" />
</PlainButton>
)}
@@ -163,7 +162,6 @@ export default class Parameters extends React.Component {
enumOptions={param.enumOptions}
queryId={param.queryId}
onSelect={(value, isDirty) => this.setPendingValue(param, value, isDirty)}
regex={param.regex}
/>
</div>
);
@@ -180,22 +178,20 @@ export default class Parameters extends React.Component {
useDragHandle
lockToContainerEdges
helperClass="parameter-dragged"
helperContainer={(containerEl) => (appendSortableToParent ? containerEl : document.body)}
helperContainer={containerEl => (appendSortableToParent ? containerEl : document.body)}
updateBeforeSortStart={this.onBeforeSortStart}
onSortEnd={this.moveParameter}
containerProps={{
className: "parameter-container",
onKeyDown: dirtyParamCount ? this.handleKeyDown : null,
}}
>
}}>
{parameters &&
parameters.map((param, index) => (
<SortableElement key={param.name} index={index}>
<div
className="parameter-block"
data-editable={sortable || null}
data-test={`ParameterBlock-${param.name}`}
>
data-test={`ParameterBlock-${param.name}`}>
{sortable && <DragHandle data-test={`DragHandle-${param.name}`} />}
{this.renderParameter(param, index)}
</div>
@@ -65,7 +65,6 @@ export const Query = PropTypes.shape({

export const AlertOptions = PropTypes.shape({
column: PropTypes.string,
selector: PropTypes.oneOf(["first", "min", "max"]),
op: PropTypes.oneOf([">", ">=", "<", "<=", "==", "!="]),
value: PropTypes.oneOfType([PropTypes.string, PropTypes.number]),
custom_subject: PropTypes.string,
@@ -84,7 +83,6 @@ export const Alert = PropTypes.shape({
query: Query,
options: PropTypes.shape({
column: PropTypes.string,
selector: PropTypes.string,
op: PropTypes.string,
value: PropTypes.oneOfType([PropTypes.string, PropTypes.number]),
}).isRequired,
@@ -148,9 +148,7 @@ function EditVisualizationDialog({ dialog, visualization, query, queryResult })

function dismiss() {
const optionsChanged = !isEqual(options, defaultState.originalOptions);
confirmDialogClose(nameChanged || optionsChanged)
.then(dialog.dismiss)
.catch(() => {});
confirmDialogClose(nameChanged || optionsChanged).then(dialog.dismiss);
}

// When editing existing visualization chart type selector is disabled, so add only existing visualization's
@@ -5,7 +5,7 @@
|
||||
<meta charset="UTF-8" />
|
||||
<base href="{{base_href}}" />
|
||||
<title><%= htmlWebpackPlugin.options.title %></title>
|
||||
<script src="<%= htmlWebpackPlugin.options.staticPath %>unsupportedRedirect.js" async></script>
|
||||
<script src="/static/unsupportedRedirect.js" async></script>
|
||||
|
||||
<link rel="icon" type="image/png" sizes="32x32" href="/static/images/favicon-32x32.png" />
|
||||
<link rel="icon" type="image/png" sizes="96x96" href="/static/images/favicon-96x96.png" />
|
||||
|
||||
@@ -16,7 +16,6 @@ import MenuButton from "./components/MenuButton";
|
||||
import AlertView from "./AlertView";
|
||||
import AlertEdit from "./AlertEdit";
|
||||
import AlertNew from "./AlertNew";
|
||||
import notifications from "@/services/notifications";
|
||||
|
||||
const MODES = {
|
||||
NEW: 0,
|
||||
@@ -65,7 +64,6 @@ class Alert extends React.Component {
|
||||
this.setState({
|
||||
alert: {
|
||||
options: {
|
||||
selector: "first",
|
||||
op: ">",
|
||||
value: 1,
|
||||
muted: false,
|
||||
@@ -77,7 +75,7 @@ class Alert extends React.Component {
|
||||
} else {
|
||||
const { alertId } = this.props;
|
||||
AlertService.get({ id: alertId })
|
||||
.then((alert) => {
|
||||
.then(alert => {
|
||||
if (this._isMounted) {
|
||||
const canEdit = currentUser.canEdit(alert);
|
||||
|
||||
@@ -95,7 +93,7 @@ class Alert extends React.Component {
|
||||
this.onQuerySelected(alert.query);
|
||||
}
|
||||
})
|
||||
.catch((error) => {
|
||||
.catch(error => {
|
||||
if (this._isMounted) {
|
||||
this.props.onError(error);
|
||||
}
|
||||
@@ -114,7 +112,7 @@ class Alert extends React.Component {
|
||||
alert.rearm = pendingRearm || null;
|
||||
|
||||
return AlertService.save(alert)
|
||||
.then((alert) => {
|
||||
.then(alert => {
|
||||
notification.success("Saved.");
|
||||
navigateTo(`alerts/${alert.id}`, true);
|
||||
this.setState({ alert, mode: MODES.VIEW });
|
||||
@@ -124,7 +122,7 @@ class Alert extends React.Component {
|
||||
});
|
||||
};
|
||||
|
||||
onQuerySelected = (query) => {
|
||||
onQuerySelected = query => {
|
||||
this.setState(({ alert }) => ({
|
||||
alert: Object.assign(alert, { query }),
|
||||
queryResult: null,
|
||||
@@ -132,7 +130,7 @@ class Alert extends React.Component {
|
||||
|
||||
if (query) {
|
||||
// get cached result for column names and values
|
||||
new QueryService(query).getQueryResultPromise().then((queryResult) => {
|
||||
new QueryService(query).getQueryResultPromise().then(queryResult => {
|
||||
if (this._isMounted) {
|
||||
this.setState({ queryResult });
|
||||
let { column } = this.state.alert.options;
|
||||
@@ -148,18 +146,18 @@ class Alert extends React.Component {
|
||||
}
|
||||
};
|
||||
|
||||
onNameChange = (name) => {
|
||||
onNameChange = name => {
|
||||
const { alert } = this.state;
|
||||
this.setState({
|
||||
alert: Object.assign(alert, { name }),
|
||||
});
|
||||
};
|
||||
|
||||
onRearmChange = (pendingRearm) => {
|
||||
onRearmChange = pendingRearm => {
|
||||
this.setState({ pendingRearm });
|
||||
};
|
||||
|
||||
setAlertOptions = (obj) => {
|
||||
setAlertOptions = obj => {
|
||||
const { alert } = this.state;
|
||||
const options = { ...alert.options, ...obj };
|
||||
this.setState({
|
||||
@@ -179,17 +177,6 @@ class Alert extends React.Component {
|
||||
});
|
||||
};
|
||||
|
||||
evaluate = () => {
|
||||
const { alert } = this.state;
|
||||
return AlertService.evaluate(alert)
|
||||
.then(() => {
|
||||
notification.success("Alert evaluated. Refresh page for updated status.");
|
||||
})
|
||||
.catch(() => {
|
||||
notifications.error("Failed to evaluate alert.");
|
||||
});
|
||||
};
|
||||
|
||||
mute = () => {
|
||||
const { alert } = this.state;
|
||||
return AlertService.mute(alert)
|
||||
@@ -236,14 +223,7 @@ class Alert extends React.Component {
|
||||
const { queryResult, mode, canEdit, pendingRearm } = this.state;
|
||||
|
||||
const menuButton = (
|
||||
<MenuButton
|
||||
doDelete={this.delete}
|
||||
muted={muted}
|
||||
mute={this.mute}
|
||||
unmute={this.unmute}
|
||||
canEdit={canEdit}
|
||||
evaluate={this.evaluate}
|
||||
/>
|
||||
<MenuButton doDelete={this.delete} muted={muted} mute={this.mute} unmute={this.unmute} canEdit={canEdit} />
|
||||
);
|
||||
|
||||
const commonProps = {
|
||||
@@ -278,7 +258,7 @@ routes.register(
|
||||
routeWithUserSession({
|
||||
path: "/alerts/new",
|
||||
title: "New Alert",
|
||||
render: (pageProps) => <Alert {...pageProps} mode={MODES.NEW} />,
|
||||
render: pageProps => <Alert {...pageProps} mode={MODES.NEW} />,
|
||||
})
|
||||
);
|
||||
routes.register(
|
||||
@@ -286,7 +266,7 @@ routes.register(
|
||||
routeWithUserSession({
|
||||
path: "/alerts/:alertId",
|
||||
title: "Alert",
|
||||
render: (pageProps) => <Alert {...pageProps} mode={MODES.VIEW} />,
|
||||
render: pageProps => <Alert {...pageProps} mode={MODES.VIEW} />,
|
||||
})
|
||||
);
|
||||
routes.register(
|
||||
@@ -294,6 +274,6 @@ routes.register(
|
||||
routeWithUserSession({
|
||||
path: "/alerts/:alertId/edit",
|
||||
title: "Alert",
|
||||
render: (pageProps) => <Alert {...pageProps} mode={MODES.EDIT} />,
|
||||
render: pageProps => <Alert {...pageProps} mode={MODES.EDIT} />,
|
||||
})
|
||||
);
|
||||
|
||||
@@ -68,23 +68,13 @@ export default class AlertView extends React.Component {
<>
<Title name={name} alert={alert}>
<DynamicComponent name="AlertView.HeaderExtra" alert={alert} />
{canEdit ? (
<>
<Button type="default" onClick={canEdit ? onEdit : null} className={cx({ disabled: !canEdit })}>
<i className="fa fa-edit m-r-5" aria-hidden="true" />
Edit
</Button>
{menuButton}
</>
) : (
<Tooltip title="You do not have sufficient permissions to edit this alert">
<Button type="default" onClick={canEdit ? onEdit : null} className={cx({ disabled: !canEdit })}>
<i className="fa fa-edit m-r-5" aria-hidden="true" />
Edit
</Button>
{menuButton}
</Tooltip>
)}
<Tooltip title={canEdit ? "" : "You do not have sufficient permissions to edit this alert"}>
<Button type="default" onClick={canEdit ? onEdit : null} className={cx({ disabled: !canEdit })}>
<i className="fa fa-edit m-r-5" aria-hidden="true" />
Edit
</Button>
{menuButton}
</Tooltip>
</Title>
<div className="bg-white tiled p-20">
<Grid.Row type="flex" gutter={16}>

@@ -54,74 +54,23 @@ export default function Criteria({ columnNames, resultValues, alertOptions, onCh
return null;
})();

let columnHint;

if (alertOptions.selector === "first") {
columnHint = (
<small className="alert-criteria-hint">
Top row value is <code className="p-0">{toString(columnValue) || "unknown"}</code>
</small>
);
} else if (alertOptions.selector === "max") {
columnHint = (
<small className="alert-criteria-hint">
Max column value is{" "}
<code className="p-0">
{toString(
Math.max(...resultValues.map((o) => Number(o[alertOptions.column])).filter((value) => !isNaN(value)))
) || "unknown"}
</code>
</small>
);
} else if (alertOptions.selector === "min") {
columnHint = (
<small className="alert-criteria-hint">
Min column value is{" "}
<code className="p-0">
{toString(
Math.min(...resultValues.map((o) => Number(o[alertOptions.column])).filter((value) => !isNaN(value)))
) || "unknown"}
</code>
</small>
);
}
const columnHint = (
<small className="alert-criteria-hint">
Top row value is <code className="p-0">{toString(columnValue) || "unknown"}</code>
</small>
);

return (
<div data-test="Criteria">
<div className="input-title">
<span className="input-label">Selector</span>
{editMode ? (
<Select
value={alertOptions.selector}
onChange={(selector) => onChange({ selector })}
optionLabelProp="label"
dropdownMatchSelectWidth={false}
style={{ width: 80 }}
>
<Select.Option value="first" label="first">
first
</Select.Option>
<Select.Option value="min" label="min">
min
</Select.Option>
<Select.Option value="max" label="max">
max
</Select.Option>
</Select>
) : (
<DisabledInput minWidth={60}>{alertOptions.selector}</DisabledInput>
)}
</div>
<div className="input-title">
<span className="input-label">Value column</span>
{editMode ? (
<Select
value={alertOptions.column}
onChange={(column) => onChange({ column })}
onChange={column => onChange({ column })}
dropdownMatchSelectWidth={false}
style={{ minWidth: 100 }}
>
{columnNames.map((name) => (
style={{ minWidth: 100 }}>
{columnNames.map(name => (
<Select.Option key={name}>{name}</Select.Option>
))}
</Select>
@@ -134,11 +83,10 @@ export default function Criteria({ columnNames, resultValues, alertOptions, onCh
{editMode ? (
<Select
value={alertOptions.op}
onChange={(op) => onChange({ op })}
onChange={op => onChange({ op })}
optionLabelProp="label"
dropdownMatchSelectWidth={false}
style={{ width: 55 }}
>
style={{ width: 55 }}>
<Select.Option value=">" label={CONDITIONS[">"]}>
{CONDITIONS[">"]} greater than
</Select.Option>
@@ -177,7 +125,7 @@ export default function Criteria({ columnNames, resultValues, alertOptions, onCh
id="threshold-criterion"
style={{ width: 90 }}
value={alertOptions.value}
onChange={(e) => onChange({ value: e.target.value })}
onChange={e => onChange({ value: e.target.value })}
/>
) : (
<DisabledInput minWidth={50}>{alertOptions.value}</DisabledInput>
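The min/max hints removed above reduce to a one-liner over the result rows; a standalone sketch with made-up rows:

// Coerce the alert column to numbers, drop NaNs, then take the extreme value.
// Note Math.max(...[]) is -Infinity, which the UI masks with "unknown".
const resultValues = [{ value: "3" }, { value: "10" }, { value: "n/a" }];
const column = "value";

const numbers = resultValues.map((o) => Number(o[column])).filter((v) => !isNaN(v));
console.log(Math.max(...numbers)); // 10
console.log(Math.min(...numbers)); // 3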
@@ -11,7 +11,7 @@ import LoadingOutlinedIcon from "@ant-design/icons/LoadingOutlined";
import EllipsisOutlinedIcon from "@ant-design/icons/EllipsisOutlined";
import PlainButton from "@/components/PlainButton";

export default function MenuButton({ doDelete, canEdit, mute, unmute, evaluate, muted }) {
export default function MenuButton({ doDelete, canEdit, mute, unmute, muted }) {
const [loading, setLoading] = useState(false);

const execute = useCallback(action => {
@@ -55,9 +55,6 @@ export default function MenuButton({ doDelete, canEdit, mute, unmute, evaluate,
<Menu.Item>
<PlainButton onClick={confirmDelete}>Delete</PlainButton>
</Menu.Item>
<Menu.Item>
<PlainButton onClick={() => execute(evaluate)}>Evaluate</PlainButton>
</Menu.Item>
</Menu>
}>
<Button aria-label="More actions">
@@ -72,7 +69,6 @@ MenuButton.propTypes = {
canEdit: PropTypes.bool.isRequired,
mute: PropTypes.func.isRequired,
unmute: PropTypes.func.isRequired,
evaluate: PropTypes.func.isRequired,
muted: PropTypes.bool,
};

@@ -118,9 +118,28 @@ class ShareDashboardDialog extends React.Component {
/>
</Form.Item>
{dashboard.public_url && (
<Form.Item label="Secret address" {...this.formItemProps}>
<InputWithCopy value={dashboard.public_url} data-test="SecretAddress" />
</Form.Item>
<>
<Form.Item>
<Alert
message={
<div>
Custom rule for hiding filter components when sharing links:
<br />
You can hide filter components by appending `&hide_filter={"{{"} component_name{"}}"}` to the
sharing URL.
<br />
Example: http://{"{{"}ip{"}}"}:{"{{"}port{"}}"}/public/dashboards/{"{{"}id{"}}"}
?p_country=ghana&p_site=10&hide_filter=country
</div>
}
type="warning"
/>
</Form.Item>

<Form.Item label="Secret address" {...this.formItemProps}>
<InputWithCopy value={dashboard.public_url} data-test="SecretAddress" />
</Form.Item>
</>
)}
</Form>
</Modal>
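The warning added to the share dialog above documents a URL convention; a sketch of building such a link (host, port, and dashboard id are placeholders, not values from this diff):

const base = "http://example-host:5000/public/dashboards/example-id";
const params = new URLSearchParams({ p_country: "ghana", p_site: "10", hide_filter: "country" });
console.log(`${base}?${params.toString()}`);
// http://example-host:5000/public/dashboards/example-id?p_country=ghana&p_site=10&hide_filter=country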
@@ -31,8 +31,7 @@ function DeprecatedEmbedFeatureAlert() {
<Link
href="https://discuss.redash.io/t/support-for-parameters-in-embedded-visualizations/3337"
target="_blank"
rel="noopener noreferrer"
>
rel="noopener noreferrer">
Read more
</Link>
.
@@ -44,7 +43,7 @@ function DeprecatedEmbedFeatureAlert() {

function EmailNotVerifiedAlert() {
const verifyEmail = () => {
axios.post("verification_email/").then((data) => {
axios.post("verification_email/").then(data => {
notification.success(data.message);
});
};
@@ -101,6 +100,6 @@ routes.register(
routeWithUserSession({
path: "/",
title: "Redash",
render: (pageProps) => <Home {...pageProps} />,
render: pageProps => <Home {...pageProps} />,
})
);

@@ -9,7 +9,6 @@ import QueryControlDropdown from "@/components/EditVisualizationButton/QueryCont
import EditVisualizationButton from "@/components/EditVisualizationButton";
import useQueryResultData from "@/lib/useQueryResultData";
import { durationHumanize, pluralize, prettySize } from "@/lib/utils";
import { isUndefined } from "lodash";

import "./QueryExecutionMetadata.less";

@@ -52,8 +51,7 @@ export default function QueryExecutionMetadata({
"Result truncated to " +
queryResultData.rows.length +
" rows. Databricks may truncate query results that are unusually large."
}
>
}>
<WarningTwoTone twoToneColor="#FF9800" />
</Tooltip>
</span>
@@ -69,9 +67,10 @@ export default function QueryExecutionMetadata({
)}
{isQueryExecuting && <span>Running…</span>}
</span>
{!isUndefined(queryResultData.metadata.data_scanned) && !isQueryExecuting && (
{queryResultData.metadata.data_scanned && (
<span className="m-l-5">
Data Scanned <strong>{prettySize(queryResultData.metadata.data_scanned)}</strong>
Data Scanned
<strong>{prettySize(queryResultData.metadata.data_scanned)}</strong>
</span>
)}
</span>
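One behavioural note on the data_scanned hunk above: the isUndefined guard keeps a legitimate value of 0 visible, while the plain truthiness check drops it. A minimal illustration:

const { isUndefined } = require("lodash");

const metadata = { data_scanned: 0 };

console.log(Boolean(metadata.data_scanned)); // false: truthiness hides a 0-byte scan
console.log(!isUndefined(metadata.data_scanned)); // true: the undefined-check still renders it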
@@ -17,16 +17,14 @@ export default function BeaconConsentSettings(props) {
Anonymous Usage Data Sharing
<HelpTrigger className="m-l-5 m-r-5" type="USAGE_DATA_SHARING" />
</span>
}
>
}>
{loading ? (
<Skeleton title={{ width: 300 }} paragraph={false} active />
) : (
<Checkbox
name="beacon_consent"
checked={values.beacon_consent}
onChange={(e) => onChange({ beacon_consent: e.target.checked })}
>
onChange={e => onChange({ beacon_consent: e.target.checked })}>
Help Redash improve by automatically sending anonymous usage data
</Checkbox>
)}

@@ -36,7 +36,6 @@ const Alert = {
delete: data => axios.delete(`api/alerts/${data.id}`),
mute: data => axios.post(`api/alerts/${data.id}/mute`),
unmute: data => axios.delete(`api/alerts/${data.id}/mute`),
evaluate: data => axios.post(`api/alerts/${data.id}/eval`),
};

export default Alert;

@@ -61,7 +61,7 @@ class DateParameter extends Parameter {
return value;
}

const normalizedValue = moment(value, moment.ISO_8601, true);
const normalizedValue = moment(value);
return normalizedValue.isValid() ? normalizedValue : null;
}
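The normalizeValue hunk above swaps strict ISO-8601 parsing for moment's loose parser; the difference in a small sketch (exact fallback behaviour can vary by environment):

const moment = require("moment");

// Strict: only ISO-8601 strings count as valid.
console.log(moment("2024-01-15", moment.ISO_8601, true).isValid()); // true
console.log(moment("Jan 15 2024", moment.ISO_8601, true).isValid()); // false

// Loose: moment falls back to Date parsing, so non-ISO inputs may pass
// (with a deprecation warning), which is why the two sides of this diff differ.
console.log(moment("Jan 15 2024").isValid()); // typically true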
@@ -1,29 +0,0 @@
import { toString, isNull } from "lodash";
import Parameter from "./Parameter";

class TextPatternParameter extends Parameter {
constructor(parameter, parentQueryId) {
super(parameter, parentQueryId);
this.regex = parameter.regex;
this.setValue(parameter.value);
}

// eslint-disable-next-line class-methods-use-this
normalizeValue(value) {
const normalizedValue = toString(value);
if (isNull(normalizedValue)) {
return null;
}

var re = new RegExp(this.regex);

if (re !== null) {
if (re.test(normalizedValue)) {
return normalizedValue;
}
}
return null;
}
}

export default TextPatternParameter;
@@ -5,7 +5,6 @@ import EnumParameter from "./EnumParameter";
import QueryBasedDropdownParameter from "./QueryBasedDropdownParameter";
import DateParameter from "./DateParameter";
import DateRangeParameter from "./DateRangeParameter";
import TextPatternParameter from "./TextPatternParameter";

function createParameter(param, parentQueryId) {
switch (param.type) {
@@ -23,8 +22,6 @@ function createParameter(param, parentQueryId) {
case "datetime-range":
case "datetime-range-with-seconds":
return new DateRangeParameter(param, parentQueryId);
case "text-pattern":
return new TextPatternParameter({ ...param, type: "text-pattern" }, parentQueryId);
default:
return new TextParameter({ ...param, type: "text" }, parentQueryId);
}
@@ -37,7 +34,6 @@ function cloneParameter(param) {
export {
Parameter,
TextParameter,
TextPatternParameter,
NumberParameter,
EnumParameter,
QueryBasedDropdownParameter,

@@ -1,7 +1,6 @@
import {
createParameter,
TextParameter,
TextPatternParameter,
NumberParameter,
EnumParameter,
QueryBasedDropdownParameter,
@@ -13,7 +12,6 @@ describe("Parameter", () => {
describe("create", () => {
const parameterTypes = [
["text", TextParameter],
["text-pattern", TextPatternParameter],
["number", NumberParameter],
["enum", EnumParameter],
["query", QueryBasedDropdownParameter],

@@ -1,21 +0,0 @@
import { createParameter } from "..";

describe("TextPatternParameter", () => {
let param;

beforeEach(() => {
param = createParameter({ name: "param", title: "Param", type: "text-pattern", regex: "a+" });
});

describe("normalizeValue", () => {
test("converts matching strings", () => {
const normalizedValue = param.normalizeValue("art");
expect(normalizedValue).toBe("art");
});

test("returns null when string does not match pattern", () => {
const normalizedValue = param.normalizeValue("brt");
expect(normalizedValue).toBeNull();
});
});
});
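A subtlety the deleted tests above depend on: RegExp.test matches anywhere in the string unless the pattern is anchored, so regex "a+" accepts "art" because it merely contains an "a":

const re = new RegExp("a+");
console.log(re.test("art")); // true: a substring match is enough
console.log(re.test("brt")); // false: no "a" anywhere

// Anchoring would require the whole value to match instead:
console.log(new RegExp("^a+$").test("art")); // false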
@@ -114,7 +114,7 @@ export function fetchDataFromJob(jobId, interval = 1000) {
}

export function isDateTime(v) {
return isString(v) && moment(v, moment.ISO_8601, true).isValid() && /^\d{4}-\d{2}-\d{2}T/.test(v);
return isString(v) && moment(v).isValid() && /^\d{4}-\d{2}-\d{2}T/.test(v);
}

class QueryResult {

@@ -1,5 +1,6 @@
/* eslint-disable import/no-extraneous-dependencies, no-console */
const { find } = require("lodash");
const atob = require("atob");
const { execSync } = require("child_process");
const { get, post } = require("request").defaults({ jar: true });
const { seedData } = require("./seed-data");
@@ -59,11 +60,23 @@ function stopServer() {

function runCypressCI() {
const {
PERCY_TOKEN_ENCODED,
CYPRESS_PROJECT_ID_ENCODED,
CYPRESS_RECORD_KEY_ENCODED,
GITHUB_REPOSITORY,
CYPRESS_OPTIONS, // eslint-disable-line no-unused-vars
} = process.env;

if (GITHUB_REPOSITORY === "getredash/redash") {
if (PERCY_TOKEN_ENCODED) {
process.env.PERCY_TOKEN = atob(`${PERCY_TOKEN_ENCODED}`);
}
if (CYPRESS_PROJECT_ID_ENCODED) {
process.env.CYPRESS_PROJECT_ID = atob(`${CYPRESS_PROJECT_ID_ENCODED}`);
}
if (CYPRESS_RECORD_KEY_ENCODED) {
process.env.CYPRESS_RECORD_KEY = atob(`${CYPRESS_RECORD_KEY_ENCODED}`);
}
process.env.CYPRESS_OPTIONS = "--record";
}
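The CI helper above expects each secret to arrive base64-encoded; decoding is a one-liner (the token value here is invented for illustration):

const atob = require("atob"); // same package the script requires

const PERCY_TOKEN_ENCODED = Buffer.from("percy-example-token").toString("base64");
console.log(atob(PERCY_TOKEN_ENCODED)); // "percy-example-token"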
@@ -2,14 +2,16 @@ import { dragParam } from "../../support/parameters";
import dayjs from "dayjs";

function openAndSearchAntdDropdown(testId, paramOption) {
cy.getByTestId(testId).find(".ant-select-selection-search-input").type(paramOption, { force: true });
cy.getByTestId(testId)
.find(".ant-select-selection-search-input")
.type(paramOption, { force: true });
}

describe("Parameter", () => {
const expectDirtyStateChange = (edit) => {
const expectDirtyStateChange = edit => {
cy.getByTestId("ParameterName-test-parameter")
.find(".parameter-input")
.should(($el) => {
.should($el => {
assert.isUndefined($el.data("dirty"));
});

@@ -17,7 +19,7 @@ describe("Parameter", () => {

cy.getByTestId("ParameterName-test-parameter")
.find(".parameter-input")
.should(($el) => {
.should($el => {
assert.isTrue($el.data("dirty"));
});
};
@@ -40,7 +42,9 @@ describe("Parameter", () => {
});

it("updates the results after clicking Apply", () => {
cy.getByTestId("ParameterName-test-parameter").find("input").type("Redash");
cy.getByTestId("ParameterName-test-parameter")
.find("input")
.type("Redash");

cy.getByTestId("ParameterApplyButton").click();

@@ -49,66 +53,13 @@ describe("Parameter", () => {

it("sets dirty state when edited", () => {
expectDirtyStateChange(() => {
cy.getByTestId("ParameterName-test-parameter").find("input").type("Redash");
cy.getByTestId("ParameterName-test-parameter")
.find("input")
.type("Redash");
});
});
});

describe("Text Pattern Parameter", () => {
beforeEach(() => {
const queryData = {
name: "Text Pattern Parameter",
query: "SELECT '{{test-parameter}}' AS parameter",
options: {
parameters: [{ name: "test-parameter", title: "Test Parameter", type: "text-pattern", regex: "a.*a" }],
},
};

cy.createQuery(queryData, false).then(({ id }) => cy.visit(`/queries/${id}/source`));
});

it("updates the results after clicking Apply", () => {
cy.getByTestId("ParameterName-test-parameter").find("input").type("{selectall}arta");

cy.getByTestId("ParameterApplyButton").click();

cy.getByTestId("TableVisualization").should("contain", "arta");

cy.getByTestId("ParameterName-test-parameter").find("input").type("{selectall}arounda");

cy.getByTestId("ParameterApplyButton").click();

cy.getByTestId("TableVisualization").should("contain", "arounda");
});

it("throws error message with invalid query request", () => {
cy.getByTestId("ParameterName-test-parameter").find("input").type("{selectall}arta");

cy.getByTestId("ParameterApplyButton").click();

cy.getByTestId("ParameterName-test-parameter").find("input").type("{selectall}abcab");

cy.getByTestId("ParameterApplyButton").click();

cy.getByTestId("QueryExecutionStatus").should("exist");
});

it("sets dirty state when edited", () => {
expectDirtyStateChange(() => {
cy.getByTestId("ParameterName-test-parameter").find("input").type("{selectall}arta");
});
});

it("doesn't let user save invalid regex", () => {
cy.get(".fa-cog").click();
cy.getByTestId("RegexPatternInput").type("{selectall}[");
cy.contains("Invalid Regex Pattern").should("exist");
cy.getByTestId("SaveParameterSettings").click();
cy.get(".fa-cog").click();
cy.getByTestId("RegexPatternInput").should("not.equal", "[");
});
});

describe("Number Parameter", () => {
beforeEach(() => {
const queryData = {
@@ -123,13 +74,17 @@ describe("Parameter", () => {
});

it("updates the results after clicking Apply", () => {
cy.getByTestId("ParameterName-test-parameter").find("input").type("{selectall}42");
cy.getByTestId("ParameterName-test-parameter")
.find("input")
.type("{selectall}42");

cy.getByTestId("ParameterApplyButton").click();

cy.getByTestId("TableVisualization").should("contain", 42);

cy.getByTestId("ParameterName-test-parameter").find("input").type("{selectall}31415");
cy.getByTestId("ParameterName-test-parameter")
.find("input")
.type("{selectall}31415");

cy.getByTestId("ParameterApplyButton").click();

@@ -138,7 +93,9 @@ describe("Parameter", () => {

it("sets dirty state when edited", () => {
expectDirtyStateChange(() => {
cy.getByTestId("ParameterName-test-parameter").find("input").type("{selectall}42");
cy.getByTestId("ParameterName-test-parameter")
.find("input")
.type("{selectall}42");
});
});
});
@@ -162,7 +119,10 @@ describe("Parameter", () => {
openAndSearchAntdDropdown("ParameterName-test-parameter", "value2"); // asserts option filter prop

// only the filtered option should be on the DOM
cy.get(".ant-select-item-option").should("have.length", 1).and("contain", "value2").click();
cy.get(".ant-select-item-option")
.should("have.length", 1)
.and("contain", "value2")
.click();

cy.getByTestId("ParameterApplyButton").click();
// ensure that query is being executed
@@ -180,10 +140,12 @@ describe("Parameter", () => {
SaveParameterSettings
`);

cy.getByTestId("ParameterName-test-parameter").find(".ant-select-selection-search").click();
cy.getByTestId("ParameterName-test-parameter")
.find(".ant-select-selection-search")
.click();

// select all unselected options
cy.get(".ant-select-item-option").each(($option) => {
cy.get(".ant-select-item-option").each($option => {
if (!$option.hasClass("ant-select-item-option-selected")) {
cy.wrap($option).click();
}
@@ -198,7 +160,9 @@ describe("Parameter", () => {

it("sets dirty state when edited", () => {
expectDirtyStateChange(() => {
cy.getByTestId("ParameterName-test-parameter").find(".ant-select").click();
cy.getByTestId("ParameterName-test-parameter")
.find(".ant-select")
.click();

cy.contains(".ant-select-item-option", "value2").click();
});
@@ -212,7 +176,7 @@ describe("Parameter", () => {
name: "Dropdown Query",
query: "",
};
cy.createQuery(dropdownQueryData, true).then((dropdownQuery) => {
cy.createQuery(dropdownQueryData, true).then(dropdownQuery => {
const queryData = {
name: "Query Based Dropdown Parameter",
query: "SELECT '{{test-parameter}}' AS parameter",
@@ -244,7 +208,7 @@ describe("Parameter", () => {
SELECT 'value2' AS name, 2 AS value UNION ALL
SELECT 'value3' AS name, 3 AS value`,
};
cy.createQuery(dropdownQueryData, true).then((dropdownQuery) => {
cy.createQuery(dropdownQueryData, true).then(dropdownQuery => {
const queryData = {
name: "Query Based Dropdown Parameter",
query: "SELECT '{{test-parameter}}' AS parameter",
@@ -270,7 +234,10 @@ describe("Parameter", () => {
openAndSearchAntdDropdown("ParameterName-test-parameter", "value2"); // asserts option filter prop

// only the filtered option should be on the DOM
cy.get(".ant-select-item-option").should("have.length", 1).and("contain", "value2").click();
cy.get(".ant-select-item-option")
.should("have.length", 1)
.and("contain", "value2")
.click();

cy.getByTestId("ParameterApplyButton").click();
// ensure that query is being executed
@@ -288,10 +255,12 @@ describe("Parameter", () => {
SaveParameterSettings
`);

cy.getByTestId("ParameterName-test-parameter").find(".ant-select").click();
cy.getByTestId("ParameterName-test-parameter")
.find(".ant-select")
.click();

// make sure all options are unselected and select all
cy.get(".ant-select-item-option").each(($option) => {
cy.get(".ant-select-item-option").each($option => {
expect($option).not.to.have.class("ant-select-dropdown-menu-item-selected");
cy.wrap($option).click();
});
@@ -305,10 +274,14 @@ describe("Parameter", () => {
});
});

const selectCalendarDate = (date) => {
cy.getByTestId("ParameterName-test-parameter").find("input").click();
const selectCalendarDate = date => {
cy.getByTestId("ParameterName-test-parameter")
.find("input")
.click();

cy.get(".ant-picker-panel").contains(".ant-picker-cell-inner", date).click();
cy.get(".ant-picker-panel")
.contains(".ant-picker-cell-inner", date)
.click();
};

describe("Date Parameter", () => {
@@ -330,10 +303,10 @@ describe("Parameter", () => {
});

afterEach(() => {
cy.clock().then((clock) => clock.restore());
cy.clock().then(clock => clock.restore());
});

it("updates the results after selecting a date", function () {
it("updates the results after selecting a date", function() {
selectCalendarDate("15");

cy.getByTestId("ParameterApplyButton").click();
@@ -341,10 +314,12 @@ describe("Parameter", () => {
cy.getByTestId("TableVisualization").should("contain", dayjs(this.now).format("15/MM/YY"));
});

it("allows picking a dynamic date", function () {
it("allows picking a dynamic date", function() {
cy.getByTestId("DynamicButton").click();

cy.getByTestId("DynamicButtonMenu").contains("Today/Now").click();
cy.getByTestId("DynamicButtonMenu")
.contains("Today/Now")
.click();

cy.getByTestId("ParameterApplyButton").click();

@@ -375,11 +350,14 @@ describe("Parameter", () => {
});

afterEach(() => {
cy.clock().then((clock) => clock.restore());
cy.clock().then(clock => clock.restore());
});

it("updates the results after selecting a date and clicking in ok", function () {
cy.getByTestId("ParameterName-test-parameter").find("input").as("Input").click();
it("updates the results after selecting a date and clicking in ok", function() {
cy.getByTestId("ParameterName-test-parameter")
.find("input")
.as("Input")
.click();

selectCalendarDate("15");

@@ -390,20 +368,27 @@ describe("Parameter", () => {
cy.getByTestId("TableVisualization").should("contain", dayjs(this.now).format("YYYY-MM-15 HH:mm"));
});

it("shows the current datetime after clicking in Now", function () {
cy.getByTestId("ParameterName-test-parameter").find("input").as("Input").click();
it("shows the current datetime after clicking in Now", function() {
cy.getByTestId("ParameterName-test-parameter")
.find("input")
.as("Input")
.click();

cy.get(".ant-picker-panel").contains("Now").click();
cy.get(".ant-picker-panel")
.contains("Now")
.click();

cy.getByTestId("ParameterApplyButton").click();

cy.getByTestId("TableVisualization").should("contain", dayjs(this.now).format("YYYY-MM-DD HH:mm"));
});

it("allows picking a dynamic date", function () {
it("allows picking a dynamic date", function() {
cy.getByTestId("DynamicButton").click();

cy.getByTestId("DynamicButtonMenu").contains("Today/Now").click();
cy.getByTestId("DynamicButtonMenu")
.contains("Today/Now")
.click();

cy.getByTestId("ParameterApplyButton").click();

@@ -412,20 +397,31 @@ describe("Parameter", () => {

it("sets dirty state when edited", () => {
expectDirtyStateChange(() => {
cy.getByTestId("ParameterName-test-parameter").find("input").click();
cy.getByTestId("ParameterName-test-parameter")
.find("input")
.click();

cy.get(".ant-picker-panel").contains("Now").click();
cy.get(".ant-picker-panel")
.contains("Now")
.click();
});
});
});

describe("Date Range Parameter", () => {
const selectCalendarDateRange = (startDate, endDate) => {
cy.getByTestId("ParameterName-test-parameter").find("input").first().click();
cy.getByTestId("ParameterName-test-parameter")
.find("input")
.first()
.click();

cy.get(".ant-picker-panel").contains(".ant-picker-cell-inner", startDate).click();
cy.get(".ant-picker-panel")
.contains(".ant-picker-cell-inner", startDate)
.click();

cy.get(".ant-picker-panel").contains(".ant-picker-cell-inner", endDate).click();
cy.get(".ant-picker-panel")
.contains(".ant-picker-cell-inner", endDate)
.click();
};

beforeEach(() => {
@@ -446,10 +442,10 @@ describe("Parameter", () => {
});

afterEach(() => {
cy.clock().then((clock) => clock.restore());
cy.clock().then(clock => clock.restore());
});

it("updates the results after selecting a date range", function () {
it("updates the results after selecting a date range", function() {
selectCalendarDateRange("15", "20");

cy.getByTestId("ParameterApplyButton").click();
@@ -461,10 +457,12 @@ describe("Parameter", () => {
);
});

it("allows picking a dynamic date range", function () {
it("allows picking a dynamic date range", function() {
cy.getByTestId("DynamicButton").click();

cy.getByTestId("DynamicButtonMenu").contains("Last month").click();
cy.getByTestId("DynamicButtonMenu")
.contains("Last month")
.click();

cy.getByTestId("ParameterApplyButton").click();

@@ -481,10 +479,15 @@ describe("Parameter", () => {
});

describe("Apply Changes", () => {
const expectAppliedChanges = (apply) => {
cy.getByTestId("ParameterName-test-parameter-1").find("input").as("Input").type("Redash");
const expectAppliedChanges = apply => {
cy.getByTestId("ParameterName-test-parameter-1")
.find("input")
.as("Input")
.type("Redash");

cy.getByTestId("ParameterName-test-parameter-2").find("input").type("Redash");
cy.getByTestId("ParameterName-test-parameter-2")
.find("input")
.type("Redash");

cy.location("search").should("not.contain", "Redash");

@@ -520,7 +523,10 @@ describe("Parameter", () => {
it("shows and hides according to parameter dirty state", () => {
cy.getByTestId("ParameterApplyButton").should("not.be", "visible");

cy.getByTestId("ParameterName-test-parameter-1").find("input").as("Param").type("Redash");
cy.getByTestId("ParameterName-test-parameter-1")
.find("input")
.as("Param")
.type("Redash");

cy.getByTestId("ParameterApplyButton").should("be.visible");

@@ -530,13 +536,21 @@ describe("Parameter", () => {
});

it("updates dirty counter", () => {
cy.getByTestId("ParameterName-test-parameter-1").find("input").type("Redash");
cy.getByTestId("ParameterName-test-parameter-1")
.find("input")
.type("Redash");

cy.getByTestId("ParameterApplyButton").find(".ant-badge-count p.current").should("contain", "1");
cy.getByTestId("ParameterApplyButton")
.find(".ant-badge-count p.current")
.should("contain", "1");

cy.getByTestId("ParameterName-test-parameter-2").find("input").type("Redash");
cy.getByTestId("ParameterName-test-parameter-2")
.find("input")
.type("Redash");

cy.getByTestId("ParameterApplyButton").find(".ant-badge-count p.current").should("contain", "2");
cy.getByTestId("ParameterApplyButton")
.find(".ant-badge-count p.current")
.should("contain", "2");
});

it('applies changes from "Apply Changes" button', () => {
@@ -546,13 +560,16 @@ describe("Parameter", () => {
});

it('applies changes from "alt+enter" keyboard shortcut', () => {
expectAppliedChanges((input) => {
expectAppliedChanges(input => {
input.type("{alt}{enter}");
});
});

it('disables "Execute" button', () => {
cy.getByTestId("ParameterName-test-parameter-1").find("input").as("Input").type("Redash");
cy.getByTestId("ParameterName-test-parameter-1")
.find("input")
.as("Input")
.type("Redash");
cy.getByTestId("ExecuteButton").should("be.disabled");

cy.get("@Input").clear();
@@ -577,12 +594,15 @@ describe("Parameter", () => {

cy.createQuery(queryData, false).then(({ id }) => cy.visit(`/queries/${id}/source`));

cy.get(".parameter-block").first().invoke("width").as("paramWidth");
cy.get(".parameter-block")
.first()
.invoke("width")
.as("paramWidth");

cy.get("body").type("{alt}D"); // hide schema browser
});

it("is possible to rearrange parameters", function () {
it("is possible to rearrange parameters", function() {
cy.server();
cy.route("POST", "**/api/queries/*").as("QuerySave");
@@ -26,33 +26,33 @@ const SQL = `
describe("Chart", () => {
beforeEach(() => {
cy.login();
cy.createQuery({ name: "Chart Visualization", query: SQL }).its("id").as("queryId");
cy.createQuery({ name: "Chart Visualization", query: SQL })
.its("id")
.as("queryId");
});

it("creates Bar charts", function () {
it("creates Bar charts", function() {
cy.visit(`queries/${this.queryId}/source`);
cy.getByTestId("ExecuteButton").click();

const getBarChartAssertionFunction =
(specificBarChartAssertionFn = () => {}) =>
() => {
// checks for TabbedEditor standard tabs
assertTabbedEditor();
const getBarChartAssertionFunction = (specificBarChartAssertionFn = () => {}) => () => {
// checks for TabbedEditor standard tabs
assertTabbedEditor();

// standard chart should be bar
cy.getByTestId("Chart.GlobalSeriesType").contains(".ant-select-selection-item", "Bar");
// standard chart should be bar
cy.getByTestId("Chart.GlobalSeriesType").contains(".ant-select-selection-item", "Bar");

// checks the plot canvas exists and is empty
assertPlotPreview("not.exist");
// checks the plot canvas exists and is empty
assertPlotPreview("not.exist");

// creates a chart and checks it is plotted
cy.getByTestId("Chart.ColumnMapping.x").selectAntdOption("Chart.ColumnMapping.x.stage");
cy.getByTestId("Chart.ColumnMapping.y").selectAntdOption("Chart.ColumnMapping.y.value1");
cy.getByTestId("Chart.ColumnMapping.y").selectAntdOption("Chart.ColumnMapping.y.value2");
assertPlotPreview("exist");
// creates a chart and checks it is plotted
cy.getByTestId("Chart.ColumnMapping.x").selectAntdOption("Chart.ColumnMapping.x.stage");
cy.getByTestId("Chart.ColumnMapping.y").selectAntdOption("Chart.ColumnMapping.y.value1");
cy.getByTestId("Chart.ColumnMapping.y").selectAntdOption("Chart.ColumnMapping.y.value2");
assertPlotPreview("exist");

specificBarChartAssertionFn();
};
specificBarChartAssertionFn();
};

const chartTests = [
{
@@ -95,8 +95,8 @@ describe("Chart", () => {

const withDashboardWidgetsAssertionFn = (widgetGetters, dashboardUrl) => {
cy.visit(dashboardUrl);
widgetGetters.forEach((widgetGetter) => {
cy.get(`@${widgetGetter}`).then((widget) => {
widgetGetters.forEach(widgetGetter => {
cy.get(`@${widgetGetter}`).then(widget => {
cy.getByTestId(getWidgetTestId(widget)).within(() => {
cy.get("g.points").should("exist");
});
@@ -107,34 +107,4 @@ describe("Chart", () => {
createDashboardWithCharts("Bar chart visualizations", chartGetters, withDashboardWidgetsAssertionFn);
cy.percySnapshot("Visualizations - Charts - Bar");
});
it("colors Bar charts", function () {
cy.visit(`queries/${this.queryId}/source`);
cy.getByTestId("ExecuteButton").click();
cy.getByTestId("NewVisualization").click();
cy.getByTestId("Chart.ColumnMapping.x").selectAntdOption("Chart.ColumnMapping.x.stage");
cy.getByTestId("Chart.ColumnMapping.y").selectAntdOption("Chart.ColumnMapping.y.value1");
cy.getByTestId("VisualizationEditor.Tabs.Colors").click();
cy.getByTestId("ColorScheme").click();
cy.getByTestId("ColorOptionViridis").click();
cy.getByTestId("ColorScheme").click();
cy.getByTestId("ColorOptionTableau 10").click();
cy.getByTestId("ColorScheme").click();
cy.getByTestId("ColorOptionD3 Category 10").click();
});
it("colors Pie charts", function () {
cy.visit(`queries/${this.queryId}/source`);
cy.getByTestId("ExecuteButton").click();
cy.getByTestId("NewVisualization").click();
cy.getByTestId("Chart.GlobalSeriesType").click();
cy.getByTestId("Chart.ChartType.pie").click();
cy.getByTestId("Chart.ColumnMapping.x").selectAntdOption("Chart.ColumnMapping.x.stage");
cy.getByTestId("Chart.ColumnMapping.y").selectAntdOption("Chart.ColumnMapping.y.value1");
cy.getByTestId("VisualizationEditor.Tabs.Colors").click();
cy.getByTestId("ColorScheme").click();
cy.getByTestId("ColorOptionViridis").click();
cy.getByTestId("ColorScheme").click();
cy.getByTestId("ColorOptionTableau 10").click();
cy.getByTestId("ColorScheme").click();
cy.getByTestId("ColorOptionD3 Category 10").click();
});
});
@@ -22,7 +22,10 @@ function prepareVisualization(query, type, name, options) {
cy.get("body").type("{alt}D");

// do some pre-checks here to ensure that visualization was created and is visible
cy.getByTestId("TableVisualization").should("exist").find("table").should("exist");
cy.getByTestId("TableVisualization")
.should("exist")
.find("table")
.should("exist");

return cy.then(() => ({ queryId, visualizationId }));
});
@@ -50,7 +53,7 @@ describe("Table", () => {
});

describe("Sorting data", () => {
beforeEach(function () {
beforeEach(function() {
const { query, config } = MultiColumnSort;
prepareVisualization(query, "TABLE", "Sort data", config).then(({ queryId, visualizationId }) => {
this.queryId = queryId;
@@ -58,22 +61,39 @@ describe("Table", () => {
});
});

it("sorts data by a single column", function () {
cy.getByTestId("TableVisualization").find("table th").contains("c").should("exist").click();
it("sorts data by a single column", function() {
cy.getByTestId("TableVisualization")
.find("table th")
.contains("c")
.should("exist")
.click();
cy.percySnapshot("Visualizations - Table (Single-column sort)", { widths: [viewportWidth] });
});

it("sorts data by a multiple columns", function () {
cy.getByTestId("TableVisualization").find("table th").contains("a").should("exist").click();
it("sorts data by a multiple columns", function() {
cy.getByTestId("TableVisualization")
.find("table th")
.contains("a")
.should("exist")
.click();

cy.get("body").type("{shift}", { release: false });
cy.getByTestId("TableVisualization").find("table th").contains("b").should("exist").click();
cy.getByTestId("TableVisualization")
.find("table th")
.contains("b")
.should("exist")
.click();

cy.percySnapshot("Visualizations - Table (Multi-column sort)", { widths: [viewportWidth] });
});

it("sorts data in reverse order", function () {
cy.getByTestId("TableVisualization").find("table th").contains("c").should("exist").click().click();
it("sorts data in reverse order", function() {
cy.getByTestId("TableVisualization")
.find("table th")
.contains("c")
.should("exist")
.click()
.click();
cy.percySnapshot("Visualizations - Table (Single-column reverse sort)", { widths: [viewportWidth] });
});
});
@@ -81,7 +101,10 @@ describe("Table", () => {
it("searches in multiple columns", () => {
const { query, config } = SearchInData;
prepareVisualization(query, "TABLE", "Search", config).then(({ visualizationId }) => {
cy.getByTestId("TableVisualization").find("table input").should("exist").type("test");
cy.getByTestId("TableVisualization")
.find("table input")
.should("exist")
.type("test");
cy.percySnapshot("Visualizations - Table (Search in data)", { widths: [viewportWidth] });
});
});
@@ -2,12 +2,12 @@

const { extend, get, merge, find } = Cypress._;

const post = (options) =>
const post = options =>
cy
.getCookie("csrf_token")
.then((csrf) => cy.request({ ...options, method: "POST", headers: { "X-CSRF-TOKEN": csrf.value } }));
.then(csrf => cy.request({ ...options, method: "POST", headers: { "X-CSRF-TOKEN": csrf.value } }));

Cypress.Commands.add("createDashboard", (name) => {
Cypress.Commands.add("createDashboard", name => {
return post({ url: "api/dashboards", body: { name } }).then(({ body }) => body);
});

@@ -28,7 +28,7 @@ Cypress.Commands.add("createQuery", (data, shouldPublish = true) => {
// eslint-disable-next-line cypress/no-assigning-return-values
let request = post({ url: "/api/queries", body: merged }).then(({ body }) => body);
if (shouldPublish) {
request = request.then((query) =>
request = request.then(query =>
post({ url: `/api/queries/${query.id}`, body: { is_draft: false } }).then(() => query)
);
}
@@ -86,7 +86,6 @@ Cypress.Commands.add("addWidget", (dashboardId, visualizationId, options = {}) =
Cypress.Commands.add("createAlert", (queryId, options = {}, name) => {
const defaultOptions = {
column: "?column?",
selector: "first",
op: "greater than",
rearm: 0,
value: 1,
@@ -110,7 +109,7 @@ Cypress.Commands.add("createUser", ({ name, email, password }) => {
url: "api/users?no_invite=yes",
body: { name, email },
failOnStatusCode: false,
}).then((xhr) => {
}).then(xhr => {
const { status, body } = xhr;
if (status < 200 || status > 400) {
throw new Error(xhr);
@@ -147,7 +146,7 @@ Cypress.Commands.add("getDestinations", () => {
Cypress.Commands.add("addDestinationSubscription", (alertId, destinationName) => {
return cy
.getDestinations()
.then((destinations) => {
.then(destinations => {
const destination = find(destinations, { name: destinationName });
if (!destination) {
throw new Error("Destination not found");
@@ -167,6 +166,6 @@ Cypress.Commands.add("addDestinationSubscription", (alertId, destinationName) =>
});
});

Cypress.Commands.add("updateOrgSettings", (settings) => {
Cypress.Commands.add("updateOrgSettings", settings => {
return post({ url: "api/settings/organization", body: settings }).then(({ body }) => body);
});

@@ -1,10 +1,12 @@
export function expectTableToHaveLength(length) {
cy.getByTestId("TableVisualization").find("tbody tr").should("have.length", length);
cy.getByTestId("TableVisualization")
.find("tbody tr")
.should("have.length", length);
}

export function expectFirstColumnToHaveMembers(values) {
cy.getByTestId("TableVisualization")
.find("tbody tr td:first-child")
.then(($cell) => Cypress.$.map($cell, (item) => Cypress.$(item).text()))
.then((firstColumnCells) => expect(firstColumnCells).to.have.members(values));
.then($cell => Cypress.$.map($cell, item => Cypress.$(item).text()))
.then(firstColumnCells => expect(firstColumnCells).to.have.members(values));
}
@@ -1,5 +1,6 @@
# This configuration file is for the **development** setup.
# For a production example please refer to getredash/setup repository on GitHub.
version: "2.2"
x-redash-service: &redash-service
build:
context: .
@@ -10,7 +11,6 @@ x-redash-service: &redash-service
env_file:
- .env
x-redash-environment: &redash-environment
REDASH_HOST: http://localhost:5001
REDASH_LOG_LEVEL: "INFO"
REDASH_REDIS_URL: "redis://redis:6379/0"
REDASH_DATABASE_URL: "postgresql://postgres@postgres/postgres"
@@ -53,7 +53,7 @@ services:
image: redis:7-alpine
restart: unless-stopped
postgres:
image: pgautoupgrade/pgautoupgrade:latest
image: pgautoupgrade/pgautoupgrade:15-alpine3.8
ports:
- "15432:5432"
# The following turns the DB into less durable, but gains significant performance improvements for the tests run (x3
@@ -24,56 +24,62 @@ def upgrade():
type_=JSONB(astext_type=sa.Text()),
nullable=True,
postgresql_using='options::jsonb',
)
server_default=sa.text("'{}'::jsonb"))
op.alter_column('queries', 'schedule',
existing_type=sa.Text(),
type_=JSONB(astext_type=sa.Text()),
nullable=True,
postgresql_using='schedule::jsonb',
)
server_default=sa.text("'{}'::jsonb"))
op.alter_column('events', 'additional_properties',
existing_type=sa.Text(),
type_=JSONB(astext_type=sa.Text()),
nullable=True,
postgresql_using='additional_properties::jsonb',
)
server_default=sa.text("'{}'::jsonb"))
op.alter_column('organizations', 'settings',
existing_type=sa.Text(),
type_=JSONB(astext_type=sa.Text()),
nullable=True,
postgresql_using='settings::jsonb',
)
server_default=sa.text("'{}'::jsonb"))
op.alter_column('alerts', 'options',
existing_type=JSON(astext_type=sa.Text()),
type_=JSONB(astext_type=sa.Text()),
nullable=True,
postgresql_using='options::jsonb',
)
server_default=sa.text("'{}'::jsonb"))
op.alter_column('dashboards', 'options',
existing_type=JSON(astext_type=sa.Text()),
type_=JSONB(astext_type=sa.Text()),
postgresql_using='options::jsonb',
)
server_default=sa.text("'{}'::jsonb"))
op.alter_column('dashboards', 'layout',
existing_type=sa.Text(),
type_=JSONB(astext_type=sa.Text()),
postgresql_using='layout::jsonb',
)
server_default=sa.text("'{}'::jsonb"))
op.alter_column('query_results', 'data',
existing_type=sa.Text(),
type_=JSONB(astext_type=sa.Text()),
nullable=True,
postgresql_using='data::text',
server_default=sa.text("'{}'::jsonb"))
op.alter_column('changes', 'change',
existing_type=JSON(astext_type=sa.Text()),
type_=JSONB(astext_type=sa.Text()),
postgresql_using='change::jsonb',
)
server_default=sa.text("'{}'::jsonb"))
op.alter_column('visualizations', 'options',
existing_type=sa.Text(),
type_=JSONB(astext_type=sa.Text()),
postgresql_using='options::jsonb',
)
server_default=sa.text("'{}'::jsonb"))
op.alter_column('widgets', 'options',
existing_type=sa.Text(),
type_=JSONB(astext_type=sa.Text()),
postgresql_using='options::jsonb',
)
server_default=sa.text("'{}'::jsonb"))


def downgrade():
@@ -83,53 +89,58 @@ def downgrade():
type_=sa.Text(),
postgresql_using='options::text',
existing_nullable=True,
)
server_default=sa.text("'{}'::text"))
op.alter_column('queries', 'schedule',
existing_type=JSONB(astext_type=sa.Text()),
type_=sa.Text(),
postgresql_using='schedule::text',
existing_nullable=True,
)
server_default=sa.text("'{}'::text"))
op.alter_column('events', 'additional_properties',
existing_type=JSONB(astext_type=sa.Text()),
type_=sa.Text(),
postgresql_using='additional_properties::text',
existing_nullable=True,
)
server_default=sa.text("'{}'::text"))
op.alter_column('organizations', 'settings',
existing_type=JSONB(astext_type=sa.Text()),
type_=sa.Text(),
postgresql_using='settings::text',
existing_nullable=True,
)
server_default=sa.text("'{}'::text"))
op.alter_column('alerts', 'options',
existing_type=JSONB(astext_type=sa.Text()),
type_=JSON(astext_type=sa.Text()),
postgresql_using='options::json',
existing_nullable=True,
)
server_default=sa.text("'{}'::json"))
op.alter_column('dashboards', 'options',
existing_type=JSONB(astext_type=sa.Text()),
type_=JSON(astext_type=sa.Text()),
postgresql_using='options::json',
)
server_default=sa.text("'{}'::json"))
op.alter_column('dashboards', 'layout',
existing_type=JSONB(astext_type=sa.Text()),
type_=sa.Text(),
postgresql_using='layout::text',
)
server_default=sa.text("'{}'::text"))
op.alter_column('query_results', 'data',
existing_type=JSONB(astext_type=sa.Text()),
type_=sa.Text(),
postgresql_using='data::text',
server_default=sa.text("'{}'::text"))
op.alter_column('changes', 'change',
existing_type=JSONB(astext_type=sa.Text()),
type_=JSON(astext_type=sa.Text()),
postgresql_using='change::json',
)
server_default=sa.text("'{}'::json"))
op.alter_column('visualizations', 'options',
type_=sa.Text(),
existing_type=JSONB(astext_type=sa.Text()),
postgresql_using='options::text',
)
server_default=sa.text("'{}'::text"))
op.alter_column('widgets', 'options',
type_=sa.Text(),
existing_type=JSONB(astext_type=sa.Text()),
postgresql_using='options::text',
)
server_default=sa.text("'{}'::text"))
@@ -1,64 +0,0 @@
"""fix_hash

Revision ID: 9e8c841d1a30
Revises: 7205816877ec
Create Date: 2024-10-05 18:55:35.730573

"""
import logging
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table
from sqlalchemy import select

from redash.query_runner import BaseQueryRunner, get_query_runner


# revision identifiers, used by Alembic.
revision = '9e8c841d1a30'
down_revision = '7205816877ec'
branch_labels = None
depends_on = None


def update_query_hash(record):
    should_apply_auto_limit = record['options'].get("apply_auto_limit", False) if record['options'] else False
    query_runner = get_query_runner(record['type'], {}) if record['type'] else BaseQueryRunner({})
    query_text = record['query']

    parameters_dict = {p["name"]: p.get("value") for p in record['options'].get('parameters', [])} if record.options else {}
    if any(parameters_dict):
        print(f"Query {record['query_id']} has parameters. Hash might be incorrect.")

    return query_runner.gen_query_hash(query_text, should_apply_auto_limit)


def upgrade():
    conn = op.get_bind()

    metadata = sa.MetaData(bind=conn)
    queries = sa.Table("queries", metadata, autoload=True)
    data_sources = sa.Table("data_sources", metadata, autoload=True)

    joined_table = queries.outerjoin(data_sources, queries.c.data_source_id == data_sources.c.id)

    query = select([
        queries.c.id.label("query_id"),
        queries.c.query,
        queries.c.query_hash,
        queries.c.options,
        data_sources.c.id.label("data_source_id"),
        data_sources.c.type
    ]).select_from(joined_table)

    for record in conn.execute(query):
        new_hash = update_query_hash(record)
        print(f"Updating hash for query {record['query_id']} from {record['query_hash']} to {new_hash}")
        conn.execute(
            queries.update()
            .where(queries.c.id == record['query_id'])
            .values(query_hash=new_hash))


def downgrade():
    pass
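# The removed migration above recomputes query_hash through the query runner so
# that auto-limit settings are folded into the digest. A rough sketch of the
# hash itself, assuming the usual normalize-then-md5 scheme in redash.utils
# (the exact normalization may differ by version):
import hashlib

def gen_query_hash_sketch(sql):
    normalized = "".join(sql.split()).lower()  # drop all whitespace, lowercase
    return hashlib.md5(normalized.encode("utf-8")).hexdigest()
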
@@ -28,7 +28,7 @@ def upgrade():
                    existing_nullable=True,
                    existing_server_default=sa.text("'{}'::jsonb"))
    ### end Alembic commands ###

    update_query = """
    update users
    set details = details::jsonb || ('{"profile_image_url": "' || profile_image_url || '"}')::jsonb

@@ -6,7 +6,7 @@
command = "cd ../ && yarn cache clean && yarn --frozen-lockfile --network-concurrency 1 && yarn build && cd ./client"

[build.environment]
NODE_VERSION = "18"
NODE_VERSION = "16.20.1"
NETLIFY_USE_YARN = "true"
YARN_VERSION = "1.22.19"
CYPRESS_INSTALL_BINARY = "0"

23
package.json
@@ -1,19 +1,20 @@
{
  "name": "redash-client",
  "version": "25.1.0",
  "version": "24.02.0-dev",
  "description": "The frontend part of Redash.",
  "main": "index.js",
  "scripts": {
    "start": "npm-run-all --parallel watch:viz webpack-dev-server",
    "clean": "rm -rf ./client/dist/",
    "build:viz": "(cd viz-lib && yarn build:babel)",
    "build": "yarn clean && yarn build:viz && NODE_OPTIONS=--openssl-legacy-provider NODE_ENV=production webpack",
    "watch:app": "NODE_OPTIONS=--openssl-legacy-provider webpack watch --progress",
    "build": "yarn clean && yarn build:viz && NODE_ENV=production webpack",
    "build:old-node-version": "yarn clean && NODE_ENV=production node --max-old-space-size=4096 node_modules/.bin/webpack",
    "watch:app": "webpack watch --progress",
    "watch:viz": "(cd viz-lib && yarn watch:babel)",
    "watch": "npm-run-all --parallel watch:*",
    "webpack-dev-server": "webpack-dev-server",
    "analyze": "yarn clean && BUNDLE_ANALYZER=on NODE_OPTIONS=--openssl-legacy-provider webpack",
    "analyze:build": "yarn clean && NODE_ENV=production BUNDLE_ANALYZER=on NODE_OPTIONS=--openssl-legacy-provider webpack",
    "analyze": "yarn clean && BUNDLE_ANALYZER=on webpack",
    "analyze:build": "yarn clean && NODE_ENV=production BUNDLE_ANALYZER=on webpack",
    "lint": "yarn lint:base --ext .js --ext .jsx --ext .ts --ext .tsx ./client",
    "lint:fix": "yarn lint:base --fix --ext .js --ext .jsx --ext .ts --ext .tsx ./client",
    "lint:base": "eslint --config ./client/.eslintrc.js --ignore-path ./client/.eslintignore",
@@ -33,8 +34,7 @@
    "url": "git+https://github.com/getredash/redash.git"
  },
  "engines": {
    "node": ">16.0 <21.0",
    "npm": "please-use-yarn",
    "node": ">14.16.0 <17.0.0",
    "yarn": "^1.22.10"
  },
  "author": "Redash Contributors",
@@ -50,12 +50,11 @@
    "antd": "^4.4.3",
    "axios": "0.27.2",
    "axios-auth-refresh": "3.3.6",
    "bootstrap": "^3.4.1",
    "bootstrap": "^3.3.7",
    "classnames": "^2.2.6",
    "d3": "^3.5.17",
    "debug": "^3.2.7",
    "dompurify": "^2.0.17",
    "elliptic": "^6.6.0",
    "font-awesome": "^4.7.0",
    "history": "^4.10.1",
    "hoist-non-react-statics": "^3.3.0",
@@ -64,7 +63,7 @@
    "mousetrap": "^1.6.1",
    "mustache": "^2.3.0",
    "numeral": "^2.0.6",
    "path-to-regexp": "^3.3.0",
    "path-to-regexp": "^3.1.0",
    "prop-types": "^15.6.1",
    "query-string": "^6.9.0",
    "react": "16.14.0",
@@ -180,8 +179,8 @@
    ]
  },
  "browser": {
    "fs": false,
    "path": false
    "fs": false,
    "path": false
  },
  "//": "browserslist set to 'Async functions' compatibility",
  "browserslist": [

972
poetry.lock
generated
File diff suppressed because it is too large
@@ -12,7 +12,7 @@ force-exclude = '''

[tool.poetry]
name = "redash"
version = "25.1.0"
version = "24.02.0-dev"
description = "Make Your Company Data Driven. Connect to any data source, easily visualize, dashboard and share your data."
authors = ["Arik Fraimovich <arik@redash.io>"]
# to be added to/removed from the mailing list, please reach out to Arik via the above email or Discord
@@ -29,7 +29,7 @@ authlib = "0.15.5"
backoff = "2.2.1"
blinker = "1.6.2"
click = "8.1.3"
cryptography = "43.0.1"
cryptography = "41.0.6"
disposable-email-domains = ">=0.0.52"
flask = "2.3.2"
flask-limiter = "3.3.1"
@@ -43,10 +43,10 @@ flask-wtf = "1.1.1"
funcy = "1.13"
gevent = "23.9.1"
greenlet = "2.0.2"
gunicorn = "22.0.0"
gunicorn = "20.0.4"
httplib2 = "0.19.0"
itsdangerous = "2.1.2"
jinja2 = "3.1.5"
jinja2 = "3.1.3"
jsonschema = "3.1.1"
markupsafe = "2.1.1"
maxminddb-geolite2 = "2018.703"
@@ -54,7 +54,7 @@ parsedatetime = "2.4"
passlib = "1.7.3"
psycopg2-binary = "2.9.6"
pyjwt = "2.4.0"
pyopenssl = "24.2.1"
pyopenssl = "23.2.0"
pypd = "1.1.0"
pysaml2 = "7.3.1"
pystache = "0.6.0"
@@ -64,30 +64,27 @@ pytz = ">=2019.3"
pyyaml = "6.0.1"
redis = "4.6.0"
regex = "2023.8.8"
requests = "2.32.3"
restrictedpython = "7.3"
rq = "1.16.1"
rq-scheduler = "0.13.1"
requests = "2.31.0"
restrictedpython = "6.2"
rq = "1.9.0"
rq-scheduler = "0.11.0"
semver = "2.8.1"
sentry-sdk = "1.45.1"
sentry-sdk = "1.28.1"
sqlalchemy = "1.3.24"
sqlalchemy-searchable = "1.2.0"
sqlalchemy-utils = "0.38.3"
sqlparse = "0.5.0"
sqlalchemy-utils = "0.34.2"
sqlparse = "0.4.4"
sshtunnel = "0.1.5"
statsd = "3.3.0"
supervisor = "4.1.0"
supervisor-checks = "0.8.1"
ua-parser = "0.18.0"
urllib3 = "1.26.19"
urllib3 = "1.26.18"
user-agents = "2.0"
werkzeug = "2.3.8"
wtforms = "2.2.1"
xlsxwriter = "1.2.2"
tzlocal = "4.3.1"
pyodbc = "5.1.0"
debugpy = "^1.8.9"
paramiko = "3.4.1"

[tool.poetry.group.all_ds]
optional = true
@@ -113,26 +110,27 @@ nzalchemy = "^11.0.2"
nzpy = ">=1.15"
oauth2client = "4.1.3"
openpyxl = "3.0.7"
oracledb = "2.1.2"
oracledb = "2.0.1"
pandas = "1.3.4"
phoenixdb = "0.7"
pinotdb = ">=0.4.5"
protobuf = "3.20.2"
pyathena = "2.25.2"
pyathena = ">=1.5.0,<=1.11.5"
pydgraph = "2.0.2"
pydruid = "0.5.7"
pyexasol = "0.12.0"
pyhive = "0.6.1"
pyignite = "0.6.1"
pymongo = { version = "4.6.3", extras = ["srv", "tls"] }
pymssql = "^2.3.1"
pyodbc = "5.1.0"
pymongo = { version = "4.3.3", extras = ["srv", "tls"] }
pymssql = "2.2.8"
pyodbc = "4.0.28"
python-arango = "6.1.0"
python-rapidjson = "1.20"
python-rapidjson = "1.1.0"
qds-sdk = ">=1.9.6"
requests-aws-sign = "0.1.5"
sasl = ">=0.1.3"
simple-salesforce = "0.74.3"
snowflake-connector-python = "3.12.3"
snowflake-connector-python = "3.4.0"
td-client = "1.0.0"
thrift = ">=0.8.0"
thrift-sasl = ">=0.1.0"
@@ -154,10 +152,11 @@ optional = true
pytest = "7.4.0"
coverage = "7.2.7"
freezegun = "1.2.1"
jwcrypto = "1.5.6"
jwcrypto = "1.5.1"
mock = "5.0.2"
pre-commit = "3.3.3"
ptpython = "3.0.23"
ptvsd = "4.3.2"
pytest-cov = "4.1.0"
watchdog = "3.0.0"
ruff = "0.0.289"

@@ -14,14 +14,13 @@ from redash.app import create_app  # noqa
from redash.destinations import import_destinations
from redash.query_runner import import_query_runners

__version__ = "25.1.0"
__version__ = "24.02.0-dev"


if os.environ.get("REMOTE_DEBUG"):
    import debugpy
    import ptvsd

    debugpy.listen(("0.0.0.0", 5678))
    debugpy.wait_for_client()
    ptvsd.enable_attach(address=("0.0.0.0", 5678))


def setup_logging():

@@ -8,7 +8,6 @@ from redash import settings

try:
    from ldap3 import Connection, Server
    from ldap3.utils.conv import escape_filter_chars
except ImportError:
    if settings.LDAP_LOGIN_ENABLED:
        sys.exit(
@@ -70,7 +69,6 @@ def login(org_slug=None):


def auth_ldap_user(username, password):
    clean_username = escape_filter_chars(username)
    server = Server(settings.LDAP_HOST_URL, use_ssl=settings.LDAP_SSL)
    if settings.LDAP_BIND_DN is not None:
        conn = Connection(
@@ -85,7 +83,7 @@ def auth_ldap_user(username, password):

    conn.search(
        settings.LDAP_SEARCH_DN,
        settings.LDAP_SEARCH_TEMPLATE % {"username": clean_username},
        settings.LDAP_SEARCH_TEMPLATE % {"username": username},
        attributes=[settings.LDAP_DISPLAY_NAME_KEY, settings.LDAP_EMAIL_KEY],
    )


@@ -5,22 +5,6 @@ from sqlalchemy.orm.exc import NoResultFound
manager = AppGroup(help="Queries management commands.")


@manager.command(name="rehash")
def rehash():
    from redash import models

    for q in models.Query.query.all():
        old_hash = q.query_hash
        q.update_query_hash()
        new_hash = q.query_hash

        if old_hash != new_hash:
            print(f"Query {q.id} has changed hash from {old_hash} to {new_hash}")
            models.db.session.add(q)

    models.db.session.commit()


@manager.command(name="add_tag")
@argument("query_id")
@argument("tag")

@@ -42,8 +42,8 @@ class Discord(BaseDestination):
                "inline": True,
            },
        ]
        if alert.custom_body:
            fields.append({"name": "Description", "value": alert.custom_body})
        if alert.options.get("custom_body"):
            fields.append({"name": "Description", "value": alert.options["custom_body"]})
        if new_state == Alert.TRIGGERED_STATE:
            if alert.options.get("custom_subject"):
                text = alert.options["custom_subject"]

@@ -26,13 +26,13 @@ class Slack(BaseDestination):
        fields = [
            {
                "title": "Query",
                "type": "mrkdwn",
                "value": "{host}/queries/{query_id}".format(host=host, query_id=query.id),
                "short": True,
            },
            {
                "title": "Alert",
                "type": "mrkdwn",
                "value": "{host}/alerts/{alert_id}".format(host=host, alert_id=alert.id),
                "short": True,
            },
        ]
        if alert.custom_body:
@@ -50,7 +50,7 @@ class Slack(BaseDestination):
        payload = {"attachments": [{"text": text, "color": color, "fields": fields}]}

        try:
            resp = requests.post(options.get("url"), data=json_dumps(payload).encode("utf-8"), timeout=5.0)
            resp = requests.post(options.get("url"), data=json_dumps(payload), timeout=5.0)
            logging.warning(resp.text)
            if resp.status_code != 200:
                logging.error("Slack send ERROR. status_code => {status}".format(status=resp.status_code))

@@ -1,7 +1,7 @@
from flask import request
from funcy import project

from redash import models, utils
from redash import models
from redash.handlers.base import (
    BaseResource,
    get_object_or_404,
@@ -14,10 +14,6 @@ from redash.permissions import (
    view_only,
)
from redash.serializers import serialize_alert
from redash.tasks.alerts import (
    notify_subscriptions,
    should_notify,
)


class AlertResource(BaseResource):
@@ -47,21 +43,6 @@ class AlertResource(BaseResource):
        models.db.session.commit()


class AlertEvaluateResource(BaseResource):
    def post(self, alert_id):
        alert = get_object_or_404(models.Alert.get_by_id_and_org, alert_id, self.current_org)
        require_admin_or_owner(alert.user.id)

        new_state = alert.evaluate()
        if should_notify(alert, new_state):
            alert.state = new_state
            alert.last_triggered_at = utils.utcnow()
            models.db.session.commit()

            notify_subscriptions(alert, new_state, {})
        self.record_event({"action": "evaluate", "object_id": alert.id, "object_type": "alert"})


class AlertMuteResource(BaseResource):
    def post(self, alert_id):
        alert = get_object_or_404(models.Alert.get_by_id_and_org, alert_id, self.current_org)

@@ -3,7 +3,6 @@ from flask_restful import Api
from werkzeug.wrappers import Response

from redash.handlers.alerts import (
    AlertEvaluateResource,
    AlertListResource,
    AlertMuteResource,
    AlertResource,
@@ -118,7 +117,6 @@ def json_representation(data, code, headers=None):

    api.add_org_resource(AlertResource, "/api/alerts/<alert_id>", endpoint="alert")
    api.add_org_resource(AlertMuteResource, "/api/alerts/<alert_id>/mute", endpoint="alert_mute")
    api.add_org_resource(AlertEvaluateResource, "/api/alerts/<alert_id>/eval", endpoint="alert_eval")
    api.add_org_resource(
        AlertSubscriptionListResource,
        "/api/alerts/<alert_id>/subscriptions",

@@ -29,7 +29,6 @@ def get_google_auth_url(next_path):


def render_token_login_page(template, org_slug, token, invite):
    error_message = None
    try:
        user_id = validate_token(token)
        org = current_org._get_current_object()
@@ -41,19 +40,19 @@ def render_token_login_page(template, org_slug, token, invite):
            user_id,
            org_slug,
        )
        error_message = "Your invite link is invalid. Bad user id in token. Please ask for a new one."
    except SignatureExpired:
        logger.exception("Token signature has expired. Token: %s, org=%s", token, org_slug)
        error_message = "Your invite link has expired. Please ask for a new one."
    except BadSignature:
        logger.exception("Bad signature for the token: %s, org=%s", token, org_slug)
        error_message = "Your invite link is invalid. Bad signature. Please double-check the token."

    if error_message:
        return (
            render_template(
                "error.html",
                error_message=error_message,
                error_message="Invalid invite link. Please ask for a new one.",
            ),
            400,
        )
    except (SignatureExpired, BadSignature):
        logger.exception("Failed to verify invite token: %s, org=%s", token, org_slug)
        return (
            render_template(
                "error.html",
                error_message="Your invite link has expired. Please ask for a new one.",
            ),
            400,
        )

@@ -7,13 +7,13 @@ from flask_restful import Resource, abort
from sqlalchemy import cast
from sqlalchemy.dialects.postgresql import ARRAY
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy_utils.functions import sort_query

from redash import settings
from redash.authentication import current_org
from redash.models import db
from redash.tasks import record_event as record_event_task
from redash.utils import json_dumps
from redash.utils.query_order import sort_query

routes = Blueprint("redash", __name__, template_folder=settings.fix_assets_path("templates"))


@@ -42,6 +42,7 @@ class WidgetListResource(BaseResource):

        widget = models.Widget(**widget_properties)
        models.db.session.add(widget)
        models.db.session.commit()

        models.db.session.commit()
        return serialize_widget(widget)

@@ -5,7 +5,7 @@ from flask import g, has_request_context
from sqlalchemy.engine import Engine
from sqlalchemy.event import listens_for
from sqlalchemy.orm.util import _ORMJoin
from sqlalchemy.sql.selectable import Alias, Join
from sqlalchemy.sql.selectable import Alias

from redash import statsd_client

@@ -18,7 +18,7 @@ def _table_name_from_select_element(elt):
    if isinstance(t, Alias):
        t = t.original.froms[0]

    while isinstance(t, _ORMJoin) or isinstance(t, Join):
    while isinstance(t, _ORMJoin):
        t = t.left

    return t.name

@@ -48,7 +48,6 @@ from redash.models.parameterized_query import (
from redash.models.types import (
    Configuration,
    EncryptedConfiguration,
    JSONText,
    MutableDict,
    MutableList,
    json_cast_property,
@@ -316,7 +315,7 @@ class QueryResult(db.Model, BelongsToOrgMixin):
    data_source = db.relationship(DataSource, backref=backref("query_results"))
    query_hash = Column(db.String(32), index=True)
    query_text = Column("query", db.Text)
    data = Column(JSONText, nullable=True)
    data = Column(MutableDict.as_mutable(JSONB), nullable=True)
    runtime = Column(DOUBLE_PRECISION)
    retrieved_at = Column(db.DateTime(True))

@@ -387,10 +386,6 @@ class QueryResult(db.Model, BelongsToOrgMixin):


def should_schedule_next(previous_iteration, now, interval, time=None, day_of_week=None, failures=0):
    # if previous_iteration is None, it means the query has never been run before
    # so we should schedule it immediately
    if previous_iteration is None:
        return True
    # if time exists then interval > 23 hours (82800s)
    # if day_of_week exists then interval > 6 days (518400s)
    if time is None:
@@ -583,8 +578,7 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
        return [
            query
            for query in queries
            if "until" in query.schedule
            and query.schedule["until"] is not None
            if query.schedule["until"] is not None
            and pytz.utc.localize(datetime.datetime.strptime(query.schedule["until"], "%Y-%m-%d")) <= now
        ]

@@ -606,11 +600,6 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
            if query.schedule.get("disabled"):
                continue

            # Skip queries that have None for all schedule values. It's unclear whether this is
            # something that can happen in practice, but we have a test case for it.
            if all(value is None for value in query.schedule.values()):
                continue

            if query.schedule["until"]:
                schedule_until = pytz.utc.localize(datetime.datetime.strptime(query.schedule["until"], "%Y-%m-%d"))

@@ -622,7 +611,7 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
            )

            if should_schedule_next(
                retrieved_at,
                retrieved_at or now,
                now,
                query.schedule["interval"],
                query.schedule["time"],
@@ -925,8 +914,6 @@ def next_state(op, value, threshold):

    if op(value, threshold):
        new_state = Alert.TRIGGERED_STATE
    elif not value_is_number and op not in [OPERATORS.get("!="), OPERATORS.get("=="), OPERATORS.get("equals")]:
        new_state = Alert.UNKNOWN_STATE
    else:
        new_state = Alert.OK_STATE

@@ -938,7 +925,6 @@ class Alert(TimestampMixin, BelongsToOrgMixin, db.Model):
    UNKNOWN_STATE = "unknown"
    OK_STATE = "ok"
    TRIGGERED_STATE = "triggered"
    TEST_STATE = "test"

    id = primary_key("Alert")
    name = Column(db.String(255))
@@ -968,38 +954,17 @@ class Alert(TimestampMixin, BelongsToOrgMixin, db.Model):
        return super(Alert, cls).get_by_id_and_org(object_id, org, Query)

    def evaluate(self):
        data = self.query_rel.latest_query_data.data if self.query_rel.latest_query_data else None
        new_state = self.UNKNOWN_STATE
        data = self.query_rel.latest_query_data.data

        if data and data["rows"] and self.options["column"] in data["rows"][0]:
        if data["rows"] and self.options["column"] in data["rows"][0]:
            op = OPERATORS.get(self.options["op"], lambda v, t: False)

            if "selector" not in self.options:
                selector = "first"
            else:
                selector = self.options["selector"]

            try:
                if selector == "max":
                    max_val = float("-inf")
                    for i in range(len(data["rows"])):
                        max_val = max(max_val, float(data["rows"][i][self.options["column"]]))
                    value = max_val
                elif selector == "min":
                    min_val = float("inf")
                    for i in range(len(data["rows"])):
                        min_val = min(min_val, float(data["rows"][i][self.options["column"]]))
                    value = min_val
                else:
                    value = data["rows"][0][self.options["column"]]

            except ValueError:
                return self.UNKNOWN_STATE

            value = data["rows"][0][self.options["column"]]
            threshold = self.options["value"]

            if value is not None:
                new_state = next_state(op, value, threshold)
            new_state = next_state(op, value, threshold)
        else:
            new_state = self.UNKNOWN_STATE

        return new_state

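# The selector branch added to Alert.evaluate() above reduces the result set to
# one value before comparing it against the threshold. The same reduction as a
# standalone sketch (rows/column/selector stand in for the self.options values):
def select_value(rows, column, selector="first"):
    if selector == "max":
        return max(float(row[column]) for row in rows)  # ValueError -> unknown state
    if selector == "min":
        return min(float(row[column]) for row in rows)
    return rows[0][column]  # "first": compare the first row as-is
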
@@ -1022,11 +987,11 @@ class Alert(TimestampMixin, BelongsToOrgMixin, db.Model):
        result_table = []  # A two-dimensional array which can be rendered as a table in Mustache
        for row in data["rows"]:
            result_table.append([row[col["name"]] for col in data["columns"]])

        context = {
            "ALERT_NAME": self.name,
            "ALERT_URL": "{host}/alerts/{alert_id}".format(host=host, alert_id=self.id),
            "ALERT_STATUS": self.state.upper(),
            "ALERT_SELECTOR": self.options["selector"],
            "ALERT_CONDITION": self.options["op"],
            "ALERT_THRESHOLD": self.options["value"],
            "QUERY_NAME": self.query_rel.name,

@@ -1,4 +1,3 @@
import re
from functools import partial
from numbers import Number

@@ -89,16 +88,6 @@ def _is_number(string):
    return True


def _is_regex_pattern(value, regex):
    try:
        if re.compile(regex).fullmatch(value):
            return True
        else:
            return False
    except re.error:
        return False


def _is_date(string):
    parse(string)
    return True
@@ -146,7 +135,6 @@ class ParameterizedQuery:

        enum_options = definition.get("enumOptions")
        query_id = definition.get("queryId")
        regex = definition.get("regex")
        allow_multiple_values = isinstance(definition.get("multiValuesOptions"), dict)

        if isinstance(enum_options, str):
@@ -154,7 +142,6 @@ class ParameterizedQuery:

        validators = {
            "text": lambda value: isinstance(value, str),
            "text-pattern": lambda value: _is_regex_pattern(value, regex),
            "number": _is_number,
            "enum": lambda value: _is_value_within_options(value, enum_options, allow_multiple_values),
            "query": lambda value: _is_value_within_options(

@@ -3,7 +3,6 @@ from sqlalchemy.ext.mutable import Mutable
from sqlalchemy.types import TypeDecorator
from sqlalchemy_utils import EncryptedType

from redash.utils import json_dumps, json_loads
from redash.utils.configuration import ConfigurationContainer

from .base import db
@@ -29,22 +28,6 @@ class EncryptedConfiguration(EncryptedType):
        )


# Utilized for cases when JSON size is bigger than JSONB (255MB) or JSON (10MB) limit
class JSONText(TypeDecorator):
    impl = db.Text

    def process_bind_param(self, value, dialect):
        if value is None:
            return value

        return json_dumps(value)

    def process_result_value(self, value, dialect):
        if not value:
            return value
        return json_loads(value)


class MutableDict(Mutable, dict):
    @classmethod
    def coerce(cls, key, value):

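# JSONText above round-trips Python dicts through a plain TEXT column, so
# payloads past the JSONB/JSON size limits still persist. A hypothetical usage
# sketch (ExampleResult is illustrative, not part of the diff):
from sqlalchemy import Column, Integer

class ExampleResult(db.Model):
    __tablename__ = "example_results"
    id = Column(Integer, primary_key=True)
    data = Column(JSONText, nullable=True)  # dict in Python, JSON text in Postgres
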
@@ -166,7 +166,7 @@ class User(TimestampMixin, db.Model, BelongsToOrgMixin, UserMixin, PermissionsCh
        if self._profile_image_url:
            return self._profile_image_url

        email_md5 = hashlib.md5(self.email.lower().encode(), usedforsecurity=False).hexdigest()
        email_md5 = hashlib.md5(self.email.lower().encode()).hexdigest()
        return "https://www.gravatar.com/avatar/{}?s=40&d=identicon".format(email_md5)

    @property
@@ -233,9 +233,7 @@ class User(TimestampMixin, db.Model, BelongsToOrgMixin, UserMixin, PermissionsCh
        return AccessPermission.exists(obj, access_type, grantee=self)

    def get_id(self):
        identity = hashlib.md5(
            "{},{}".format(self.email, self.password_hash).encode(), usedforsecurity=False
        ).hexdigest()
        identity = hashlib.md5("{},{}".format(self.email, self.password_hash).encode()).hexdigest()
        return "{0}-{1}".format(self.id, identity)

    def get_actual_user(self):

@@ -59,7 +59,7 @@ def get_status():


def rq_job_ids():
    queues = Queue.all(connection=rq_redis_connection)
    queues = Queue.all(connection=redis_connection)

    started_jobs = [StartedJobRegistry(queue=q).get_job_ids() for q in queues]
    queued_jobs = [q.job_ids for q in queues]

@@ -119,7 +119,6 @@ class BaseQueryRunner:
    noop_query = None
    limit_query = " LIMIT 1000"
    limit_keywords = ["LIMIT", "OFFSET"]
    limit_after_select = False

    def __init__(self, configuration):
        self.syntax = "sql"
@@ -302,19 +301,10 @@ class BaseSQLQueryRunner(BaseQueryRunner):
        parsed_query = sqlparse.parse(query)[0]
        limit_tokens = sqlparse.parse(self.limit_query)[0].tokens
        length = len(parsed_query.tokens)
        if not self.limit_after_select:
            if parsed_query.tokens[length - 1].ttype == sqlparse.tokens.Punctuation:
                parsed_query.tokens[length - 1 : length - 1] = limit_tokens
            else:
                parsed_query.tokens += limit_tokens
        if parsed_query.tokens[length - 1].ttype == sqlparse.tokens.Punctuation:
            parsed_query.tokens[length - 1 : length - 1] = limit_tokens
        else:
            for i in range(length - 1, -1, -1):
                if parsed_query[i].value.upper() == "SELECT":
                    index = parsed_query.token_index(parsed_query[i + 1])
                    parsed_query = sqlparse.sql.Statement(
                        parsed_query.tokens[:index] + limit_tokens + parsed_query.tokens[index:]
                    )
                    break
            parsed_query.tokens += limit_tokens
        return str(parsed_query)

    def apply_auto_limit(self, query_text, should_apply_auto_limit):

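# A usage sketch of the LIMIT handling refactored above: the sqlparse rewrite
# splices the runner's limit_query before a trailing semicolon, or appends it
# otherwise (illustrative only; limit_after_select runners inject after SELECT):
import sqlparse

parsed = sqlparse.parse("SELECT col FROM t;")[0]
limit_tokens = sqlparse.parse(" LIMIT 1000")[0].tokens
length = len(parsed.tokens)
if parsed.tokens[length - 1].ttype == sqlparse.tokens.Punctuation:
    parsed.tokens[length - 1 : length - 1] = limit_tokens  # insert before the ';'
else:
    parsed.tokens += limit_tokens
assert str(parsed) == "SELECT col FROM t LIMIT 1000;"
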
@@ -63,8 +63,5 @@ class AmazonElasticsearchService(ElasticSearch2):

        self.auth = AWSV4Sign(cred, region, "es")

    def get_auth(self):
        return self.auth


register(AmazonElasticsearchService)

@@ -76,10 +76,6 @@ class Athena(BaseQueryRunner):
                    "default": "default",
                },
                "glue": {"type": "boolean", "title": "Use Glue Data Catalog"},
                "catalog_ids": {
                    "type": "string",
                    "title": "Enter Glue Data Catalog IDs, separated by commas (leave blank for default catalog)",
                },
                "work_group": {
                    "type": "string",
                    "title": "Athena Work Group",
@@ -90,26 +86,15 @@ class Athena(BaseQueryRunner):
                    "title": "Athena cost per Tb scanned (USD)",
                    "default": 5,
                },
                "result_reuse_enable": {
                    "type": "boolean",
                    "title": "Reuse Athena query results",
                },
                "result_reuse_minutes": {
                    "type": "number",
                    "title": "Minutes to reuse Athena query results",
                    "default": 60,
                },
            },
            "required": ["region", "s3_staging_dir"],
            "extra_options": ["glue", "catalog_ids", "cost_per_tb", "result_reuse_enable", "result_reuse_minutes"],
            "extra_options": ["glue", "cost_per_tb"],
            "order": [
                "region",
                "s3_staging_dir",
                "schema",
                "work_group",
                "cost_per_tb",
                "result_reuse_enable",
                "result_reuse_minutes",
            ],
            "secret": ["aws_secret_key"],
        }
@@ -187,53 +172,35 @@ class Athena(BaseQueryRunner):
            "region_name": self.configuration["region"],
        }

    def __get_schema_from_glue(self, catalog_id=""):
    def __get_schema_from_glue(self):
        client = boto3.client("glue", **self._get_iam_credentials())
        schema = {}

        database_paginator = client.get_paginator("get_databases")
        table_paginator = client.get_paginator("get_tables")

        databases_iterator = database_paginator.paginate(
            **({"CatalogId": catalog_id} if catalog_id != "" else {}),
        )

        for databases in databases_iterator:
        for databases in database_paginator.paginate():
            for database in databases["DatabaseList"]:
                iterator = table_paginator.paginate(
                    DatabaseName=database["Name"],
                    **({"CatalogId": catalog_id} if catalog_id != "" else {}),
                )
                iterator = table_paginator.paginate(DatabaseName=database["Name"])
                for table in iterator.search("TableList[]"):
                    table_name = "%s.%s" % (database["Name"], table["Name"])
                    if "StorageDescriptor" not in table:
                        logger.warning("Glue table doesn't have StorageDescriptor: %s", table_name)
                        continue
                    if table_name not in schema:
                        schema[table_name] = {"name": table_name, "columns": []}

                    for column_data in table["StorageDescriptor"]["Columns"]:
                        column = {
                            "name": column_data["Name"],
                            "type": column_data["Type"] if "Type" in column_data else None,
                        }
                        schema[table_name]["columns"].append(column)
                    for partition in table.get("PartitionKeys", []):
                        partition_column = {
                            "name": partition["Name"],
                            "type": partition["Type"] if "Type" in partition else None,
                        }
                        schema[table_name]["columns"].append(partition_column)
                    column = [columns["Name"] for columns in table["StorageDescriptor"]["Columns"]]
                    schema[table_name] = {"name": table_name, "columns": column}
                    for partition in table.get("PartitionKeys", []):
                        schema[table_name]["columns"].append(partition["Name"])
        return list(schema.values())

    def get_schema(self, get_stats=False):
        if self.configuration.get("glue", False):
            catalog_ids = [id.strip() for id in self.configuration.get("catalog_ids", "").split(",")]
            return sum([self.__get_schema_from_glue(catalog_id) for catalog_id in catalog_ids], [])
            return self.__get_schema_from_glue()

        schema = {}
        query = """
        SELECT table_schema, table_name, column_name, data_type
        SELECT table_schema, table_name, column_name
        FROM information_schema.columns
        WHERE table_schema NOT IN ('information_schema')
        """
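# Note on the catalog_ids handling in the newer branch above: "".split(",")
# yields [""], so an empty setting still makes exactly one
# __get_schema_from_glue("") call, and the CatalogId kwarg is dropped, i.e.
# the default catalog is used. A minimal demonstration:
catalog_ids = [cid.strip() for cid in "".split(",")]
assert catalog_ids == [""]  # one entry -> one pass over the default catalog
kwargs = {"CatalogId": catalog_ids[0]} if catalog_ids[0] != "" else {}
assert kwargs == {}  # paginator called without CatalogId
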
@@ -246,7 +213,7 @@ class Athena(BaseQueryRunner):
            table_name = "{0}.{1}".format(row["table_schema"], row["table_name"])
            if table_name not in schema:
                schema[table_name] = {"name": table_name, "columns": []}
            schema[table_name]["columns"].append({"name": row["column_name"], "type": row["data_type"]})
            schema[table_name]["columns"].append(row["column_name"])

        return list(schema.values())

@@ -258,8 +225,6 @@ class Athena(BaseQueryRunner):
            kms_key=self.configuration.get("kms_key", None),
            work_group=self.configuration.get("work_group", "primary"),
            formatter=SimpleFormatter(),
            result_reuse_enable=self.configuration.get("result_reuse_enable", False),
            result_reuse_minutes=self.configuration.get("result_reuse_minutes", 60),
            **self._get_iam_credentials(user=user),
        ).cursor()


@@ -7,7 +7,6 @@ from base64 import b64decode
from redash import settings
from redash.query_runner import (
    TYPE_BOOLEAN,
    TYPE_DATE,
    TYPE_DATETIME,
    TYPE_FLOAT,
    TYPE_INTEGER,
@@ -38,8 +37,6 @@ types_map = {
    "BOOLEAN": TYPE_BOOLEAN,
    "STRING": TYPE_STRING,
    "TIMESTAMP": TYPE_DATETIME,
    "DATETIME": TYPE_DATETIME,
    "DATE": TYPE_DATE,
}


@@ -103,7 +100,7 @@ class BigQuery(BaseQueryRunner):

    def __init__(self, configuration):
        super().__init__(configuration)
        self.should_annotate_query = configuration.get("useQueryAnnotation", False)
        self.should_annotate_query = configuration["useQueryAnnotation"]

    @classmethod
    def enabled(cls):

@@ -26,6 +26,12 @@ def generate_ssl_options_dict(protocol, cert_path=None):
    return ssl_options


def json_encoder(dec, o):
    if isinstance(o, sortedset):
        return list(o)
    return None


class Cassandra(BaseQueryRunner):
    noop_query = "SELECT dateof(now()) FROM system.local"

@@ -33,12 +39,6 @@ class Cassandra(BaseQueryRunner):
    def enabled(cls):
        return enabled

    @classmethod
    def custom_json_encoder(cls, dec, o):
        if isinstance(o, sortedset):
            return list(o)
        return None

    @classmethod
    def configuration_schema(cls):
        return {

@@ -121,7 +121,7 @@ class ClickHouse(BaseSQLQueryRunner):
                verify=verify,
            )

            if not r.ok:
            if r.status_code != 200:
                raise Exception(r.text)

            # In certain situations the response body can be empty even if the query was successful, for example
@@ -129,11 +129,7 @@ class ClickHouse(BaseSQLQueryRunner):
            if not r.text:
                return {}

            response = r.json()
            if "exception" in response:
                raise Exception(response["exception"])

            return response
            return r.json()
        except requests.RequestException as e:
            if e.response:
                details = "({}, Status Code: {})".format(e.__class__.__name__, e.response.status_code)

@@ -91,7 +91,7 @@ class BaseElasticSearch(BaseQueryRunner):

        logger.setLevel(logging.DEBUG)

        self.server_url = self.configuration.get("server", "")
        self.server_url = self.configuration["server"]
        if self.server_url[-1] == "/":
            self.server_url = self.server_url[:-1]

@@ -129,8 +129,6 @@ class BaseElasticSearch(BaseQueryRunner):
        for index_name in mappings_data:
            index_mappings = mappings_data[index_name]
            for m in index_mappings.get("mappings", {}):
                if not isinstance(index_mappings["mappings"][m], dict):
                    continue
                if "properties" not in index_mappings["mappings"][m]:
                    continue
                for property_name in index_mappings["mappings"][m]["properties"]:

@@ -1,4 +1,3 @@
import json
import logging
from typing import Optional, Tuple

@@ -46,7 +45,7 @@ class ElasticSearch2(BaseHTTPQueryRunner):
        self.syntax = "json"

    def get_response(self, url, auth=None, http_method="get", **kwargs):
        url = "{}{}".format(self.configuration["server"], url)
        url = "{}{}".format(self.configuration["url"], url)
        headers = kwargs.pop("headers", {})
        headers["Accept"] = "application/json"
        return super().get_response(url, auth, http_method, headers=headers, **kwargs)
@@ -65,7 +64,6 @@ class ElasticSearch2(BaseHTTPQueryRunner):
        return data, error

    def _build_query(self, query: str) -> Tuple[dict, str, Optional[list]]:
        query = json.loads(query)
        index_name = query.pop("index", "")
        result_fields = query.pop("result_fields", None)
        url = "/{}/_search".format(index_name)

File diff suppressed because it is too large
@@ -42,6 +42,16 @@ TYPES_MAP = {
}


def json_encoder(dec, o):
    if isinstance(o, ObjectId):
        return str(o)
    elif isinstance(o, Timestamp):
        return dec.default(o.as_datetime())
    elif isinstance(o, Decimal128):
        return o.to_decimal()
    return None


date_regex = re.compile(r'ISODate\("(.*)"\)', re.IGNORECASE)


@@ -69,7 +79,7 @@ def datetime_parser(dct):
        return bson_object_hook(dct, json_options=opts)


def parse_query_json(query: str):
def parse_query_json(query):
    query_data = json_loads(query, object_hook=datetime_parser)
    return query_data

@@ -82,66 +92,40 @@ def _get_column_by_name(columns, column_name):
    return None


def _parse_dict(dic: dict, flatten: bool = False) -> dict:
def _parse_dict(dic):
    res = {}

    def _flatten(x, name=""):
        if isinstance(x, dict):
            for k, v in x.items():
                _flatten(v, "{}.{}".format(name, k))
        elif isinstance(x, list):
            for idx, item in enumerate(x):
                _flatten(item, "{}.{}".format(name, idx))
    for key, value in dic.items():
        if isinstance(value, dict):
            for tmp_key, tmp_value in _parse_dict(value).items():
                new_key = "{}.{}".format(key, tmp_key)
                res[new_key] = tmp_value
        else:
            res[name[1:]] = x

    if flatten:
        _flatten(dic)
    else:
        for key, value in dic.items():
            if isinstance(value, dict):
                for tmp_key, tmp_value in _parse_dict(value).items():
                    new_key = "{}.{}".format(key, tmp_key)
                    res[new_key] = tmp_value
            else:
                res[key] = value
            res[key] = value
    return res


def parse_results(results: list, flatten: bool = False) -> list:
def parse_results(results):
    rows = []
    columns = []

    for row in results:
        parsed_row = {}

        parsed_row = _parse_dict(row, flatten)
        parsed_row = _parse_dict(row)
        for column_name, value in parsed_row.items():
            if _get_column_by_name(columns, column_name) is None:
                columns.append(
                    {
                        "name": column_name,
                        "friendly_name": column_name,
                        "type": TYPES_MAP.get(type(value), TYPE_STRING),
                    }
                )
            columns.append(
                {
                    "name": column_name,
                    "friendly_name": column_name,
                    "type": TYPES_MAP.get(type(value), TYPE_STRING),
                }
            )

        rows.append(parsed_row)

    return rows, columns


def _sorted_fields(fields):
    ord = {}
    for k, v in fields.items():
        if isinstance(v, int):
            ord[k] = v
        else:
            ord[k] = len(fields)

    return sorted(ord, key=ord.get)


class MongoDB(BaseQueryRunner):
    should_annotate_query = False

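# What the removed flatten path changes, on a concrete document: with flatten
# enabled, nested dicts *and* lists collapse into dotted keys (lists by index);
# without it, only nested dicts do. Illustrative values:
doc = {"a": {"b": 1}, "tags": ["x", "y"]}
# _parse_dict(doc, flatten=True)  -> {"a.b": 1, "tags.0": "x", "tags.1": "y"}
# _parse_dict(doc, flatten=False) -> {"a.b": 1, "tags": ["x", "y"]}
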
@@ -166,14 +150,6 @@ class MongoDB(BaseQueryRunner):
                    ],
                    "title": "Replica Set Read Preference",
                },
                "flatten": {
                    "type": "string",
                    "extendedEnum": [
                        {"value": "False", "name": "False"},
                        {"value": "True", "name": "True"},
                    ],
                    "title": "Flatten Results",
                },
            },
            "secret": ["password"],
            "required": ["connectionString", "dbName"],
@@ -188,25 +164,12 @@ class MongoDB(BaseQueryRunner):

        self.syntax = "json"

        self.db_name = self.configuration.get("dbName", "")
        self.db_name = self.configuration["dbName"]

        self.is_replica_set = (
            True if "replicaSetName" in self.configuration and self.configuration["replicaSetName"] else False
        )

        self.flatten = self.configuration.get("flatten", "False").upper() in ["TRUE", "YES", "ON", "1", "Y", "T"]
        logger.debug("flatten: {}".format(self.flatten))

    @classmethod
    def custom_json_encoder(cls, dec, o):
        if isinstance(o, ObjectId):
            return str(o)
        elif isinstance(o, Timestamp):
            return dec.default(o.as_datetime())
        elif isinstance(o, Decimal128):
            return o.to_decimal()
        return None

    def _get_db(self):
        kwargs = {}
        if self.is_replica_set:
@@ -315,10 +278,8 @@ class MongoDB(BaseQueryRunner):
                if "$sort" in step:
                    sort_list = []
                    for sort_item in step["$sort"]:
                        if isinstance(sort_item, dict):
                            sort_list.append((sort_item["name"], sort_item.get("direction", 1)))
                        elif isinstance(sort_item, list):
                            sort_list.append(tuple(sort_item))
                        sort_list.append((sort_item["name"], sort_item["direction"]))

                    step["$sort"] = SON(sort_list)

        if "fields" in query_data:
@@ -328,10 +289,7 @@ class MongoDB(BaseQueryRunner):
        if "sort" in query_data and query_data["sort"]:
            s = []
            for field_data in query_data["sort"]:
                if isinstance(field_data, dict):
                    s.append((field_data["name"], field_data.get("direction", 1)))
                elif isinstance(field_data, list):
                    s.append(tuple(field_data))
                s.append((field_data["name"], field_data["direction"]))

        columns = []
        rows = []
@@ -372,17 +330,16 @@ class MongoDB(BaseQueryRunner):

                rows.append({"count": cursor})
            else:
                rows, columns = parse_results(cursor, flatten=self.flatten)
                rows, columns = parse_results(cursor)

        if f:
            ordered_columns = []
            for k in _sorted_fields(f):
            for k in sorted(f, key=f.get):
                column = _get_column_by_name(columns, k)
                if column:
                    ordered_columns.append(column)

            columns = ordered_columns
        logger.debug("columns: {}".format(columns))

        if query_data.get("sortColumns"):
            reverse = query_data["sortColumns"] == "desc"

@@ -34,10 +34,6 @@ class SqlServer(BaseSQLQueryRunner):
    should_annotate_query = False
    noop_query = "SELECT 1"

    limit_query = " TOP 1000"
    limit_keywords = ["TOP"]
    limit_after_select = True

    @classmethod
    def configuration_schema(cls):
        return {

@@ -21,10 +21,6 @@ class SQLServerODBC(BaseSQLQueryRunner):
    should_annotate_query = False
    noop_query = "SELECT 1"

    limit_query = " TOP 1000"
    limit_keywords = ["TOP"]
    limit_after_select = True

    @classmethod
    def configuration_schema(cls):
        return {
@@ -48,7 +44,7 @@ class SQLServerODBC(BaseSQLQueryRunner):
                "verify_ssl": {
                    "type": "boolean",
                    "title": "Verify SSL certificate",
                    "default": False,
                    "default": True,
                },
            },
            "order": [
@@ -120,29 +116,14 @@ class SQLServerODBC(BaseSQLQueryRunner):
            db = self.configuration["db"]
            port = self.configuration.get("port", 1433)

            connection_params = {
                "Driver": "{ODBC Driver 18 for SQL Server}",
                "Server": server,
                "Port": port,
                "Database": db,
                "Uid": user,
                "Pwd": password,
            }
            connection_string_fmt = "DRIVER={{ODBC Driver 17 for SQL Server}};SERVER={},{};DATABASE={};UID={};PWD={}"
            connection_string = connection_string_fmt.format(server, port, db, user, password)

            if self.configuration.get("use_ssl", False):
                connection_params["Encrypt"] = "YES"
                connection_string += ";Encrypt=YES"

                if not self.configuration.get("verify_ssl"):
                    connection_params["TrustServerCertificate"] = "YES"
                else:
                    connection_params["TrustServerCertificate"] = "NO"
            else:
                connection_params["Encrypt"] = "NO"

            def fn(k):
                return "{}={}".format(k, connection_params[k])

            connection_string = ";".join(list(map(fn, connection_params)))
                connection_string += ";TrustServerCertificate=YES"

            connection = pyodbc.connect(connection_string)
            cursor = connection.cursor()

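# The dict-based builder in the newer branch above joins connection_params into
# the ODBC string; for use_ssl=True with verify_ssl=False it produces roughly
# the following (host and credentials are placeholder values):
params = {"Driver": "{ODBC Driver 18 for SQL Server}", "Server": "host", "Port": 1433,
          "Database": "db", "Uid": "user", "Pwd": "secret",
          "Encrypt": "YES", "TrustServerCertificate": "YES"}
conn_str = ";".join("{}={}".format(k, v) for k, v in params.items())
# Driver={ODBC Driver 18 for SQL Server};Server=host;Port=1433;Database=db;Uid=user;Pwd=secret;Encrypt=YES;TrustServerCertificate=YES
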
@@ -55,6 +55,18 @@ types_map = {
}


def json_encoder(dec, o):
    if isinstance(o, Range):
        # From: https://github.com/psycopg/psycopg2/pull/779
        if o._bounds is None:
            return ""

        items = [o._bounds[0], str(o._lower), ", ", str(o._upper), o._bounds[1]]

        return "".join(items)
    return None


def _wait(conn, timeout=None):
    while 1:
        try:
@@ -183,18 +195,6 @@ class PostgreSQL(BaseSQLQueryRunner):
    def type(cls):
        return "pg"

    @classmethod
    def custom_json_encoder(cls, dec, o):
        if isinstance(o, Range):
            # From: https://github.com/psycopg/psycopg2/pull/779
            if o._bounds is None:
                return ""

            items = [o._bounds[0], str(o._lower), ", ", str(o._upper), o._bounds[1]]

            return "".join(items)
        return None

    def _get_definitions(self, schema, query):
        results, error = self.run_query(query, None)

@@ -231,9 +231,7 @@ class PostgreSQL(BaseSQLQueryRunner):
            ON a.attrelid = c.oid
            AND a.attnum > 0
            AND NOT a.attisdropped
            WHERE c.relkind IN ('m', 'f', 'p')
            AND has_table_privilege(s.nspname || '.' || c.relname, 'select')
            AND has_schema_privilege(s.nspname, 'usage')
            WHERE c.relkind IN ('m', 'f', 'p') AND has_table_privilege(s.nspname || '.' || c.relname, 'select')

            UNION

@@ -388,13 +386,12 @@ class Redshift(PostgreSQL):
            SELECT DISTINCT table_name,
                            table_schema,
                            column_name,
                            data_type,
                            ordinal_position AS pos
            FROM svv_columns
            WHERE table_schema NOT IN ('pg_internal','pg_catalog','information_schema')
            AND table_schema NOT LIKE 'pg_temp_%'
            )
            SELECT table_name, table_schema, column_name, data_type
            SELECT table_name, table_schema, column_name
            FROM tables
            WHERE
                HAS_SCHEMA_PRIVILEGE(table_schema, 'USAGE') AND

169
redash/query_runner/qubole.py
Normal file
@@ -0,0 +1,169 @@
import logging
import time
from io import StringIO

import requests

from redash.query_runner import (
    TYPE_STRING,
    BaseQueryRunner,
    JobTimeoutException,
    register,
)

try:
    import qds_sdk  # noqa: F401
    from qds_sdk.commands import (
        Command,
        HiveCommand,
        PrestoCommand,
        SqlCommand,
    )
    from qds_sdk.qubole import Qubole as qbol

    enabled = True
except ImportError:
    enabled = False


class Qubole(BaseQueryRunner):
    should_annotate_query = False

    @classmethod
    def configuration_schema(cls):
        return {
            "type": "object",
            "properties": {
                "query_type": {
                    "type": "string",
                    "title": "Query Type (quantum / presto / hive)",
                    "default": "hive",
                },
                "endpoint": {
                    "type": "string",
                    "title": "API Endpoint",
                    "default": "https://api.qubole.com",
                },
                "token": {"type": "string", "title": "Auth Token"},
                "cluster": {
                    "type": "string",
                    "title": "Cluster Label",
                    "default": "default",
                },
            },
            "order": ["query_type", "endpoint", "token", "cluster"],
            "required": ["endpoint", "token"],
            "secret": ["token"],
        }

    @classmethod
    def type(cls):
        return "qubole"

    @classmethod
    def name(cls):
        return "Qubole"

    @classmethod
    def enabled(cls):
        return enabled

    def test_connection(self):
        headers = self._get_header()
        r = requests.head("%s/api/latest/users" % self.configuration.get("endpoint"), headers=headers)
        r.status_code == 200

    def run_query(self, query, user):
        qbol.configure(
            api_token=self.configuration.get("token"),
            api_url="%s/api" % self.configuration.get("endpoint"),
        )

        try:
            query_type = self.configuration.get("query_type", "hive")

            if query_type == "quantum":
                cmd = SqlCommand.create(query=query)
            elif query_type == "hive":
                cmd = HiveCommand.create(query=query, label=self.configuration.get("cluster"))
            elif query_type == "presto":
                cmd = PrestoCommand.create(query=query, label=self.configuration.get("cluster"))
            else:
                raise Exception(
                    "Invalid Query Type:%s.\
                    It must be : hive / presto / quantum."
                    % self.configuration.get("query_type")
                )

            logging.info("Qubole command created with Id: %s and Status: %s", cmd.id, cmd.status)

            while not Command.is_done(cmd.status):
                time.sleep(qbol.poll_interval)
                cmd = Command.find(cmd.id)
                logging.info("Qubole command Id: %s and Status: %s", cmd.id, cmd.status)

            rows = []
            columns = []
            error = None

            if cmd.status == "done":
                fp = StringIO()
                cmd.get_results(
                    fp=fp,
                    inline=True,
                    delim="\t",
                    fetch=False,
                    qlog=None,
                    arguments=["true"],
                )

                results = fp.getvalue()
                fp.close()

                data = results.split("\r\n")
                columns = self.fetch_columns([(i, TYPE_STRING) for i in data.pop(0).split("\t")])
                rows = [dict(zip((column["name"] for column in columns), row.split("\t"))) for row in data]

                data = {"columns": columns, "rows": rows}
        except (KeyboardInterrupt, JobTimeoutException):
            logging.info("Sending KILL signal to Qubole Command Id: %s", cmd.id)
            cmd.cancel()
            raise

        return data, error

    def get_schema(self, get_stats=False):
        schemas = {}
        try:
            headers = self._get_header()
            content = requests.get(
                "%s/api/latest/hive?describe=true&per_page=10000" % self.configuration.get("endpoint"),
                headers=headers,
            )
            data = content.json()

            for schema in data["schemas"]:
                tables = data["schemas"][schema]
                for table in tables:
                    table_name = list(table.keys())[0]
                    columns = [f["name"] for f in table[table_name]["columns"]]

                    if schema != "default":
                        table_name = "{}.{}".format(schema, table_name)

                    schemas[table_name] = {"name": table_name, "columns": columns}

        except Exception as e:
            logging.error("Failed to get schema information from Qubole. Error {}".format(str(e)))

        return list(schemas.values())

    def _get_header(self):
        return {
            "Content-type": "application/json",
            "Accept": "application/json",
            "X-AUTH-TOKEN": self.configuration.get("token"),
        }


register(Qubole)
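# How run_query above turns Qubole's raw payload into Redash rows: results are
# CRLF-separated, tab-delimited, header first. A standalone sketch:
raw = "id\tname\r\n1\talice\r\n2\tbob"
lines = raw.split("\r\n")
header = lines.pop(0).split("\t")  # ['id', 'name']
rows = [dict(zip(header, line.split("\t"))) for line in lines]
# [{'id': '1', 'name': 'alice'}, {'id': '2', 'name': 'bob'}]
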
@@ -1,5 +1,3 @@
import datetime
import decimal
import hashlib
import logging
import re
@@ -90,9 +88,7 @@ def create_tables_from_query_ids(user, connection, query_ids, query_params, cach

    for query in set(query_params):
        results = get_query_results(user, query[0], False, query[1])
        table_hash = hashlib.md5(
            "query_{query}_{hash}".format(query=query[0], hash=query[1]).encode(), usedforsecurity=False
        ).hexdigest()
        table_hash = hashlib.md5("query_{query}_{hash}".format(query=query[0], hash=query[1]).encode()).hexdigest()
        table_name = "query_{query_id}_{param_hash}".format(query_id=query[0], param_hash=table_hash)
        create_table(connection, table_name, results)

@@ -109,10 +105,6 @@ def fix_column_name(name):
def flatten(value):
    if isinstance(value, (list, dict)):
        return json_dumps(value)
    elif isinstance(value, decimal.Decimal):
        return float(value)
    elif isinstance(value, datetime.timedelta):
        return str(value)
    else:
        return value

@@ -144,9 +136,7 @@ def create_table(connection, table_name, query_results):

def prepare_parameterized_query(query, query_params):
    for params in query_params:
        table_hash = hashlib.md5(
            "query_{query}_{hash}".format(query=params[0], hash=params[1]).encode(), usedforsecurity=False
        ).hexdigest()
        table_hash = hashlib.md5("query_{query}_{hash}".format(query=params[0], hash=params[1]).encode()).hexdigest()
        key = "param_query_{query_id}_{{{param_string}}}".format(query_id=params[0], param_string=params[1])
        value = "query_{query_id}_{param_hash}".format(query_id=params[0], param_hash=table_hash)
        query = query.replace(key, value)

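# The hashing above maps a "param_query_<id>_{<params>}" placeholder in the SQL
# text to a stable physical table name. Sketch for one query (values illustrative):
import hashlib

query_id, param_string = 42, "limit=10"
table_hash = hashlib.md5("query_{}_{}".format(query_id, param_string).encode()).hexdigest()
key = "param_query_{}_{{{}}}".format(query_id, param_string)  # param_query_42_{limit=10}
value = "query_{}_{}".format(query_id, table_hash)            # query_42_<md5 digest>
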
@@ -1,45 +0,0 @@
|
||||
from redash.query_runner import register
|
||||
from redash.query_runner.pg import PostgreSQL
|
||||
|
||||
|
||||
class RisingWave(PostgreSQL):
|
||||
@classmethod
|
||||
def type(cls):
|
||||
return "risingwave"
|
||||
|
||||
@classmethod
|
||||
def name(cls):
|
||||
return "RisingWave"
|
||||
|
||||
def _get_tables(self, schema):
|
||||
query = """
|
||||
SELECT s.nspname as table_schema,
|
||||
c.relname as table_name,
|
||||
a.attname as column_name,
|
||||
null as data_type
|
||||
FROM pg_class c
|
||||
JOIN pg_namespace s
|
||||
ON c.relnamespace = s.oid
|
||||
AND s.nspname NOT IN ('pg_catalog', 'information_schema', 'rw_catalog')
|
||||
JOIN pg_attribute a
|
||||
ON a.attrelid = c.oid
|
||||
AND a.attnum > 0
|
||||
AND NOT a.attisdropped
|
||||
WHERE c.relkind IN ('m', 'f', 'p')
|
||||
|
||||
UNION
|
||||
|
||||
SELECT table_schema,
|
||||
table_name,
|
||||
column_name,
|
||||
data_type
|
||||
FROM information_schema.columns
|
||||
WHERE table_schema NOT IN ('pg_catalog', 'information_schema', 'rw_catalog');
|
||||
"""
|
||||
|
||||
self._get_definitions(schema, query)
|
||||
|
||||
return list(schema.values())
|
||||
|
||||
|
||||
register(RisingWave)
|
||||
@@ -55,13 +55,12 @@ class Script(BaseQueryRunner):
|
||||
def __init__(self, configuration):
|
||||
super(Script, self).__init__(configuration)
|
||||
|
||||
path = self.configuration.get("path", "")
|
||||
# If path is * allow any execution path
|
||||
if path == "*":
|
||||
if self.configuration["path"] == "*":
|
||||
return
|
||||
|
||||
# Poor man's protection against running scripts from outside the scripts directory
|
||||
if path.find("../") > -1:
|
||||
if self.configuration["path"].find("../") > -1:
|
||||
raise ValueError("Scripts can only be run from the configured scripts directory")
|
||||
|
||||
def test_connection(self):
|
||||
|
||||
@@ -28,7 +28,7 @@ class Sqlite(BaseSQLQueryRunner):
|
||||
def __init__(self, configuration):
|
||||
super(Sqlite, self).__init__(configuration)
|
||||
|
||||
self._dbpath = self.configuration.get("dbpath", "")
|
||||
self._dbpath = self.configuration["dbpath"]
|
||||
|
||||
def _get_tables(self, schema):
|
||||
query_table = "select tbl_name from sqlite_master where type='table'"
|
||||
|
||||
@@ -1,6 +1,6 @@
import functools

from flask import session
from flask import request, session
from flask_login import current_user
from flask_talisman import talisman
from flask_wtf.csrf import CSRFProtect, generate_csrf
@@ -35,6 +35,17 @@ def init_app(app):

    @app.before_request
    def check_csrf():
        # BEGIN workaround until https://github.com/lepture/flask-wtf/pull/419 is merged
        if request.blueprint in csrf._exempt_blueprints:
            return

        view = app.view_functions.get(request.endpoint)
        dest = f"{view.__module__}.{view.__name__}"

        if dest in csrf._exempt_views:
            return
        # END workaround

        if not current_user.is_authenticated or "user_id" in session:
            csrf.protect()
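A stripped-down sketch of the workaround above; the exempt sets stand in for flask-wtf's private csrf._exempt_blueprints / csrf._exempt_views, and the names are made up:

from flask import Flask, request

app = Flask(__name__)
exempt_blueprints = {"webhooks"}  # stands in for csrf._exempt_blueprints
exempt_views = {"app.ping"}       # stands in for csrf._exempt_views

@app.before_request
def check_csrf():
    if request.blueprint in exempt_blueprints:
        return  # whole blueprint opted out
    view = app.view_functions.get(request.endpoint)
    if view and f"{view.__module__}.{view.__name__}" in exempt_views:
        return  # single view opted out
    # otherwise csrf.protect() would run here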
@@ -3,7 +3,6 @@ This will eventually replace all the `to_dict` methods of the different model
classes we have. This will ensure cleaner code and better
separation of concerns.
"""

from flask_login import current_user
from funcy import project
from rq.job import JobStatus
@@ -277,9 +276,6 @@ def serialize_job(job):
        JobStatus.STARTED: 2,
        JobStatus.FINISHED: 3,
        JobStatus.FAILED: 4,
        JobStatus.CANCELED: 5,
        JobStatus.DEFERRED: 6,
        JobStatus.SCHEDULED: 7,
    }

    job_status = job.get_status()
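A sketch of how such a mapping is typically consumed: rq's string statuses are translated to stable integers so API clients never see raw rq values. The QUEUED entry and the fallback are assumptions, since the hunk starts at STARTED:

from rq.job import JobStatus

STATUS_CODES = {
    JobStatus.QUEUED: 1,  # assumed: not visible in the hunk
    JobStatus.STARTED: 2,
    JobStatus.FINISHED: 3,
    JobStatus.FAILED: 4,
    JobStatus.CANCELED: 5,
    JobStatus.DEFERRED: 6,
    JobStatus.SCHEDULED: 7,
}

def status_code(job_status):
    # Fallback choice is illustrative, not Redash's actual rule.
    return STATUS_CODES.get(job_status, 4)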
@@ -50,7 +50,6 @@ QUERY_RESULTS_EXPIRED_TTL_ENABLED = parse_boolean(os.environ.get("REDASH_QUERY_R
QUERY_RESULTS_EXPIRED_TTL = int(os.environ.get("REDASH_QUERY_RESULTS_EXPIRED_TTL", "86400"))

SCHEMAS_REFRESH_SCHEDULE = int(os.environ.get("REDASH_SCHEMAS_REFRESH_SCHEDULE", 30))
SCHEMAS_REFRESH_TIMEOUT = int(os.environ.get("REDASH_SCHEMAS_REFRESH_TIMEOUT", 300))

AUTH_TYPE = os.environ.get("REDASH_AUTH_TYPE", "api_key")
INVITATION_TOKEN_MAX_AGE = int(os.environ.get("REDASH_INVITATION_TOKEN_MAX_AGE", 60 * 60 * 24 * 7))
@@ -313,6 +312,7 @@ default_query_runners = [
    "redash.query_runner.salesforce",
    "redash.query_runner.query_results",
    "redash.query_runner.prometheus",
    "redash.query_runner.qubole",
    "redash.query_runner.db2",
    "redash.query_runner.druid",
    "redash.query_runner.kylin",
@@ -339,7 +339,6 @@ default_query_runners = [
    "redash.query_runner.ignite",
    "redash.query_runner.oracle",
    "redash.query_runner.e6data",
    "redash.query_runner.risingwave",
]

enabled_query_runners = array_from_string(
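The settings above all follow one pattern: read an environment variable, fall back to a default, coerce the type. A sketch with assumed helper bodies (the real parse_boolean / array_from_string live elsewhere in settings):

import os

def parse_boolean(s):  # assumed implementation
    return str(s).lower() in ("true", "yes", "1")

def array_from_string(s):  # assumed implementation
    return [part.strip() for part in s.split(",") if part.strip()]

# e.g. REDASH_SCHEMAS_REFRESH_TIMEOUT=600 raises the 300s default
SCHEMAS_REFRESH_TIMEOUT = int(os.environ.get("REDASH_SCHEMAS_REFRESH_TIMEOUT", 300))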
@@ -48,7 +48,7 @@ def subscribe(form):
        "security_notifications": form["security_notifications"],
        "newsletter": form["newsletter"],
    }
    requests.post("https://version.redash.io/subscribe", json=data)
    requests.post("https://beacon.redash.io/subscribe", json=data)


@job("emails")
@@ -72,7 +72,7 @@ def test_connection(data_source_id):
    return True


@job("schemas", queue_class=Queue, at_front=True, timeout=settings.SCHEMAS_REFRESH_TIMEOUT, ttl=90)
@job("schemas", queue_class=Queue, at_front=True, timeout=300, ttl=90)
def get_schema(data_source_id, refresh):
    try:
        data_source = models.DataSource.get_by_id(data_source_id)
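On the @job swap above: the decorator's keyword arguments become the enqueue defaults for the task, so pointing timeout at settings.SCHEMAS_REFRESH_TIMEOUT makes the limit configurable instead of a hard-coded 300 seconds. A sketch using rq's stock decorator (Redash's own @job wrapper may differ):

from rq.decorators import job

@job("schemas", at_front=True, timeout=600, ttl=90)
def get_schema(data_source_id, refresh):
    ...  # enqueued with a 600s hard limit instead of rq's default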
@@ -1,7 +1,5 @@
import signal
import sys
import time
from collections import deque

import redis
from rq import get_current_job
@@ -57,7 +55,7 @@ def enqueue_query(query, data_source, user_id, is_api_key=False, scheduled_query
        if job_complete:
            message = "job found is complete (%s)" % status
        elif job_cancelled:
            message = "job found has been cancelled"
            message = "job found has ben cancelled"
    except NoSuchJobError:
        message = "job found has expired"
        job_exists = False
@@ -147,30 +145,6 @@ def _resolve_user(user_id, is_api_key, query_id):
    return None


def _get_size_iterative(dict_obj):
    """Iteratively finds size of objects in bytes"""
    seen = set()
    size = 0
    objects = deque([dict_obj])

    while objects:
        current = objects.popleft()
        if id(current) in seen:
            continue
        seen.add(id(current))
        size += sys.getsizeof(current)

        if isinstance(current, dict):
            objects.extend(current.keys())
            objects.extend(current.values())
        elif hasattr(current, "__dict__"):
            objects.append(current.__dict__)
        elif hasattr(current, "__iter__") and not isinstance(current, (str, bytes, bytearray)):
            objects.extend(current)

    return size
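A quick usage sketch of _get_size_iterative as defined above (the sample payload is made up): it walks containers breadth-first and de-duplicates by id(), so shared or cyclic references are counted once, which a bare sys.getsizeof cannot do:

import sys

payload = {"rows": [{"id": 1}, {"id": 2}], "meta": {"count": 2}}
payload["self"] = payload  # cycle: terminates anyway thanks to the seen set

print(sys.getsizeof(payload))        # shallow: the dict object only
print(_get_size_iterative(payload))  # deep: keys, values, nested items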
class QueryExecutor:
    def __init__(self, query, data_source_id, user_id, is_api_key, metadata, is_scheduled_query):
        self.job = get_current_job()
@@ -221,7 +195,7 @@ class QueryExecutor:
            "job=execute_query query_hash=%s ds_id=%d data_length=%s error=[%s]",
            self.query_hash,
            self.data_source_id,
            data and _get_size_iterative(data),
            data and len(data),
            error,
        )

@@ -157,7 +157,7 @@ def remove_ghost_locks():
    logger.info("Locks found: {}, Locks removed: {}".format(len(locks), count))


@job("schemas", timeout=settings.SCHEMAS_REFRESH_TIMEOUT)
@job("schemas")
def refresh_schema(data_source_id):
    ds = models.DataSource.get_by_id(data_source_id)
    logger.info("task=refresh_schema state=start ds_id=%s", ds.id)
@@ -6,7 +6,7 @@ import sys
from rq import Queue as BaseQueue
from rq.job import Job as BaseJob
from rq.job import JobStatus
from rq.timeouts import HorseMonitorTimeoutException
from rq.timeouts import HorseMonitorTimeoutException, UnixSignalDeathPenalty
from rq.utils import utcnow
from rq.worker import (
    HerokuWorker,  # HerokuWorker implements graceful shutdown on SIGTERM
@@ -113,44 +113,30 @@ class HardLimitingWorker(BaseWorker):
        )
        self.kill_horse()

    def monitor_work_horse(self, job: "Job", queue: "Queue"):
    def monitor_work_horse(self, job, queue):
        """The worker will monitor the work horse and make sure that it
        either executes successfully or the status of the job is set to
        failed

        Args:
            job (Job): _description_
            queue (Queue): _description_
        """
        self.monitor_started = utcnow()
        retpid = ret_val = rusage = None
        job.started_at = utcnow()
        while True:
            try:
                with self.death_penalty_class(self.job_monitoring_interval, HorseMonitorTimeoutException):
                    retpid, ret_val, rusage = self.wait_for_horse()
                with UnixSignalDeathPenalty(self.job_monitoring_interval, HorseMonitorTimeoutException):
                    retpid, ret_val = os.waitpid(self._horse_pid, 0)
                break
            except HorseMonitorTimeoutException:
                # Horse has not exited yet and is still running.
                # Send a heartbeat to keep the worker alive.
                self.set_current_job_working_time((utcnow() - job.started_at).total_seconds())
                self.heartbeat(self.job_monitoring_interval + 5)

                job.refresh()
                # Kill the job from this side if something is really wrong (interpreter lock/etc).
                if job.timeout != -1 and self.current_job_working_time > (job.timeout + 60):  # type: ignore
                    self.heartbeat(self.job_monitoring_interval + 60)
                    self.kill_horse()
                    self.wait_for_horse()
                    break

                self.maintain_heartbeats(job)

                if job.is_cancelled:
                    self.stop_executing_job(job)

                if self.soft_limit_exceeded(job):
                    self.enforce_hard_limit(job)

            except OSError as e:
                # In case we encountered an OSError due to EINTR (which is
                # caused by a SIGINT or SIGTERM signal during
@@ -163,32 +149,29 @@ class HardLimitingWorker(BaseWorker):
                # Send a heartbeat to keep the worker alive.
                self.heartbeat()

        self.set_current_job_working_time(0)
        self._horse_pid = 0  # Set horse PID to 0, horse has finished working
        if ret_val == os.EX_OK:  # The process exited normally.
            return

        job_status = job.get_status()

        if job_status is None:  # Job completed and its ttl has expired
            return
        elif self._stopped_job_id == job.id:
            # Work-horse killed deliberately
            self.log.warning("Job stopped by user, moving job to FailedJobRegistry")
            if job.stopped_callback:
                job.execute_stopped_callback(self.death_penalty_class)
            self.handle_job_failure(job, queue=queue, exc_string="Job stopped by user, work-horse terminated.")
        elif job_status not in [JobStatus.FINISHED, JobStatus.FAILED]:
        if job_status not in [JobStatus.FINISHED, JobStatus.FAILED]:
            if not job.ended_at:
                job.ended_at = utcnow()

            # Unhandled failure: move the job to the failed queue
            signal_msg = f" (signal {os.WTERMSIG(ret_val)})" if ret_val and os.WIFSIGNALED(ret_val) else ""
            exc_string = f"Work-horse terminated unexpectedly; waitpid returned {ret_val}{signal_msg}; "
            self.log.warning("Moving job to FailedJobRegistry (%s)", exc_string)
            self.log.warning(
                (
                    "Moving job to FailedJobRegistry "
                    "(work-horse terminated unexpectedly; waitpid returned {})"  # fmt: skip
                ).format(ret_val)
            )

            self.handle_work_horse_killed(job, retpid, ret_val, rusage)
            self.handle_job_failure(job, queue=queue, exc_string=exc_string)
            self.handle_job_failure(
                job,
                queue=queue,
                exc_string="Work-horse process was terminated unexpectedly "
                "(waitpid returned %s)" % ret_val,  # fmt: skip
            )


class RedashWorker(StatsdRecordingWorker, HardLimitingWorker):
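RedashWorker composes the two mixins above; a toy model of the cooperative-super pattern it relies on (class bodies here are illustrative, not the real implementations):

class BaseWorker:
    def execute_job(self, job):
        print("run", job)

class StatsdRecordingWorker(BaseWorker):
    def execute_job(self, job):
        print("statsd: jobs.started += 1")  # metrics concern only
        super().execute_job(job)

class HardLimitingWorker(BaseWorker):
    def execute_job(self, job):
        print("arm hard time limit")        # enforcement concern only
        super().execute_job(job)

class RedashWorker(StatsdRecordingWorker, HardLimitingWorker):
    pass

RedashWorker().execute_job("query-123")  # statsd -> limit -> run, per the MRO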
@@ -60,7 +60,7 @@ def gen_query_hash(sql):
    """
    sql = COMMENTS_REGEX.sub("", sql)
    sql = "".join(sql.split())
    return hashlib.md5(sql.encode("utf-8"), usedforsecurity=False).hexdigest()
    return hashlib.md5(sql.encode("utf-8")).hexdigest()


def generate_token(length):
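A worked example of the normalization in gen_query_hash above; COMMENTS_REGEX is assumed to match `--` line comments (its definition is outside the hunk), and usedforsecurity=False requires Python 3.9+:

import hashlib
import re

COMMENTS_REGEX = re.compile(r"--.*$", re.MULTILINE)  # assumed definition

def gen_query_hash(sql):
    sql = COMMENTS_REGEX.sub("", sql)
    sql = "".join(sql.split())
    return hashlib.md5(sql.encode("utf-8"), usedforsecurity=False).hexdigest()

a = gen_query_hash("SELECT *\nFROM users  -- all users")
b = gen_query_hash("SELECT  *  FROM users")
print(a == b)  # True: comments and whitespace don't change the hash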
@@ -70,13 +70,14 @@ def generate_token(length):
    return "".join(rand.choice(chars) for x in range(length))


json_encoders = [m.custom_json_encoder for m in sys.modules if hasattr(m, "custom_json_encoder")]


class JSONEncoder(json.JSONEncoder):
    """Adapter for `json.dumps`."""

    def __init__(self, **kwargs):
        from redash.query_runner import query_runners

        self.encoders = [r.custom_json_encoder for r in query_runners.values() if hasattr(r, "custom_json_encoder")]
        self.encoders = json_encoders
        super().__init__(**kwargs)

    def default(self, o):
@@ -110,7 +111,7 @@ class JSONEncoder(json.JSONEncoder):
        elif isinstance(o, bytes):
            result = binascii.hexlify(o).decode()
        else:
            result = super().default(o)
            result = super(JSONEncoder, self).default(o)
        return result
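The change above moves encoder discovery from a per-instance scan of query_runners to a module-level json_encoders list; the chaining idea itself, in a hypothetical standalone form:

import json

def encode_memoryview(o):  # hypothetical custom_json_encoder
    return bytes(o).hex() if isinstance(o, memoryview) else None

class ChainingEncoder(json.JSONEncoder):
    encoders = [encode_memoryview]

    def default(self, o):
        for encode in self.encoders:
            result = encode(o)
            if result is not None:
                return result
        return super().default(o)  # raises TypeError for unknown types

print(json.dumps({"blob": memoryview(b"\x01\x02")}, cls=ChainingEncoder))
# {"blob": "0102"}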
@@ -1,310 +0,0 @@
# Copyright (c) 2012, Konsta Vesterinen
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
#   list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
#   this list of conditions and the following disclaimer in the documentation
#   and/or other materials provided with the distribution.
#
# * The names of the contributors may not be used to endorse or promote products
#   derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

from inspect import isclass

import sqlalchemy as sa
from sqlalchemy.orm import mapperlib
from sqlalchemy.orm.properties import ColumnProperty
from sqlalchemy.orm.query import _ColumnEntity
from sqlalchemy.orm.util import AliasedInsp
from sqlalchemy.sql.expression import asc, desc


def get_query_descriptor(query, entity, attr):
    if attr in query_labels(query):
        return attr
    else:
        entity = get_query_entity_by_alias(query, entity)
        if entity:
            descriptor = get_descriptor(entity, attr)
            if hasattr(descriptor, "property") and isinstance(descriptor.property, sa.orm.RelationshipProperty):
                return
            return descriptor


def query_labels(query):
    """
    Return all labels for given SQLAlchemy query object.

    Example::

        query = session.query(
            Category,
            db.func.count(Article.id).label('articles')
        )
        query_labels(query)  # ['articles']

    :param query: SQLAlchemy Query object
    """
    return [
        entity._label_name for entity in query._entities if isinstance(entity, _ColumnEntity) and entity._label_name
    ]


def get_query_entity_by_alias(query, alias):
    entities = get_query_entities(query)
    if not alias:
        return entities[0]

    for entity in entities:
        if isinstance(entity, sa.orm.util.AliasedClass):
            name = sa.inspect(entity).name
        else:
            name = get_mapper(entity).tables[0].name

        if name == alias:
            return entity


def get_query_entities(query):
    """
    Return a list of all entities present in given SQLAlchemy query object.

    Examples::

        from sqlalchemy_utils import get_query_entities

        query = session.query(Category)
        get_query_entities(query)  # [<Category>]

        query = session.query(Category.id)
        get_query_entities(query)  # [<Category>]

    This function also supports queries with joins.

    ::

        query = session.query(Category).join(Article)
        get_query_entities(query)  # [<Category>, <Article>]

    .. versionchanged: 0.26.7
        This function now returns a list instead of generator

    :param query: SQLAlchemy Query object
    """
    exprs = [
        d["expr"] if is_labeled_query(d["expr"]) or isinstance(d["expr"], sa.Column) else d["entity"]
        for d in query.column_descriptions
    ]
    return [get_query_entity(expr) for expr in exprs] + [get_query_entity(entity) for entity in query._join_entities]


def is_labeled_query(expr):
    return isinstance(expr, sa.sql.elements.Label) and isinstance(
        list(expr.base_columns)[0], (sa.sql.selectable.Select, sa.sql.selectable.ScalarSelect)
    )


def get_query_entity(expr):
    if isinstance(expr, sa.orm.attributes.InstrumentedAttribute):
        return expr.parent.class_
    elif isinstance(expr, sa.Column):
        return expr.table
    elif isinstance(expr, AliasedInsp):
        return expr.entity
    return expr


def get_mapper(mixed):
    """
    Return related SQLAlchemy Mapper for given SQLAlchemy object.

    :param mixed: SQLAlchemy Table / Alias / Mapper / declarative model object

    ::

        from sqlalchemy_utils import get_mapper

        get_mapper(User)
        get_mapper(User())
        get_mapper(User.__table__)
        get_mapper(User.__mapper__)
        get_mapper(sa.orm.aliased(User))
        get_mapper(sa.orm.aliased(User.__table__))

    Raises:
        ValueError: if multiple mappers were found for given argument

    .. versionadded: 0.26.1
    """
    if isinstance(mixed, sa.orm.query._MapperEntity):
        mixed = mixed.expr
    elif isinstance(mixed, sa.Column):
        mixed = mixed.table
    elif isinstance(mixed, sa.orm.query._ColumnEntity):
        mixed = mixed.expr

    if isinstance(mixed, sa.orm.Mapper):
        return mixed
    if isinstance(mixed, sa.orm.util.AliasedClass):
        return sa.inspect(mixed).mapper
    if isinstance(mixed, sa.sql.selectable.Alias):
        mixed = mixed.element
    if isinstance(mixed, AliasedInsp):
        return mixed.mapper
    if isinstance(mixed, sa.orm.attributes.InstrumentedAttribute):
        mixed = mixed.class_
    if isinstance(mixed, sa.Table):
        mappers = [mapper for mapper in mapperlib._mapper_registry if mixed in mapper.tables]
        if len(mappers) > 1:
            raise ValueError("Multiple mappers found for table '%s'." % mixed.name)
        elif not mappers:
            raise ValueError("Could not get mapper for table '%s'." % mixed.name)
        else:
            return mappers[0]
    if not isclass(mixed):
        mixed = type(mixed)
    return sa.inspect(mixed)


def get_polymorphic_mappers(mixed):
    if isinstance(mixed, AliasedInsp):
        return mixed.with_polymorphic_mappers
    else:
        return mixed.polymorphic_map.values()


def get_descriptor(entity, attr):
    mapper = sa.inspect(entity)

    for key, descriptor in get_all_descriptors(mapper).items():
        if attr == key:
            prop = descriptor.property if hasattr(descriptor, "property") else None
            if isinstance(prop, ColumnProperty):
                if isinstance(entity, sa.orm.util.AliasedClass):
                    for c in mapper.selectable.c:
                        if c.key == attr:
                            return c
                else:
                    # If the property belongs to a class that uses
                    # polymorphic inheritance we have to take into account
                    # situations where the attribute exists in child class
                    # but not in parent class.
                    return getattr(prop.parent.class_, attr)
            else:
                # Handle synonyms, relationship properties and hybrid
                # properties
                if isinstance(entity, sa.orm.util.AliasedClass):
                    return getattr(entity, attr)
                try:
                    return getattr(mapper.class_, attr)
                except AttributeError:
                    pass


def get_all_descriptors(expr):
    if isinstance(expr, sa.sql.selectable.Selectable):
        return expr.c
    insp = sa.inspect(expr)
    try:
        polymorphic_mappers = get_polymorphic_mappers(insp)
    except sa.exc.NoInspectionAvailable:
        return get_mapper(expr).all_orm_descriptors
    else:
        attrs = dict(get_mapper(expr).all_orm_descriptors)
        for submapper in polymorphic_mappers:
            for key, descriptor in submapper.all_orm_descriptors.items():
                if key not in attrs:
                    attrs[key] = descriptor
        return attrs


class QuerySorterException(Exception):
    pass


class QuerySorter:
    def __init__(self, silent=True, separator="-"):
        self.separator = separator
        self.silent = silent

    def assign_order_by(self, entity, attr, func):
        expr = get_query_descriptor(self.query, entity, attr)
        if expr is not None:
            return self.query.order_by(func(expr))
        if not self.silent:
            raise QuerySorterException("Could not sort query with expression '%s'" % attr)
        return self.query

    def parse_sort_arg(self, arg):
        if arg[0] == self.separator:
            func = desc
            arg = arg[1:]
        else:
            func = asc

        parts = arg.split(self.separator)
        return {
            "entity": parts[0] if len(parts) > 1 else None,
            "attr": parts[1] if len(parts) > 1 else arg,
            "func": func,
        }

    def __call__(self, query, *args):
        self.query = query

        for sort in args:
            if not sort:
                continue
            self.query = self.assign_order_by(**self.parse_sort_arg(sort))
        return self.query


def sort_query(query, *args, **kwargs):
    """
    Applies an sql ORDER BY for given query. This function can be easily used
    with user-defined sorting.

    The examples use the following model definition:

    ::

        import sqlalchemy as sa
        from sqlalchemy import create_engine
        from sqlalchemy.orm import sessionmaker
        from sqlalchemy.ext.declarative import declarative_base
        from sqlalchemy_utils import sort_query

        engine = create_engine(
            'sqlite:///'
        )
        Base = declarative_base()
        Session = sessionmaker(bind=engine)
        session = Session()

        class Category(Base):
            __tablename__ = 'category'
            id = sa.Column(sa.Integer, primary_key=True)
            name = sa.Column(sa.Unicode(255))

        class Article(Base):
            __tablename__ = 'article'
            id = sa.Column(sa.Integer, primary_key=True)
            name = sa.Column(sa.Unicode(255))
            category_id = sa.Column(sa.Integer, sa.ForeignKey(Category.id))
            category = sa.orm.relationship(
                Category, primaryjoin=category_id == Category.id
            )

    1. Applying simple ascending sort

    ::

        query = session.query(Article)
        query = sort_query(query, 'name')

    2. Applying descending sort

    ::

        query = sort_query(query, '-name')

    3. Applying sort to custom calculated label

    ::

        query = session.query(
            Category, sa.func.count(Article.id).label('articles')
        )
        query = sort_query(query, 'articles')

    4. Applying sort to joined table column

    ::

        query = session.query(Article).join(Article.category)
        query = sort_query(query, 'category-name')

    :param query:
        query to be modified
    :param sort:
        string that defines the label or column to sort the query by
    :param silent:
        Whether or not to raise exceptions if unknown sort column
        is passed. By default this is `True` indicating that no errors should
        be raised for unknown columns.
    """
    return QuerySorter(**kwargs)(query, *args)
Some files were not shown because too many files have changed in this diff.