Mirror of https://github.com/getredash/redash.git
Synced 2025-12-25 10:00:45 -05:00

Compare commits: 24.08.0-dev...25.07.0-dev (105 commits)
| SHA1 |
|---|
| c9ddd2a7d6 |
| 6b1e910126 |
| 14550a9a6c |
| b80c5f6a7c |
| e46d44f208 |
| a1a4bc9d3e |
| 0900178d24 |
| 5d31429ca8 |
| 2f35ceb803 |
| 8e6c02ecde |
| 231fd36d46 |
| 0b6a53a079 |
| 6167edf97c |
| 4ed0ad3c9c |
| 2375f0b05f |
| eced377ae4 |
| 84262fe143 |
| 612eb8c630 |
| 866fb48afb |
| 353776e8e1 |
| 594e2f24ef |
| 3275a9e459 |
| 3bad8c8e8c |
| d0af4499d6 |
| 4357ea56ae |
| 5df5ca87a2 |
| 8387fe6fcb |
| e95de2ee4c |
| 71902e5933 |
| 53eab14cef |
| 925bb91d8e |
| ec2ca6f986 |
| 96ea0194e8 |
| 2776992101 |
| 85f001982e |
| d03a2c4096 |
| 8c5890482a |
| 10ce280a96 |
| 0dd7ac3d2e |
| 4ee53a9445 |
| c08292d90e |
| 3142131cdd |
| 530c1a0734 |
| 52dc1769a1 |
| b9583c0b48 |
| 89d7f54e90 |
| d884da2b0b |
| f7d485082c |
| 130ab1fe1a |
| 2ff83679fe |
| de49b73855 |
| c12e68f5d1 |
| baa9bbd505 |
| 349cd5d031 |
| 49277d27f8 |
| 2aae5705c9 |
| 38d0579660 |
| 673ba769c7 |
| b922730482 |
| ba973eb1fe |
| d8dde6c544 |
| d359a716a7 |
| ba4293912b |
| ee359120ee |
| 04a25f4327 |
| 7c22756e66 |
| a03668f5b2 |
| e4a841a0c5 |
| 38dc31a49b |
| c42b15125c |
| 590d39bc8d |
| 79bbb248bb |
| 5cf0b7b038 |
| fb1a056561 |
| 75e1ce4c9c |
| d6c6e3bb7a |
| 821c1a9488 |
| 76eeea1f64 |
| 2ab07f9fc3 |
| a85b9d7801 |
| 3330815081 |
| c25c65bc04 |
| 79a4c4c9c9 |
| 58a7438cc8 |
| c073c1e154 |
| 159a329e26 |
| 9de135c0bd |
| 285c2b6e56 |
| b1fe2d4162 |
| a4f92a8fb5 |
| 51ef625a30 |
| a2611b89a3 |
| a531597016 |
| e59c02f497 |
| c1a60bf6d2 |
| 72203655ec |
| 5257e39282 |
| ec70ff4408 |
| ed8c05f634 |
| 86b75db82e |
| 660d04b0f1 |
| fc1e1f7a01 |
| 8725fa4737 |
| ea0b3cbe3a |
| 714b950fde |
.github/workflows/ci.yml (vendored, 26 changes)

```diff
@@ -3,7 +3,7 @@ on:
   push:
     branches:
       - master
-  pull_request_target:
+  pull_request:
     branches:
       - master
 env:
@@ -60,15 +60,17 @@ jobs:
           mkdir -p /tmp/test-results/unit-tests
           docker cp tests:/app/coverage.xml ./coverage.xml
           docker cp tests:/app/junit.xml /tmp/test-results/unit-tests/results.xml
-      - name: Upload coverage reports to Codecov
-        uses: codecov/codecov-action@v3
+      # - name: Upload coverage reports to Codecov
+      #   uses: codecov/codecov-action@v3
+      #   with:
+      #     token: ${{ secrets.CODECOV_TOKEN }}
       - name: Store Test Results
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
-          name: test-results
+          name: backend-test-results
           path: /tmp/test-results
       - name: Store Coverage Results
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
           name: coverage
           path: coverage.xml
@@ -94,9 +96,9 @@ jobs:
       - name: Run Lint
         run: yarn lint:ci
       - name: Store Test Results
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
-          name: test-results
+          name: frontend-test-results
           path: /tmp/test-results

   frontend-unit-tests:
@@ -132,9 +134,9 @@ jobs:
       COMPOSE_PROJECT_NAME: cypress
       CYPRESS_INSTALL_BINARY: 0
       PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: 1
-      PERCY_TOKEN: ${{ secrets.PERCY_TOKEN }}
-      CYPRESS_PROJECT_ID: ${{ secrets.CYPRESS_PROJECT_ID }}
-      CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
+      # PERCY_TOKEN: ${{ secrets.PERCY_TOKEN }}
+      # CYPRESS_PROJECT_ID: ${{ secrets.CYPRESS_PROJECT_ID }}
+      # CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
     steps:
       - if: github.event.pull_request.mergeable == 'false'
         name: Exit if PR is not mergeable
@@ -169,7 +171,7 @@ jobs:
       - name: Copy Code Coverage Results
         run: docker cp cypress:/usr/src/app/coverage ./coverage || true
       - name: Store Coverage Results
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
           name: coverage
           path: coverage
```
.github/workflows/periodic-snapshot.yml (vendored, 66 changes)

```diff
@@ -1,30 +1,86 @@
 name: Periodic Snapshot

-# 10 minutes after midnight on the first of every month
 on:
   schedule:
-    - cron: '10 0 1 * *'
+    - cron: '10 0 1 * *' # 10 minutes after midnight on the first day of every month
+  workflow_dispatch:
+    inputs:
+      bump:
+        description: 'Bump the last digit of the version'
+        required: false
+        type: boolean
+      version:
+        description: 'Specific version to set'
+        required: false
+        default: ''
+
+env:
+  GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+permissions:
+  actions: write
+  contents: write

 jobs:
   bump-version-and-tag:
     runs-on: ubuntu-latest
+    if: github.ref_name == github.event.repository.default_branch
     steps:
       - uses: actions/checkout@v4
         with:
-          ssh-key: ${{secrets.ACTION_PUSH_KEY}}
+          ssh-key: ${{ secrets.ACTION_PUSH_KEY }}

       - run: |
+          # https://api.github.com/users/github-actions[bot]
           git config user.name 'github-actions[bot]'
           git config user.email '41898282+github-actions[bot]@users.noreply.github.com'

-          TAG_NAME="$(date +%y.%m).0-dev"
+          # Function to bump the version
+          bump_version() {
+            local version="$1"
+            local IFS=.
+            read -r major minor patch <<< "$version"
+            patch=$((patch + 1))
+            echo "$major.$minor.$patch-dev"
+          }
+
+          # Determine the new version tag
+          if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
+            BUMP_INPUT="${{ github.event.inputs.bump }}"
+            SPECIFIC_VERSION="${{ github.event.inputs.version }}"
+
+            # Check if both bump and specific version are provided
+            if [ "$BUMP_INPUT" = "true" ] && [ -n "$SPECIFIC_VERSION" ]; then
+              echo "::error::Error: Cannot specify both bump and specific version."
+              exit 1
+            fi
+
+            if [ -n "$SPECIFIC_VERSION" ]; then
+              TAG_NAME="$SPECIFIC_VERSION-dev"
+            elif [ "$BUMP_INPUT" = "true" ]; then
+              CURRENT_VERSION=$(grep '"version":' package.json | awk -F\" '{print $4}')
+              TAG_NAME=$(bump_version "$CURRENT_VERSION")
+            else
+              echo "No version bump or specific version provided for manual dispatch."
+              exit 1
+            fi
+          else
+            TAG_NAME="$(date +%y.%m).0-dev"
+          fi
+
+          echo "New version tag: $TAG_NAME"

+          # Update version in files
           gawk -i inplace -F: -v q=\" -v tag=${TAG_NAME} '/^  "version": / { print $1 FS, q tag q ","; next} { print }' package.json
           gawk -i inplace -F= -v q=\" -v tag=${TAG_NAME} '/^__version__ =/ { print $1 FS, q tag q; next} { print }' redash/__init__.py
           gawk -i inplace -F= -v q=\" -v tag=${TAG_NAME} '/^version =/ { print $1 FS, q tag q; next} { print }' pyproject.toml

           git add package.json redash/__init__.py pyproject.toml
           git commit -m "Snapshot: ${TAG_NAME}"
           git tag ${TAG_NAME}
           git push --atomic origin master refs/tags/${TAG_NAME}
+
+          # Run the 'preview-image' workflow if run this workflow manually
+          # For more information, please see the: https://docs.github.com/en/actions/security-guides/automatic-token-authentication
+          if [ "$BUMP_INPUT" = "true" ] || [ -n "$SPECIFIC_VERSION" ]; then
+            gh workflow run preview-image.yml --ref $TAG_NAME
+          fi
```
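The `bump_version` helper added above splits a `major.minor.patch` string on dots and increments only the patch digit before appending the `-dev` suffix. A minimal standalone sketch of the same logic, runnable outside the workflow (the sample version string is illustrative):

```bash
#!/usr/bin/env bash
# Standalone sketch of the workflow's bump_version helper. Assumes the
# "version" field in package.json looks like "25.07.0".
bump_version() {
  local version="$1"
  local IFS=.                      # split the version on dots
  read -r major minor patch <<< "$version"
  patch=$((patch + 1))             # bump only the last digit
  echo "$major.$minor.$patch-dev"
}

bump_version "25.07.0"   # prints: 25.07.1-dev
```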
.github/workflows/preview-image.yml (vendored, 122 changes)

```diff
@@ -3,6 +3,16 @@ on:
   push:
     tags:
       - '*-dev'
+  workflow_dispatch:
+    inputs:
+      dockerRepository:
+        description: 'Docker repository'
+        required: true
+        default: 'preview'
+        type: choice
+        options:
+          - preview
+          - redash

 env:
   NODE_VERSION: 18
@@ -22,6 +32,9 @@ jobs:
         elif [[ "${{ secrets.DOCKER_PASS }}" == '' ]]; then
           echo 'Docker password is empty. Skipping build+push'
           echo skip=true >> "$GITHUB_OUTPUT"
+        elif [[ "${{ vars.DOCKER_REPOSITORY }}" == '' ]]; then
+          echo 'Docker repository is empty. Skipping build+push'
+          echo skip=true >> "$GITHUB_OUTPUT"
         else
           echo 'Docker user and password are set and branch is `master`.'
           echo 'Building + pushing `preview` image.'
@@ -29,7 +42,20 @@ jobs:
         fi

   build-docker-image:
-    runs-on: ubuntu-22.04
+    runs-on: ${{ matrix.os }}
+    strategy:
+      fail-fast: false
+      matrix:
+        arch:
+          - amd64
+          - arm64
+        include:
+          - arch: amd64
+            os: ubuntu-22.04
+          - arch: arm64
+            os: ubuntu-22.04-arm
+    outputs:
+      VERSION_TAG: ${{ steps.version.outputs.VERSION_TAG }}
     needs:
       - build-skip-check
     if: needs.build-skip-check.outputs.skip == 'false'
@@ -44,11 +70,6 @@ jobs:
           node-version: ${{ env.NODE_VERSION }}
           cache: 'yarn'

-      - name: Install Dependencies
-        run: |
-          npm install --global --force yarn@1.22.22
-          yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
-
       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v3

@@ -58,6 +79,13 @@ jobs:
           username: ${{ vars.DOCKER_USER }}
           password: ${{ secrets.DOCKER_PASS }}

+      - name: Install Dependencies
+        env:
+          PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: true
+        run: |
+          npm install --global --force yarn@1.22.22
+          yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
+
       - name: Set version
         id: version
         run: |
@@ -67,21 +95,91 @@ jobs:
           echo "VERSION_TAG=$VERSION_TAG" >> "$GITHUB_OUTPUT"

       - name: Build and push preview image to Docker Hub
+        id: build-preview
         uses: docker/build-push-action@v4
+        if: ${{ github.event.inputs.dockerRepository == 'preview' || !github.event.workflow_run }}
         with:
-          push: true
           tags: |
-            redash/redash:preview
-            redash/preview:${{ steps.version.outputs.VERSION_TAG }}
+            ${{ vars.DOCKER_REPOSITORY }}/redash
+            ${{ vars.DOCKER_REPOSITORY }}/preview
           context: .
           build-args: |
            test_all_deps=true
-          cache-from: type=gha
-          cache-to: type=gha,mode=max
-          platforms: linux/amd64
+          outputs: type=image,push-by-digest=true,push=true
+          cache-from: type=gha,scope=${{ matrix.arch }}
+          cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
         env:
           DOCKER_CONTENT_TRUST: true

+      - name: Build and push release image to Docker Hub
+        id: build-release
+        uses: docker/build-push-action@v4
+        if: ${{ github.event.inputs.dockerRepository == 'redash' }}
+        with:
+          tags: |
+            ${{ vars.DOCKER_REPOSITORY }}/redash:${{ steps.version.outputs.VERSION_TAG }}
+          context: .
+          build-args: |
+            test_all_deps=true
+          outputs: type=image,push-by-digest=true,push=true
+          cache-from: type=gha,scope=${{ matrix.arch }}
+          cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
+        env:
+          DOCKER_CONTENT_TRUST: true
+
       - name: "Failure: output container logs to console"
         if: failure()
         run: docker compose logs

+      - name: Export digest
+        run: |
+          mkdir -p ${{ runner.temp }}/digests
+          if [[ "${{ github.event.inputs.dockerRepository }}" == 'preview' || !github.event.workflow_run ]]; then
+            digest="${{ steps.build-preview.outputs.digest}}"
+          else
+            digest="${{ steps.build-release.outputs.digest}}"
+          fi
+          touch "${{ runner.temp }}/digests/${digest#sha256:}"
+
+      - name: Upload digest
+        uses: actions/upload-artifact@v4
+        with:
+          name: digests-${{ matrix.arch }}
+          path: ${{ runner.temp }}/digests/*
+          if-no-files-found: error
+
+  merge-docker-image:
+    runs-on: ubuntu-22.04
+    needs: build-docker-image
+    steps:
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+
+      - name: Login to DockerHub
+        uses: docker/login-action@v3
+        with:
+          username: ${{ vars.DOCKER_USER }}
+          password: ${{ secrets.DOCKER_PASS }}
+
+      - name: Download digests
+        uses: actions/download-artifact@v4
+        with:
+          path: ${{ runner.temp }}/digests
+          pattern: digests-*
+          merge-multiple: true
+
+      - name: Create and push manifest for the preview image
+        if: ${{ github.event.inputs.dockerRepository == 'preview' || !github.event.workflow_run }}
+        working-directory: ${{ runner.temp }}/digests
+        run: |
+          docker buildx imagetools create -t ${{ vars.DOCKER_REPOSITORY }}/redash:preview \
+            $(printf '${{ vars.DOCKER_REPOSITORY }}/redash:preview@sha256:%s ' *)
+          docker buildx imagetools create -t ${{ vars.DOCKER_REPOSITORY }}/preview:${{ needs.build-docker-image.outputs.VERSION_TAG }} \
+            $(printf '${{ vars.DOCKER_REPOSITORY }}/preview:${{ needs.build-docker-image.outputs.VERSION_TAG }}@sha256:%s ' *)
+
+      - name: Create and push manifest for the release image
+        if: ${{ github.event.inputs.dockerRepository == 'redash' }}
+        working-directory: ${{ runner.temp }}/digests
+        run: |
+          docker buildx imagetools create -t ${{ vars.DOCKER_REPOSITORY }}/redash:${{ needs.build-docker-image.outputs.VERSION_TAG }} \
+            $(printf '${{ vars.DOCKER_REPOSITORY }}/redash:${{ needs.build-docker-image.outputs.VERSION_TAG }}@sha256:%s ' *)
```
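Under the new matrix layout, each per-arch job pushes its image by digest only (`push-by-digest=true`) and uploads an empty file named after the digest as an artifact; `merge-docker-image` then stitches the per-arch digests into one multi-arch manifest with `docker buildx imagetools create`. A hedged sketch of that final step, with `REPO` as a placeholder for `${{ vars.DOCKER_REPOSITORY }}`:

```bash
#!/usr/bin/env bash
# Sketch of the manifest-merge step. Assumes ./digests contains one empty
# file per architecture, named after that image's sha256 digest (this is
# what the "Export digest" / "Upload digest" steps above produce).
REPO=example/redash   # placeholder repository name

cd digests
# Expand every digest file name into "<image>@sha256:<digest>" and create
# a single multi-arch manifest tagged :preview that points at all of them.
docker buildx imagetools create -t "$REPO:preview" \
  $(printf "$REPO:preview@sha256:%s " *)
```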
.github/workflows/restyled.yml (vendored, new file, 36 lines)

```diff
@@ -0,0 +1,36 @@
+name: Restyled
+
+on:
+  pull_request:
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  restyled:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          ref: ${{ github.event.pull_request.head.sha }}
+
+      - uses: restyled-io/actions/setup@v4
+      - id: restyler
+        uses: restyled-io/actions/run@v4
+        with:
+          fail-on-differences: true
+
+      - if: |
+          !cancelled() &&
+          steps.restyler.outputs.success == 'true' &&
+          github.event.pull_request.head.repo.full_name == github.repository
+        uses: peter-evans/create-pull-request@v6
+        with:
+          base: ${{ steps.restyler.outputs.restyled-base }}
+          branch: ${{ steps.restyler.outputs.restyled-head }}
+          title: ${{ steps.restyler.outputs.restyled-title }}
+          body: ${{ steps.restyler.outputs.restyled-body }}
+          labels: "restyled"
+          reviewers: ${{ github.event.pull_request.user.login }}
+          delete-branch: true
```
.gitignore (vendored, 1 change)

```diff
@@ -17,6 +17,7 @@ client/dist
 _build
 .vscode
 .env
+.tool-versions

 dump.rdb
```
Dockerfile (51 changes)

```diff
@@ -1,4 +1,4 @@
-FROM node:18-bookworm as frontend-builder
+FROM node:18-bookworm AS frontend-builder

 RUN npm install --global --force yarn@1.22.22

@@ -20,11 +20,22 @@ COPY --chown=redash scripts /frontend/scripts
 ARG code_coverage
 ENV BABEL_ENV=${code_coverage:+test}

+# Avoid issues caused by lags in disk and network I/O speeds when working on top of QEMU emulation for multi-platform image building.
+RUN yarn config set network-timeout 300000
+
 RUN if [ "x$skip_frontend_build" = "x" ] ; then yarn --frozen-lockfile --network-concurrency 1; fi

 COPY --chown=redash client /frontend/client
 COPY --chown=redash webpack.config.js /frontend/
-RUN if [ "x$skip_frontend_build" = "x" ] ; then yarn build; else mkdir -p /frontend/client/dist && touch /frontend/client/dist/multi_org.html && touch /frontend/client/dist/index.html; fi
+RUN <<EOF
+  if [ "x$skip_frontend_build" = "x" ]; then
+    yarn build
+  else
+    mkdir -p /frontend/client/dist
+    touch /frontend/client/dist/multi_org.html
+    touch /frontend/client/dist/index.html
+  fi
+EOF

 FROM python:3.10-slim-bookworm

@@ -64,28 +75,34 @@ RUN apt-get update && \
 ARG TARGETPLATFORM
 ARG databricks_odbc_driver_url=https://databricks-bi-artifacts.s3.us-east-2.amazonaws.com/simbaspark-drivers/odbc/2.6.26/SimbaSparkODBC-2.6.26.1045-Debian-64bit.zip
-RUN if [ "$TARGETPLATFORM" = "linux/amd64" ]; then \
-    curl https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor -o /usr/share/keyrings/microsoft-prod.gpg \
-    && curl https://packages.microsoft.com/config/debian/12/prod.list > /etc/apt/sources.list.d/mssql-release.list \
-    && apt-get update \
-    && ACCEPT_EULA=Y apt-get install -y --no-install-recommends msodbcsql18 \
-    && apt-get clean \
-    && rm -rf /var/lib/apt/lists/* \
-    && curl "$databricks_odbc_driver_url" --location --output /tmp/simba_odbc.zip \
-    && chmod 600 /tmp/simba_odbc.zip \
-    && unzip /tmp/simba_odbc.zip -d /tmp/simba \
-    && dpkg -i /tmp/simba/*.deb \
-    && printf "[Simba]\nDriver = /opt/simba/spark/lib/64/libsparkodbc_sb64.so" >> /etc/odbcinst.ini \
-    && rm /tmp/simba_odbc.zip \
-    && rm -rf /tmp/simba; fi
+RUN <<EOF
+  if [ "$TARGETPLATFORM" = "linux/amd64" ]; then
+    curl https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor -o /usr/share/keyrings/microsoft-prod.gpg
+    curl https://packages.microsoft.com/config/debian/12/prod.list > /etc/apt/sources.list.d/mssql-release.list
+    apt-get update
+    ACCEPT_EULA=Y apt-get install -y --no-install-recommends msodbcsql18
+    apt-get clean
+    rm -rf /var/lib/apt/lists/*
+    curl "$databricks_odbc_driver_url" --location --output /tmp/simba_odbc.zip
+    chmod 600 /tmp/simba_odbc.zip
+    unzip /tmp/simba_odbc.zip -d /tmp/simba
+    dpkg -i /tmp/simba/*.deb
+    printf "[Simba]\nDriver = /opt/simba/spark/lib/64/libsparkodbc_sb64.so" >> /etc/odbcinst.ini
+    rm /tmp/simba_odbc.zip
+    rm -rf /tmp/simba
+  fi
+EOF

 WORKDIR /app

-ENV POETRY_VERSION=1.6.1
+ENV POETRY_VERSION=1.8.3
 ENV POETRY_HOME=/etc/poetry
 ENV POETRY_VIRTUALENVS_CREATE=false
 RUN curl -sSL https://install.python-poetry.org | python3 -

+# Avoid crashes, including corrupted cache artifacts, when building multi-platform images with GitHub Actions.
+RUN /etc/poetry/bin/poetry cache clear pypi --all
+
 COPY pyproject.toml poetry.lock ./

 ARG POETRY_OPTIONS="--no-root --no-interaction --no-ansi"
```
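The Dockerfile leans on two shell idioms: `${var:+word}`, which expands to `word` only when `var` is set and non-empty (how `BABEL_ENV` becomes `test` for coverage builds), and the portable `[ "x$var" = "x" ]` emptiness test guarding the frontend build. A small sketch of both in isolation:

```bash
#!/usr/bin/env bash
# Sketch of the two shell idioms the Dockerfile relies on.

code_coverage=1
# ${var:+word} expands to "word" only when var is set and non-empty, so
# BABEL_ENV is "test" for coverage builds and stays empty otherwise.
echo "BABEL_ENV=${code_coverage:+test}"   # prints: BABEL_ENV=test

skip_frontend_build=""
# The "x$var" = "x" comparison is a portable emptiness test: the frontend
# build steps run only when skip_frontend_build was not provided.
if [ "x$skip_frontend_build" = "x" ]; then
  echo "would run: yarn build"
fi
```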
Makefile (8 changes)

```diff
@@ -4,7 +4,11 @@ compose_build: .env
 	COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker compose build

 up:
-	COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker compose up -d --build
+	docker compose up -d redis postgres --remove-orphans
+	docker compose exec -u postgres postgres psql postgres --csv \
+		-1tqc "SELECT table_name FROM information_schema.tables WHERE table_name = 'organizations'" 2> /dev/null \
+		| grep -q "organizations" || make create_database
+	COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker compose up -d --build --remove-orphans

 test_db:
 	@for i in `seq 1 5`; do \
@@ -30,7 +34,7 @@ clean:

 clean-all: clean
 	docker image rm --force \
-		redash/redash:10.1.0.b50633 redis:7-alpine maildev/maildev:latest \
+		redash/redash:latest redis:7-alpine maildev/maildev:latest \
 		pgautoupgrade/pgautoupgrade:15-alpine3.8 pgautoupgrade/pgautoupgrade:latest

 down:
```
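The reworked `up` target only creates the database on first run: it asks Postgres whether an `organizations` table exists and falls back to `make create_database` when the probe finds nothing. The same check in isolation, assuming the compose service `postgres` is already running:

```bash
#!/usr/bin/env bash
# Sketch of the Makefile's first-run probe, assuming the compose service
# "postgres" is up. psql -1tqc runs a single quiet, tuples-only command;
# grep -q succeeds only when the organizations table already exists.
if docker compose exec -u postgres postgres \
     psql postgres --csv -1tqc \
     "SELECT table_name FROM information_schema.tables WHERE table_name = 'organizations'" \
     2> /dev/null | grep -q "organizations"; then
  echo "database already initialized"
else
  echo "would run: make create_database"
fi
```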
```diff
@@ -67,7 +67,7 @@ help() {
   echo ""
   echo "shell -- open shell"
   echo "dev_server -- start Flask development server with debugger and auto reload"
-  echo "debug -- start Flask development server with remote debugger via ptvsd"
+  echo "debug -- start Flask development server with remote debugger via debugpy"
   echo "create_db -- create database tables"
   echo "manage -- CLI to manage redash"
   echo "tests -- run tests"
```
```diff
@@ -1,5 +1,6 @@
 import React from "react";
-import { clientConfig } from "@/services/auth";
+import Link from "@/components/Link";
+import { clientConfig, currentUser } from "@/services/auth";
 import frontendVersion from "@/version.json";

 export default function VersionInfo() {
@@ -9,6 +10,15 @@ export default function VersionInfo() {
         Version: {clientConfig.version}
         {frontendVersion !== clientConfig.version && ` (${frontendVersion.substring(0, 8)})`}
       </div>
+      {clientConfig.newVersionAvailable && currentUser.hasPermission("super_admin") && (
+        <div className="m-t-10">
+          {/* eslint-disable react/jsx-no-target-blank */}
+          <Link href="https://version.redash.io/" className="update-available" target="_blank" rel="noopener">
+            Update Available <i className="fa fa-external-link m-l-5" aria-hidden="true" />
+            <span className="sr-only">(opens in a new tab)</span>
+          </Link>
+        </div>
+      )}
     </React.Fragment>
   );
 }
```
client/app/components/BeaconConsent.jsx (new file, 79 lines)

```diff
@@ -0,0 +1,79 @@
+import React, { useState } from "react";
+import Card from "antd/lib/card";
+import Button from "antd/lib/button";
+import Typography from "antd/lib/typography";
+import { clientConfig } from "@/services/auth";
+import Link from "@/components/Link";
+import HelpTrigger from "@/components/HelpTrigger";
+import DynamicComponent from "@/components/DynamicComponent";
+import OrgSettings from "@/services/organizationSettings";
+
+const Text = Typography.Text;
+
+function BeaconConsent() {
+  const [hide, setHide] = useState(false);
+
+  if (!clientConfig.showBeaconConsentMessage || hide) {
+    return null;
+  }
+
+  const hideConsentCard = () => {
+    clientConfig.showBeaconConsentMessage = false;
+    setHide(true);
+  };
+
+  const confirmConsent = (confirm) => {
+    let message = "🙏 Thank you.";
+
+    if (!confirm) {
+      message = "Settings Saved.";
+    }
+
+    OrgSettings.save({ beacon_consent: confirm }, message)
+      // .then(() => {
+      //   // const settings = get(response, 'settings');
+      //   // this.setState({ settings, formValues: { ...settings } });
+      // })
+      .finally(hideConsentCard);
+  };
+
+  return (
+    <DynamicComponent name="BeaconConsent">
+      <div className="m-t-10 tiled">
+        <Card
+          title={
+            <>
+              Would you be ok with sharing anonymous usage data with the Redash team?{" "}
+              <HelpTrigger type="USAGE_DATA_SHARING" />
+            </>
+          }
+          bordered={false}
+        >
+          <Text>Help Redash improve by automatically sending anonymous usage data:</Text>
+          <div className="m-t-5">
+            <ul>
+              <li> Number of users, queries, dashboards, alerts, widgets and visualizations.</li>
+              <li> Types of data sources, alert destinations and visualizations.</li>
+            </ul>
+          </div>
+          <Text>All data is aggregated and will never include any sensitive or private data.</Text>
+          <div className="m-t-5">
+            <Button type="primary" className="m-r-5" onClick={() => confirmConsent(true)}>
+              Yes
+            </Button>
+            <Button type="default" onClick={() => confirmConsent(false)}>
+              No
+            </Button>
+          </div>
+          <div className="m-t-15">
+            <Text type="secondary">
+              You can change this setting anytime from the <Link href="settings/general">Settings</Link> page.
+            </Text>
+          </div>
+        </Card>
+      </div>
+    </DynamicComponent>
+  );
+}
+
+export default BeaconConsent;
```
```diff
@@ -23,6 +23,7 @@ export const TYPES = mapValues(
   VALUE_SOURCE_OPTIONS: ["/user-guide/querying/query-parameters#Value-Source-Options", "Guide: Value Source Options"],
   SHARE_DASHBOARD: ["/user-guide/dashboards/sharing-dashboards", "Guide: Sharing and Embedding Dashboards"],
   AUTHENTICATION_OPTIONS: ["/user-guide/users/authentication-options", "Guide: Authentication Options"],
+  USAGE_DATA_SHARING: ["/open-source/admin-guide/usage-data", "Help: Anonymous Usage Data Sharing"],
   DS_ATHENA: ["/data-sources/amazon-athena-setup", "Guide: Help Setting up Amazon Athena"],
   DS_BIGQUERY: ["/data-sources/bigquery-setup", "Guide: Help Setting up BigQuery"],
   DS_URL: ["/data-sources/querying-urls", "Guide: Help Setting up URL"],
@@ -100,7 +101,7 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
     clearTimeout(this.iframeLoadingTimeout);
   }

-  loadIframe = url => {
+  loadIframe = (url) => {
     clearTimeout(this.iframeLoadingTimeout);
     this.setState({ loading: true, error: false });

@@ -115,8 +116,8 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
     clearTimeout(this.iframeLoadingTimeout);
   };

-  onPostMessageReceived = event => {
-    if (!some(allowedDomains, domain => startsWith(event.origin, domain))) {
+  onPostMessageReceived = (event) => {
+    if (!some(allowedDomains, (domain) => startsWith(event.origin, domain))) {
       return;
     }

@@ -133,7 +134,7 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
     return helpTriggerType ? helpTriggerType[0] : this.props.href;
   };

-  openDrawer = e => {
+  openDrawer = (e) => {
     // keep "open in new tab" behavior
     if (!e.shiftKey && !e.ctrlKey && !e.metaKey) {
       e.preventDefault();
@@ -143,7 +144,7 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
     }
   };

-  closeDrawer = event => {
+  closeDrawer = (event) => {
     if (event) {
       event.preventDefault();
     }
@@ -160,7 +161,7 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
     const tooltip = get(types, `${this.props.type}[1]`, this.props.title);
     const className = cx("help-trigger", this.props.className);
     const url = this.state.currentUrl;
-    const isAllowedDomain = some(allowedDomains, domain => startsWith(url || targetUrl, domain));
+    const isAllowedDomain = some(allowedDomains, (domain) => startsWith(url || targetUrl, domain));
     const shouldRenderAsLink = this.props.renderAsLink || !isAllowedDomain;

     return (
@@ -179,13 +180,15 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
             )}
           </>
         ) : null
-      }>
+      }
+    >
       <Link
         href={url || this.getUrl()}
         className={className}
         rel="noopener noreferrer"
         target="_blank"
-        onClick={shouldRenderAsLink ? () => {} : this.openDrawer}>
+        onClick={shouldRenderAsLink ? () => {} : this.openDrawer}
+      >
         {this.props.children}
       </Link>
     </Tooltip>
@@ -196,7 +199,8 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
         visible={this.state.visible}
         className={cx("help-drawer", drawerClassName)}
         destroyOnClose
-        width={400}>
+        width={400}
+      >
       <div className="drawer-wrapper">
         <div className="drawer-menu">
           {url && (
```
```diff
@@ -69,7 +69,7 @@ UserPreviewCard.defaultProps = {
 // DataSourcePreviewCard

 export function DataSourcePreviewCard({ dataSource, withLink, children, ...props }) {
-  const imageUrl = `static/images/db-logos/${dataSource.type}.png`;
+  const imageUrl = `/static/images/db-logos/${dataSource.type}.png`;
   const title = withLink ? <Link href={"data_sources/" + dataSource.id}>{dataSource.name}</Link> : dataSource.name;
   return (
     <PreviewCard {...props} imageUrl={imageUrl} title={title}>
```
```diff
@@ -96,7 +96,7 @@ function EmptyState({
   }, []);

   // Show if `onboardingMode=false` or any requested step not completed
-  const shouldShow = !onboardingMode || some(keys(isAvailable), step => isAvailable[step] && !isCompleted[step]);
+  const shouldShow = !onboardingMode || some(keys(isAvailable), (step) => isAvailable[step] && !isCompleted[step]);

   if (!shouldShow) {
     return null;
@@ -181,7 +181,7 @@ function EmptyState({
   ];

   const stepsItems = getStepsItems ? getStepsItems(defaultStepsItems) : defaultStepsItems;
-  const imageSource = illustrationPath ? illustrationPath : "static/images/illustrations/" + illustration + ".svg";
+  const imageSource = illustrationPath ? illustrationPath : "/static/images/illustrations/" + illustration + ".svg";

   return (
     <div className="empty-state-wrapper">
@@ -196,7 +196,7 @@ function EmptyState({
       </div>
       <div className="empty-state__steps">
         <h4>Let's get started</h4>
-        <ol>{stepsItems.map(item => item.node)}</ol>
+        <ol>{stepsItems.map((item) => item.node)}</ol>
         {helpMessage}
       </div>
     </div>
```
```diff
@@ -28,6 +28,7 @@ export interface Controller<I, P = any> {
   orderByField?: string;
   orderByReverse: boolean;
   toggleSorting: (orderByField: string) => void;
+  setSorting: (orderByField: string, orderByReverse: boolean) => void;

   // pagination
   page: number;
@@ -139,10 +140,11 @@ export function wrap<I, P = any>(
       this.props.onError!(error);

     const initialState = this.getState({ ...itemsSource.getState(), isLoaded: false });
-    const { updatePagination, toggleSorting, updateSearch, updateSelectedTags, update, handleError } = itemsSource;
+    const { updatePagination, toggleSorting, setSorting, updateSearch, updateSelectedTags, update, handleError } = itemsSource;
     this.state = {
       ...initialState,
       toggleSorting, // eslint-disable-line react/no-unused-state
+      setSorting, // eslint-disable-line react/no-unused-state
       updateSearch: debounce(updateSearch, 200), // eslint-disable-line react/no-unused-state
       updateSelectedTags, // eslint-disable-line react/no-unused-state
       updatePagination, // eslint-disable-line react/no-unused-state
```
```diff
@@ -39,14 +39,12 @@ export class ItemsSource {
     const customParams = {};
     const context = {
       ...this.getCallbackContext(),
-      setCustomParams: params => {
+      setCustomParams: (params) => {
         extend(customParams, params);
       },
     };
     return this._beforeUpdate().then(() => {
-      const fetchToken = Math.random()
-        .toString(36)
-        .substr(2);
+      const fetchToken = Math.random().toString(36).substr(2);
       this._currentFetchToken = fetchToken;
       return this._fetcher
         .fetch(changes, state, context)
@@ -59,7 +57,7 @@ export class ItemsSource {
             return this._afterUpdate();
           }
         })
-        .catch(error => this.handleError(error));
+        .catch((error) => this.handleError(error));
     });
   }
@@ -124,13 +122,20 @@ export class ItemsSource {
     });
   };

-  toggleSorting = orderByField => {
+  toggleSorting = (orderByField) => {
     this._sorter.toggleField(orderByField);
     this._savedOrderByField = this._sorter.field;
     this._changed({ sorting: true });
   };

-  updateSearch = searchTerm => {
+  setSorting = (orderByField, orderByReverse) => {
+    this._sorter.setField(orderByField);
+    this._sorter.setReverse(orderByReverse);
+    this._savedOrderByField = this._sorter.field;
+    this._changed({ sorting: true });
+  };
+
+  updateSearch = (searchTerm) => {
     // here we update state directly, but later `fetchData` will update it properly
     this._searchTerm = searchTerm;
     // in search mode ignore the ordering and use the ranking order
@@ -145,7 +150,7 @@ export class ItemsSource {
     this._changed({ search: true, pagination: { page: true } });
   };

-  updateSelectedTags = selectedTags => {
+  updateSelectedTags = (selectedTags) => {
     this._selectedTags = selectedTags;
     this._paginator.setPage(1);
     this._changed({ tags: true, pagination: { page: true } });
@@ -153,7 +158,7 @@ export class ItemsSource {

   update = () => this._changed();

-  handleError = error => {
+  handleError = (error) => {
     if (isFunction(this.onError)) {
       this.onError(error);
     }
@@ -172,7 +177,7 @@ export class ResourceItemsSource extends ItemsSource {
       processResults: (results, context) => {
         let processItem = getItemProcessor(context);
         processItem = isFunction(processItem) ? processItem : identity;
-        return map(results, item => processItem(item, context));
+        return map(results, (item) => processItem(item, context));
       },
     });
   }
```
```diff
@@ -44,7 +44,7 @@ export const Columns = {
   date(overrides) {
     return extend(
       {
-        render: text => formatDate(text),
+        render: (text) => formatDate(text),
       },
       overrides
     );
@@ -52,7 +52,7 @@ export const Columns = {
   dateTime(overrides) {
     return extend(
       {
-        render: text => formatDateTime(text),
+        render: (text) => formatDateTime(text),
       },
       overrides
     );
@@ -62,7 +62,7 @@ export const Columns = {
       {
         width: "1%",
         className: "text-nowrap",
-        render: text => durationHumanize(text),
+        render: (text) => durationHumanize(text),
       },
       overrides
     );
@@ -70,7 +70,7 @@ export const Columns = {
   timeAgo(overrides, timeAgoCustomProps = undefined) {
     return extend(
       {
-        render: value => <TimeAgo date={value} {...timeAgoCustomProps} />,
+        render: (value) => <TimeAgo date={value} {...timeAgoCustomProps} />,
       },
       overrides
     );
@@ -110,6 +110,7 @@ export default class ItemsTable extends React.Component {
     orderByField: PropTypes.string,
     orderByReverse: PropTypes.bool,
     toggleSorting: PropTypes.func,
+    setSorting: PropTypes.func,
     "data-test": PropTypes.string,
     rowKey: PropTypes.oneOfType([PropTypes.string, PropTypes.func]),
   };
@@ -127,18 +128,15 @@ export default class ItemsTable extends React.Component {
   };

   prepareColumns() {
-    const { orderByField, orderByReverse, toggleSorting } = this.props;
+    const { orderByField, orderByReverse } = this.props;
     const orderByDirection = orderByReverse ? "descend" : "ascend";

     return map(
       map(
-        filter(this.props.columns, column => (isFunction(column.isAvailable) ? column.isAvailable() : true)),
-        column => extend(column, { orderByField: column.orderByField || column.field })
+        filter(this.props.columns, (column) => (isFunction(column.isAvailable) ? column.isAvailable() : true)),
+        (column) => extend(column, { orderByField: column.orderByField || column.field })
       ),
       (column, index) => {
-        // Bind click events only to sortable columns
-        const onHeaderCell = column.sorter ? () => ({ onClick: () => toggleSorting(column.orderByField) }) : null;
-
         // Wrap render function to pass correct arguments
         const render = isFunction(column.render) ? (text, row) => column.render(text, row.item) : identity;
@@ -146,14 +144,13 @@ export default class ItemsTable extends React.Component {
           key: "column" + index,
           dataIndex: ["item", column.field],
           defaultSortOrder: column.orderByField === orderByField ? orderByDirection : null,
-          onHeaderCell,
           render,
         });
       }
     );
   }

-  getRowKey = record => {
+  getRowKey = (record) => {
     const { rowKey } = this.props;
     if (rowKey) {
       if (isFunction(rowKey)) {
@@ -172,22 +169,43 @@ export default class ItemsTable extends React.Component {

     // Bind events only if `onRowClick` specified
     const onTableRow = isFunction(this.props.onRowClick)
-      ? row => ({
-          onClick: event => {
+      ? (row) => ({
+          onClick: (event) => {
            this.props.onRowClick(event, row.item);
          },
        })
      : null;

+    const onChange = (pagination, filters, sorter, extra) => {
+      const action = extra?.action;
+      if (action === "sort") {
+        const propsColumn = this.props.columns.find((column) => column.field === sorter.field[1]);
+        if (!propsColumn.sorter) {
+          return;
+        }
+        let orderByField = propsColumn.orderByField;
+        const orderByReverse = sorter.order === "descend";
+
+        if (orderByReverse === undefined) {
+          orderByField = null;
+        }
+        if (this.props.setSorting) {
+          this.props.setSorting(orderByField, orderByReverse);
+        } else {
+          this.props.toggleSorting(orderByField);
+        }
+      }
+    };
+
     const { showHeader } = this.props;
     if (this.props.loading) {
       if (isEmpty(tableDataProps.dataSource)) {
-        tableDataProps.columns = tableDataProps.columns.map(column => ({
+        tableDataProps.columns = tableDataProps.columns.map((column) => ({
           ...column,
           sorter: false,
           render: () => <Skeleton active paragraph={false} />,
         }));
-        tableDataProps.dataSource = range(10).map(key => ({ key: `${key}` }));
+        tableDataProps.dataSource = range(10).map((key) => ({ key: `${key}` }));
       } else {
         tableDataProps.loading = { indicator: null };
       }
@@ -200,6 +218,7 @@ export default class ItemsTable extends React.Component {
         rowKey={this.getRowKey}
         pagination={false}
         onRow={onTableRow}
+        onChange={onChange}
         data-test={this.props["data-test"]}
         {...tableDataProps}
       />
```
```diff
@@ -65,6 +65,7 @@ export const Query = PropTypes.shape({

 export const AlertOptions = PropTypes.shape({
   column: PropTypes.string,
+  selector: PropTypes.oneOf(["first", "min", "max"]),
   op: PropTypes.oneOf([">", ">=", "<", "<=", "==", "!="]),
   value: PropTypes.oneOfType([PropTypes.string, PropTypes.number]),
   custom_subject: PropTypes.string,
@@ -83,6 +84,7 @@ export const Alert = PropTypes.shape({
   query: Query,
   options: PropTypes.shape({
     column: PropTypes.string,
+    selector: PropTypes.string,
     op: PropTypes.string,
     value: PropTypes.oneOfType([PropTypes.string, PropTypes.number]),
   }).isRequired,
```
```diff
@@ -1,5 +1,5 @@
 <!DOCTYPE html>
-<html lang="en">
+<html lang="en" translate="no">
   <head>
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
     <meta charset="UTF-8" />
```
```diff
@@ -16,6 +16,7 @@ import MenuButton from "./components/MenuButton";
 import AlertView from "./AlertView";
 import AlertEdit from "./AlertEdit";
 import AlertNew from "./AlertNew";
+import notifications from "@/services/notifications";

 const MODES = {
   NEW: 0,
@@ -64,6 +65,7 @@ class Alert extends React.Component {
       this.setState({
         alert: {
           options: {
+            selector: "first",
             op: ">",
             value: 1,
             muted: false,
@@ -75,7 +77,7 @@ class Alert extends React.Component {
     } else {
       const { alertId } = this.props;
       AlertService.get({ id: alertId })
-        .then(alert => {
+        .then((alert) => {
           if (this._isMounted) {
             const canEdit = currentUser.canEdit(alert);
@@ -93,7 +95,7 @@ class Alert extends React.Component {
             this.onQuerySelected(alert.query);
           }
         })
-        .catch(error => {
+        .catch((error) => {
           if (this._isMounted) {
             this.props.onError(error);
           }
@@ -112,7 +114,7 @@ class Alert extends React.Component {
     alert.rearm = pendingRearm || null;

     return AlertService.save(alert)
-      .then(alert => {
+      .then((alert) => {
         notification.success("Saved.");
         navigateTo(`alerts/${alert.id}`, true);
         this.setState({ alert, mode: MODES.VIEW });
@@ -122,7 +124,7 @@ class Alert extends React.Component {
       });
   };

-  onQuerySelected = query => {
+  onQuerySelected = (query) => {
     this.setState(({ alert }) => ({
       alert: Object.assign(alert, { query }),
       queryResult: null,
@@ -130,7 +132,7 @@ class Alert extends React.Component {

     if (query) {
       // get cached result for column names and values
-      new QueryService(query).getQueryResultPromise().then(queryResult => {
+      new QueryService(query).getQueryResultPromise().then((queryResult) => {
         if (this._isMounted) {
           this.setState({ queryResult });
           let { column } = this.state.alert.options;
@@ -146,18 +148,18 @@ class Alert extends React.Component {
     }
   };

-  onNameChange = name => {
+  onNameChange = (name) => {
     const { alert } = this.state;
     this.setState({
       alert: Object.assign(alert, { name }),
     });
   };

-  onRearmChange = pendingRearm => {
+  onRearmChange = (pendingRearm) => {
     this.setState({ pendingRearm });
   };

-  setAlertOptions = obj => {
+  setAlertOptions = (obj) => {
     const { alert } = this.state;
     const options = { ...alert.options, ...obj };
     this.setState({
@@ -177,6 +179,17 @@ class Alert extends React.Component {
     });
   };

+  evaluate = () => {
+    const { alert } = this.state;
+    return AlertService.evaluate(alert)
+      .then(() => {
+        notification.success("Alert evaluated. Refresh page for updated status.");
+      })
+      .catch(() => {
+        notifications.error("Failed to evaluate alert.");
+      });
+  };
+
   mute = () => {
     const { alert } = this.state;
     return AlertService.mute(alert)
@@ -223,7 +236,14 @@ class Alert extends React.Component {
     const { queryResult, mode, canEdit, pendingRearm } = this.state;

     const menuButton = (
-      <MenuButton doDelete={this.delete} muted={muted} mute={this.mute} unmute={this.unmute} canEdit={canEdit} />
+      <MenuButton
+        doDelete={this.delete}
+        muted={muted}
+        mute={this.mute}
+        unmute={this.unmute}
+        canEdit={canEdit}
+        evaluate={this.evaluate}
+      />
     );

     const commonProps = {
@@ -258,7 +278,7 @@ routes.register(
   routeWithUserSession({
     path: "/alerts/new",
     title: "New Alert",
-    render: pageProps => <Alert {...pageProps} mode={MODES.NEW} />,
+    render: (pageProps) => <Alert {...pageProps} mode={MODES.NEW} />,
   })
 );
 routes.register(
@@ -266,7 +286,7 @@ routes.register(
   routeWithUserSession({
     path: "/alerts/:alertId",
     title: "Alert",
-    render: pageProps => <Alert {...pageProps} mode={MODES.VIEW} />,
+    render: (pageProps) => <Alert {...pageProps} mode={MODES.VIEW} />,
   })
 );
 routes.register(
@@ -274,6 +294,6 @@ routes.register(
   routeWithUserSession({
     path: "/alerts/:alertId/edit",
     title: "Alert",
-    render: pageProps => <Alert {...pageProps} mode={MODES.EDIT} />,
+    render: (pageProps) => <Alert {...pageProps} mode={MODES.EDIT} />,
   })
 );
```
```diff
@@ -54,23 +54,74 @@ export default function Criteria({ columnNames, resultValues, alertOptions, onChange
     return null;
   })();

-  const columnHint = (
-    <small className="alert-criteria-hint">
-      Top row value is <code className="p-0">{toString(columnValue) || "unknown"}</code>
-    </small>
-  );
+  let columnHint;
+
+  if (alertOptions.selector === "first") {
+    columnHint = (
+      <small className="alert-criteria-hint">
+        Top row value is <code className="p-0">{toString(columnValue) || "unknown"}</code>
+      </small>
+    );
+  } else if (alertOptions.selector === "max") {
+    columnHint = (
+      <small className="alert-criteria-hint">
+        Max column value is{" "}
+        <code className="p-0">
+          {toString(
+            Math.max(...resultValues.map((o) => Number(o[alertOptions.column])).filter((value) => !isNaN(value)))
+          ) || "unknown"}
+        </code>
+      </small>
+    );
+  } else if (alertOptions.selector === "min") {
+    columnHint = (
+      <small className="alert-criteria-hint">
+        Min column value is{" "}
+        <code className="p-0">
+          {toString(
+            Math.min(...resultValues.map((o) => Number(o[alertOptions.column])).filter((value) => !isNaN(value)))
+          ) || "unknown"}
+        </code>
+      </small>
+    );
+  }

   return (
     <div data-test="Criteria">
       <div className="input-title">
+        <span className="input-label">Selector</span>
+        {editMode ? (
+          <Select
+            value={alertOptions.selector}
+            onChange={(selector) => onChange({ selector })}
+            optionLabelProp="label"
+            dropdownMatchSelectWidth={false}
+            style={{ width: 80 }}
+          >
+            <Select.Option value="first" label="first">
+              first
+            </Select.Option>
+            <Select.Option value="min" label="min">
+              min
+            </Select.Option>
+            <Select.Option value="max" label="max">
+              max
+            </Select.Option>
+          </Select>
+        ) : (
+          <DisabledInput minWidth={60}>{alertOptions.selector}</DisabledInput>
+        )}
+      </div>
+      <div className="input-title">
         <span className="input-label">Value column</span>
         {editMode ? (
           <Select
             value={alertOptions.column}
-            onChange={column => onChange({ column })}
+            onChange={(column) => onChange({ column })}
             dropdownMatchSelectWidth={false}
-            style={{ minWidth: 100 }}>
-            {columnNames.map(name => (
+            style={{ minWidth: 100 }}
+          >
+            {columnNames.map((name) => (
               <Select.Option key={name}>{name}</Select.Option>
             ))}
           </Select>
@@ -83,10 +134,11 @@ export default function Criteria({ columnNames, resultValues, alertOptions, onChange
         {editMode ? (
           <Select
             value={alertOptions.op}
-            onChange={op => onChange({ op })}
+            onChange={(op) => onChange({ op })}
             optionLabelProp="label"
             dropdownMatchSelectWidth={false}
-            style={{ width: 55 }}>
+            style={{ width: 55 }}
+          >
             <Select.Option value=">" label={CONDITIONS[">"]}>
               {CONDITIONS[">"]} greater than
             </Select.Option>
@@ -125,7 +177,7 @@ export default function Criteria({ columnNames, resultValues, alertOptions, onChange
             id="threshold-criterion"
             style={{ width: 90 }}
             value={alertOptions.value}
-            onChange={e => onChange({ value: e.target.value })}
+            onChange={(e) => onChange({ value: e.target.value })}
           />
         ) : (
           <DisabledInput minWidth={50}>{alertOptions.value}</DisabledInput>
```
```diff
@@ -11,7 +11,7 @@ import LoadingOutlinedIcon from "@ant-design/icons/LoadingOutlined";
 import EllipsisOutlinedIcon from "@ant-design/icons/EllipsisOutlined";
 import PlainButton from "@/components/PlainButton";

-export default function MenuButton({ doDelete, canEdit, mute, unmute, muted }) {
+export default function MenuButton({ doDelete, canEdit, mute, unmute, evaluate, muted }) {
   const [loading, setLoading] = useState(false);

   const execute = useCallback(action => {
@@ -55,6 +55,9 @@ export default function MenuButton({ doDelete, canEdit, mute, unmute, muted }) {
           <Menu.Item>
             <PlainButton onClick={confirmDelete}>Delete</PlainButton>
           </Menu.Item>
+          <Menu.Item>
+            <PlainButton onClick={() => execute(evaluate)}>Evaluate</PlainButton>
+          </Menu.Item>
         </Menu>
       }>
       <Button aria-label="More actions">
@@ -69,6 +72,7 @@ MenuButton.propTypes = {
   canEdit: PropTypes.bool.isRequired,
   mute: PropTypes.func.isRequired,
   unmute: PropTypes.func.isRequired,
+  evaluate: PropTypes.func.isRequired,
   muted: PropTypes.bool,
 };
```
|
||||
@@ -6,6 +6,7 @@ import Link from "@/components/Link";
|
||||
import routeWithUserSession from "@/components/ApplicationArea/routeWithUserSession";
|
||||
import EmptyState, { EmptyStateHelpMessage } from "@/components/empty-state/EmptyState";
|
||||
import DynamicComponent from "@/components/DynamicComponent";
|
||||
import BeaconConsent from "@/components/BeaconConsent";
|
||||
import PlainButton from "@/components/PlainButton";
|
||||
|
||||
import { axios } from "@/services/axios";
|
||||
@@ -30,7 +31,8 @@ function DeprecatedEmbedFeatureAlert() {
|
||||
<Link
|
||||
href="https://discuss.redash.io/t/support-for-parameters-in-embedded-visualizations/3337"
|
||||
target="_blank"
|
||||
rel="noopener noreferrer">
|
||||
rel="noopener noreferrer"
|
||||
>
|
||||
Read more
|
||||
</Link>
|
||||
.
|
||||
@@ -42,7 +44,7 @@ function DeprecatedEmbedFeatureAlert() {
|
||||
|
||||
function EmailNotVerifiedAlert() {
|
||||
const verifyEmail = () => {
|
||||
axios.post("verification_email/").then(data => {
|
||||
axios.post("verification_email/").then((data) => {
|
||||
notification.success(data.message);
|
||||
});
|
||||
};
|
||||
@@ -88,6 +90,7 @@ export default function Home() {
|
||||
</DynamicComponent>
|
||||
<DynamicComponent name="HomeExtra" />
|
||||
<DashboardAndQueryFavoritesList />
|
||||
<BeaconConsent />
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
@@ -98,6 +101,6 @@ routes.register(
|
||||
routeWithUserSession({
|
||||
path: "/",
|
||||
title: "Redash",
|
||||
render: pageProps => <Home {...pageProps} />,
|
||||
render: (pageProps) => <Home {...pageProps} />,
|
||||
})
|
||||
);
|
||||
|
||||
```diff
@@ -160,14 +160,15 @@ function QueriesList({ controller }) {
             orderByField={controller.orderByField}
             orderByReverse={controller.orderByReverse}
             toggleSorting={controller.toggleSorting}
+            setSorting={controller.setSorting}
           />
           <Paginator
             showPageSizeSelect
             totalCount={controller.totalItemsCount}
             pageSize={controller.itemsPerPage}
-            onPageSizeChange={itemsPerPage => controller.updatePagination({ itemsPerPage })}
+            onPageSizeChange={(itemsPerPage) => controller.updatePagination({ itemsPerPage })}
             page={controller.page}
-            onChange={page => controller.updatePagination({ page })}
+            onChange={(page) => controller.updatePagination({ page })}
           />
         </div>
       </React.Fragment>
@@ -196,7 +197,7 @@ const QueriesListPage = itemsList(
       }[currentPage];
     },
     getItemProcessor() {
-      return item => new Query(item);
+      return (item) => new Query(item);
     },
   }),
   () => new UrlStateStorage({ orderByField: "created_at", orderByReverse: true })
@@ -207,7 +208,7 @@ routes.register(
   routeWithUserSession({
     path: "/queries",
     title: "Queries",
-    render: pageProps => <QueriesListPage {...pageProps} currentPage="all" />,
+    render: (pageProps) => <QueriesListPage {...pageProps} currentPage="all" />,
   })
 );
 routes.register(
@@ -215,7 +216,7 @@ routes.register(
   routeWithUserSession({
     path: "/queries/favorites",
     title: "Favorite Queries",
-    render: pageProps => <QueriesListPage {...pageProps} currentPage="favorites" />,
+    render: (pageProps) => <QueriesListPage {...pageProps} currentPage="favorites" />,
   })
 );
 routes.register(
@@ -223,7 +224,7 @@ routes.register(
   routeWithUserSession({
     path: "/queries/archive",
     title: "Archived Queries",
-    render: pageProps => <QueriesListPage {...pageProps} currentPage="archive" />,
+    render: (pageProps) => <QueriesListPage {...pageProps} currentPage="archive" />,
   })
 );
 routes.register(
@@ -231,6 +232,6 @@ routes.register(
   routeWithUserSession({
     path: "/queries/my",
     title: "My Queries",
-    render: pageProps => <QueriesListPage {...pageProps} currentPage="my" />,
+    render: (pageProps) => <QueriesListPage {...pageProps} currentPage="my" />,
   })
 );
```
```diff
@@ -9,6 +9,7 @@ import QueryControlDropdown from "@/components/EditVisualizationButton/QueryControlDropdown";
 import EditVisualizationButton from "@/components/EditVisualizationButton";
 import useQueryResultData from "@/lib/useQueryResultData";
 import { durationHumanize, pluralize, prettySize } from "@/lib/utils";
+import { isUndefined } from "lodash";

 import "./QueryExecutionMetadata.less";

@@ -51,7 +52,8 @@ export default function QueryExecutionMetadata({
                 "Result truncated to " +
                 queryResultData.rows.length +
                 " rows. Databricks may truncate query results that are unstably large."
-              }>
+              }
+            >
               <WarningTwoTone twoToneColor="#FF9800" />
             </Tooltip>
           </span>
@@ -67,10 +69,9 @@ export default function QueryExecutionMetadata({
         )}
         {isQueryExecuting && <span>Running…</span>}
       </span>
-      {queryResultData.metadata.data_scanned && (
+      {!isUndefined(queryResultData.metadata.data_scanned) && !isQueryExecuting && (
         <span className="m-l-5">
-          Data Scanned
-          <strong>{prettySize(queryResultData.metadata.data_scanned)}</strong>
+          Data Scanned <strong>{prettySize(queryResultData.metadata.data_scanned)}</strong>
         </span>
       )}
     </span>
```
```diff
@@ -2,7 +2,7 @@ import PropTypes from "prop-types";
 import React from "react";

 export function QuerySourceTypeIcon(props) {
-  return <img src={`static/images/db-logos/${props.type}.png`} width="20" alt={props.alt} />;
+  return <img src={`/static/images/db-logos/${props.type}.png`} width="20" alt={props.alt} />;
 }

 QuerySourceTypeIcon.propTypes = {
```
```diff
@@ -18,7 +18,7 @@ function EmptyState({ title, message, refreshButton }) {
     <div className="query-results-empty-state">
       <div className="empty-state-content">
         <div>
-          <img src="static/images/illustrations/no-query-results.svg" alt="No Query Results Illustration" />
+          <img src="/static/images/illustrations/no-query-results.svg" alt="No Query Results Illustration" />
         </div>
         <h3>{title}</h3>
         <div className="m-b-20">{message}</div>
@@ -40,7 +40,7 @@ EmptyState.defaultProps = {

 function TabWithDeleteButton({ visualizationName, canDelete, onDelete, ...props }) {
   const handleDelete = useCallback(
-    e => {
+    (e) => {
       e.stopPropagation();
       Modal.confirm({
         title: "Delete Visualization",
@@ -111,7 +111,8 @@ export default function QueryVisualizationTabs({
         className="add-visualization-button"
         data-test="NewVisualization"
         type="link"
-        onClick={() => onAddVisualization()}>
+        onClick={() => onAddVisualization()}
+      >
         <i className="fa fa-plus" aria-hidden="true" />
         <span className="m-l-5 hidden-xs">Add Visualization</span>
       </Button>
@@ -119,7 +120,7 @@ export default function QueryVisualizationTabs({
   }

   const orderedVisualizations = useMemo(() => orderBy(visualizations, ["id"]), [visualizations]);
-  const isFirstVisualization = useCallback(visId => visId === orderedVisualizations[0].id, [orderedVisualizations]);
+  const isFirstVisualization = useCallback((visId) => visId === orderedVisualizations[0].id, [orderedVisualizations]);
   const isMobile = useMedia({ maxWidth: 768 });

   const [filters, setFilters] = useState([]);
@@ -132,9 +133,10 @@ export default function QueryVisualizationTabs({
       data-test="QueryPageVisualizationTabs"
       animated={false}
       tabBarGutter={0}
-      onChange={activeKey => onChangeTab(+activeKey)}
-      destroyInactiveTabPane>
-      {orderedVisualizations.map(visualization => (
+      onChange={(activeKey) => onChangeTab(+activeKey)}
+      destroyInactiveTabPane
+    >
+      {orderedVisualizations.map((visualization) => (
         <TabPane
           key={`${visualization.id}`}
           tab={
@@ -144,7 +146,8 @@ export default function QueryVisualizationTabs({
               visualizationName={visualization.name}
               onDelete={() => onDeleteVisualization(visualization.id)}
             />
-          }>
+          }
+        >
           {queryResult ? (
             <VisualizationRenderer
               visualization={visualization}
```
@@ -1,16 +1,11 @@
import { useCallback, useMemo, useState } from "react";
import { reduce } from "lodash";
import localOptions from "@/lib/localOptions";

function calculateTokensCount(schema) {
  return reduce(schema, (totalLength, table) => totalLength + table.columns.length, 0);
}

export default function useAutocompleteFlags(schema) {
  const isAvailable = useMemo(() => calculateTokensCount(schema) <= 5000, [schema]);
  const isAvailable = true;
  const [isEnabled, setIsEnabled] = useState(localOptions.get("liveAutocomplete", true));

  const toggleAutocomplete = useCallback(state => {
  const toggleAutocomplete = useCallback((state) => {
    setIsEnabled(state);
    localOptions.set("liveAutocomplete", state);
  }, []);

@@ -0,0 +1,40 @@
import React from "react";
import Form from "antd/lib/form";
import Checkbox from "antd/lib/checkbox";
import Skeleton from "antd/lib/skeleton";
import HelpTrigger from "@/components/HelpTrigger";
import DynamicComponent from "@/components/DynamicComponent";
import { SettingsEditorPropTypes, SettingsEditorDefaultProps } from "../prop-types";

export default function BeaconConsentSettings(props) {
  const { values, onChange, loading } = props;

  return (
    <DynamicComponent name="OrganizationSettings.BeaconConsentSettings" {...props}>
      <Form.Item
        label={
          <span>
            Anonymous Usage Data Sharing
            <HelpTrigger className="m-l-5 m-r-5" type="USAGE_DATA_SHARING" />
          </span>
        }
      >
        {loading ? (
          <Skeleton title={{ width: 300 }} paragraph={false} active />
        ) : (
          <Checkbox
            name="beacon_consent"
            checked={values.beacon_consent}
            onChange={(e) => onChange({ beacon_consent: e.target.checked })}
          >
            Help Redash improve by automatically sending anonymous usage data
          </Checkbox>
        )}
      </Form.Item>
    </DynamicComponent>
  );
}

BeaconConsentSettings.propTypes = SettingsEditorPropTypes;

BeaconConsentSettings.defaultProps = SettingsEditorDefaultProps;
@@ -4,6 +4,7 @@ import DynamicComponent from "@/components/DynamicComponent";
import FormatSettings from "./FormatSettings";
import PlotlySettings from "./PlotlySettings";
import FeatureFlagsSettings from "./FeatureFlagsSettings";
import BeaconConsentSettings from "./BeaconConsentSettings";

export default function GeneralSettings(props) {
  return (

@@ -13,6 +14,7 @@ export default function GeneralSettings(props) {
      <FormatSettings {...props} />
      <PlotlySettings {...props} />
      <FeatureFlagsSettings {...props} />
      <BeaconConsentSettings {...props} />
    </DynamicComponent>
  );
}

@@ -36,6 +36,7 @@ const Alert = {
  delete: data => axios.delete(`api/alerts/${data.id}`),
  mute: data => axios.post(`api/alerts/${data.id}/mute`),
  unmute: data => axios.delete(`api/alerts/${data.id}/mute`),
  evaluate: data => axios.post(`api/alerts/${data.id}/eval`),
};

export default Alert;

@@ -4,19 +4,19 @@ import { fetchDataFromJob } from "@/services/query-result";

export const SCHEMA_NOT_SUPPORTED = 1;
export const SCHEMA_LOAD_ERROR = 2;
export const IMG_ROOT = "static/images/db-logos";
export const IMG_ROOT = "/static/images/db-logos";

function mapSchemaColumnsToObject(columns) {
  return map(columns, column => (isObject(column) ? column : { name: column }));
  return map(columns, (column) => (isObject(column) ? column : { name: column }));
}

const DataSource = {
  query: () => axios.get("api/data_sources"),
  get: ({ id }) => axios.get(`api/data_sources/${id}`),
  types: () => axios.get("api/data_sources/types"),
  create: data => axios.post(`api/data_sources`, data),
  save: data => axios.post(`api/data_sources/${data.id}`, data),
  test: data => axios.post(`api/data_sources/${data.id}/test`),
  create: (data) => axios.post(`api/data_sources`, data),
  save: (data) => axios.post(`api/data_sources/${data.id}`, data),
  test: (data) => axios.post(`api/data_sources/${data.id}/test`),
  delete: ({ id }) => axios.delete(`api/data_sources/${id}`),
  fetchSchema: (data, refresh = false) => {
    const params = {};

@@ -27,15 +27,15 @@ const DataSource = {

    return axios
      .get(`api/data_sources/${data.id}/schema`, { params })
      .then(data => {
      .then((data) => {
        if (has(data, "job")) {
          return fetchDataFromJob(data.job.id).catch(error =>
          return fetchDataFromJob(data.job.id).catch((error) =>
            error.code === SCHEMA_NOT_SUPPORTED ? [] : Promise.reject(new Error(data.job.error))
          );
        }
        return has(data, "schema") ? data.schema : Promise.reject();
      })
      .then(tables => map(tables, table => ({ ...table, columns: mapSchemaColumnsToObject(table.columns) })));
      .then((tables) => map(tables, (table) => ({ ...table, columns: mapSchemaColumnsToObject(table.columns) })));
  },
};

@@ -9,7 +9,7 @@ const logger = debug("redash:services:QueryResult");
const filterTypes = ["filter", "multi-filter", "multiFilter"];

function defer() {
  const result = { onStatusChange: status => {} };
  const result = { onStatusChange: (status) => {} };
  result.promise = new Promise((resolve, reject) => {
    result.resolve = resolve;
    result.reject = reject;

@@ -40,13 +40,13 @@ function getColumnNameWithoutType(column) {
}

function getColumnFriendlyName(column) {
  return getColumnNameWithoutType(column).replace(/(?:^|\s)\S/g, a => a.toUpperCase());
  return getColumnNameWithoutType(column).replace(/(?:^|\s)\S/g, (a) => a.toUpperCase());
}

const createOrSaveUrl = data => (data.id ? `api/query_results/${data.id}` : "api/query_results");
const createOrSaveUrl = (data) => (data.id ? `api/query_results/${data.id}` : "api/query_results");
const QueryResultResource = {
  get: ({ id }) => axios.get(`api/query_results/${id}`),
  post: data => axios.post(createOrSaveUrl(data), data),
  post: (data) => axios.post(createOrSaveUrl(data), data),
};

export const ExecutionStatus = {

@@ -97,11 +97,11 @@ function handleErrorResponse(queryResult, error) {
}

function sleep(ms) {
  return new Promise(resolve => setTimeout(resolve, ms));
  return new Promise((resolve) => setTimeout(resolve, ms));
}

export function fetchDataFromJob(jobId, interval = 1000) {
  return axios.get(`api/jobs/${jobId}`).then(data => {
  return axios.get(`api/jobs/${jobId}`).then((data) => {
    const status = statuses[data.job.status];
    if (status === ExecutionStatus.WAITING || status === ExecutionStatus.PROCESSING) {
      return sleep(interval).then(() => fetchDataFromJob(data.job.id));
@@ -146,7 +146,7 @@ class QueryResult {
    // TODO: we should stop manipulating incoming data, and switch to relying
    // on the column type set by the backend. This logic is prone to errors,
    // and better be removed. Kept for now, for backward compatibility.
    each(this.query_result.data.rows, row => {
    each(this.query_result.data.rows, (row) => {
      forOwn(row, (v, k) => {
        let newType = null;
        if (isNumber(v)) {

@@ -173,7 +173,7 @@ class QueryResult {
      });
    });

    each(this.query_result.data.columns, column => {
    each(this.query_result.data.columns, (column) => {
      column.name = "" + column.name;
      if (columnTypes[column.name]) {
        if (column.type == null || column.type === "string") {
@@ -265,14 +265,14 @@ class QueryResult {

  getColumnNames() {
    if (this.columnNames === undefined && this.query_result.data) {
      this.columnNames = this.query_result.data.columns.map(v => v.name);
      this.columnNames = this.query_result.data.columns.map((v) => v.name);
    }

    return this.columnNames;
  }

  getColumnFriendlyNames() {
    return this.getColumnNames().map(col => getColumnFriendlyName(col));
    return this.getColumnNames().map((col) => getColumnFriendlyName(col));
  }

  getTruncated() {

@@ -286,7 +286,7 @@ class QueryResult {

    const filters = [];

    this.getColumns().forEach(col => {
    this.getColumns().forEach((col) => {
      const name = col.name;
      const type = name.split("::")[1] || name.split("__")[1];
      if (includes(filterTypes, type)) {

@@ -302,8 +302,8 @@ class QueryResult {
      }
    }, this);

    this.getRawData().forEach(row => {
      filters.forEach(filter => {
    this.getRawData().forEach((row) => {
      filters.forEach((filter) => {
        filter.values.push(row[filter.name]);
        if (filter.values.length === 1) {
          if (filter.multiple) {

@@ -315,8 +315,8 @@ class QueryResult {
      });
    });

    filters.forEach(filter => {
      filter.values = uniqBy(filter.values, v => {
    filters.forEach((filter) => {
      filter.values = uniqBy(filter.values, (v) => {
        if (moment.isMoment(v)) {
          return v.unix();
        }

@@ -345,12 +345,12 @@ class QueryResult {

    axios
      .get(`api/queries/${queryId}/results/${id}.json`)
      .then(response => {
      .then((response) => {
        // Success handler
        queryResult.isLoadingResult = false;
        queryResult.update(response);
      })
      .catch(error => {
      .catch((error) => {
        // Error handler
        queryResult.isLoadingResult = false;
        handleErrorResponse(queryResult, error);

@@ -362,10 +362,10 @@ class QueryResult {
  loadLatestCachedResult(queryId, parameters) {
    axios
      .post(`api/queries/${queryId}/results`, { queryId, parameters })
      .then(response => {
      .then((response) => {
        this.update(response);
      })
      .catch(error => {
      .catch((error) => {
        handleErrorResponse(this, error);
      });
  }

@@ -375,11 +375,11 @@ class QueryResult {
    this.deferred.onStatusChange(ExecutionStatus.LOADING_RESULT);

    QueryResultResource.get({ id: this.job.query_result_id })
      .then(response => {
      .then((response) => {
        this.update(response);
        this.isLoadingResult = false;
      })
      .catch(error => {
      .catch((error) => {
        if (tryCount === undefined) {
          tryCount = 0;
        }
@@ -394,9 +394,12 @@ class QueryResult {
        });
        this.isLoadingResult = false;
      } else {
        setTimeout(() => {
          this.loadResult(tryCount + 1);
        }, 1000 * Math.pow(2, tryCount));
        setTimeout(
          () => {
            this.loadResult(tryCount + 1);
          },
          1000 * Math.pow(2, tryCount)
        );
      }
    });
  }

@@ -410,19 +413,26 @@ class QueryResult {
      : axios.get(`api/queries/${query}/jobs/${this.job.id}`);

    request
      .then(jobResponse => {
      .then((jobResponse) => {
        this.update(jobResponse);

        if (this.getStatus() === "processing" && this.job.query_result_id && this.job.query_result_id !== "None") {
          loadResult();
        } else if (this.getStatus() !== "failed") {
          const waitTime = tryNumber > 10 ? 3000 : 500;
          let waitTime;
          if (tryNumber <= 10) {
            waitTime = 500;
          } else if (tryNumber <= 50) {
            waitTime = 1000;
          } else {
            waitTime = 3000;
          }
          setTimeout(() => {
            this.refreshStatus(query, parameters, tryNumber + 1);
          }, waitTime);
        }
      })
      .catch(error => {
      .catch((error) => {
        logger("Connection error", error);
        // TODO: use QueryResultError, or better yet: exception/reject of promise.
        this.update({

@@ -451,14 +461,14 @@ class QueryResult {

    axios
      .post(`api/queries/${id}/results`, { id, parameters, apply_auto_limit: applyAutoLimit, max_age: maxAge })
      .then(response => {
      .then((response) => {
        queryResult.update(response);

        if ("job" in response) {
          queryResult.refreshStatus(id, parameters);
        }
      })
      .catch(error => {
      .catch((error) => {
        handleErrorResponse(queryResult, error);
      });

@@ -481,14 +491,14 @@ class QueryResult {
    }

    QueryResultResource.post(params)
      .then(response => {
      .then((response) => {
        queryResult.update(response);

        if ("job" in response) {
          queryResult.refreshStatus(query, parameters);
        }
      })
      .catch(error => {
      .catch((error) => {
        handleErrorResponse(queryResult, error);
      });

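Note: the refreshStatus hunk above replaces the old two-step delay (500 ms, then 3 s after ten tries) with a three-step ladder, and loadResult now retries with an exponential 1000 * 2^tryCount delay. A minimal standalone sketch of the stepped-polling idea, written in Python for brevity; fetch_status and the function name are illustrative stand-ins, not a Redash API:

    import time

    def poll_job_status(fetch_status, max_tries=100):
        # Poll until the job reports a terminal status, widening the delay
        # in steps: 500 ms for tries 1-10, 1 s through try 50, then 3 s,
        # mirroring the waitTime ladder in the hunk above.
        for try_number in range(1, max_tries + 1):
            status = fetch_status()
            if status in ("finished", "failed"):
                return status
            if try_number <= 10:
                delay = 0.5
            elif try_number <= 50:
                delay = 1.0
            else:
                delay = 3.0
            time.sleep(delay)
        raise TimeoutError("job did not reach a terminal status")
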
@@ -63,7 +63,7 @@ function runCypressCI() {
    CYPRESS_OPTIONS, // eslint-disable-line no-unused-vars
  } = process.env;

  if (GITHUB_REPOSITORY === "getredash/redash") {
  if (GITHUB_REPOSITORY === "getredash/redash" && process.env.CYPRESS_RECORD_KEY) {
    process.env.CYPRESS_OPTIONS = "--record";
  }

@@ -26,33 +26,33 @@ const SQL = `
describe("Chart", () => {
  beforeEach(() => {
    cy.login();
    cy.createQuery({ name: "Chart Visualization", query: SQL })
      .its("id")
      .as("queryId");
    cy.createQuery({ name: "Chart Visualization", query: SQL }).its("id").as("queryId");
  });

  it("creates Bar charts", function() {
  it("creates Bar charts", function () {
    cy.visit(`queries/${this.queryId}/source`);
    cy.getByTestId("ExecuteButton").click();

    const getBarChartAssertionFunction = (specificBarChartAssertionFn = () => {}) => () => {
      // checks for TabbedEditor standard tabs
      assertTabbedEditor();
    const getBarChartAssertionFunction =
      (specificBarChartAssertionFn = () => {}) =>
      () => {
        // checks for TabbedEditor standard tabs
        assertTabbedEditor();

      // standard chart should be bar
      cy.getByTestId("Chart.GlobalSeriesType").contains(".ant-select-selection-item", "Bar");
        // standard chart should be bar
        cy.getByTestId("Chart.GlobalSeriesType").contains(".ant-select-selection-item", "Bar");

      // checks the plot canvas exists and is empty
      assertPlotPreview("not.exist");
        // checks the plot canvas exists and is empty
        assertPlotPreview("not.exist");

      // creates a chart and checks it is plotted
      cy.getByTestId("Chart.ColumnMapping.x").selectAntdOption("Chart.ColumnMapping.x.stage");
      cy.getByTestId("Chart.ColumnMapping.y").selectAntdOption("Chart.ColumnMapping.y.value1");
      cy.getByTestId("Chart.ColumnMapping.y").selectAntdOption("Chart.ColumnMapping.y.value2");
      assertPlotPreview("exist");
        // creates a chart and checks it is plotted
        cy.getByTestId("Chart.ColumnMapping.x").selectAntdOption("Chart.ColumnMapping.x.stage");
        cy.getByTestId("Chart.ColumnMapping.y").selectAntdOption("Chart.ColumnMapping.y.value1");
        cy.getByTestId("Chart.ColumnMapping.y").selectAntdOption("Chart.ColumnMapping.y.value2");
        assertPlotPreview("exist");

      specificBarChartAssertionFn();
    };
        specificBarChartAssertionFn();
      };

  const chartTests = [
    {

@@ -95,8 +95,8 @@ describe("Chart", () => {

  const withDashboardWidgetsAssertionFn = (widgetGetters, dashboardUrl) => {
    cy.visit(dashboardUrl);
    widgetGetters.forEach(widgetGetter => {
      cy.get(`@${widgetGetter}`).then(widget => {
    widgetGetters.forEach((widgetGetter) => {
      cy.get(`@${widgetGetter}`).then((widget) => {
        cy.getByTestId(getWidgetTestId(widget)).within(() => {
          cy.get("g.points").should("exist");
        });

@@ -107,4 +107,34 @@ describe("Chart", () => {
    createDashboardWithCharts("Bar chart visualizations", chartGetters, withDashboardWidgetsAssertionFn);
    cy.percySnapshot("Visualizations - Charts - Bar");
  });
  it("colors Bar charts", function () {
    cy.visit(`queries/${this.queryId}/source`);
    cy.getByTestId("ExecuteButton").click();
    cy.getByTestId("NewVisualization").click();
    cy.getByTestId("Chart.ColumnMapping.x").selectAntdOption("Chart.ColumnMapping.x.stage");
    cy.getByTestId("Chart.ColumnMapping.y").selectAntdOption("Chart.ColumnMapping.y.value1");
    cy.getByTestId("VisualizationEditor.Tabs.Colors").click();
    cy.getByTestId("ColorScheme").click();
    cy.getByTestId("ColorOptionViridis").click();
    cy.getByTestId("ColorScheme").click();
    cy.getByTestId("ColorOptionTableau 10").click();
    cy.getByTestId("ColorScheme").click();
    cy.getByTestId("ColorOptionD3 Category 10").click();
  });
  it("colors Pie charts", function () {
    cy.visit(`queries/${this.queryId}/source`);
    cy.getByTestId("ExecuteButton").click();
    cy.getByTestId("NewVisualization").click();
    cy.getByTestId("Chart.GlobalSeriesType").click();
    cy.getByTestId("Chart.ChartType.pie").click();
    cy.getByTestId("Chart.ColumnMapping.x").selectAntdOption("Chart.ColumnMapping.x.stage");
    cy.getByTestId("Chart.ColumnMapping.y").selectAntdOption("Chart.ColumnMapping.y.value1");
    cy.getByTestId("VisualizationEditor.Tabs.Colors").click();
    cy.getByTestId("ColorScheme").click();
    cy.getByTestId("ColorOptionViridis").click();
    cy.getByTestId("ColorScheme").click();
    cy.getByTestId("ColorOptionTableau 10").click();
    cy.getByTestId("ColorScheme").click();
    cy.getByTestId("ColorOptionD3 Category 10").click();
  });
});

@@ -1,33 +0,0 @@
const loremIpsum =
  "Lorem ipsum dolor sit amet consectetur adipiscing elit" +
  "sed do eiusmod tempor incididunt ut labore et dolore magna aliqua";

export const query = `
SELECT '${loremIpsum}' AS a, '${loremIpsum}' AS b, '${loremIpsum}' AS c, '${loremIpsum}' AS d, '${loremIpsum}' as e
`;

export const config = {
  itemsPerPage: 10,
  columns: [
    {
      name: "a",
      displayAs: "string",
    },
    {
      name: "b",
      displayAs: "string",
    },
    {
      name: "c",
      displayAs: "string",
    },
    {
      name: "d",
      displayAs: "string",
    },
    {
      name: "e",
      displayAs: "string",
    }
  ]
}
@@ -8,7 +8,6 @@ import * as AllCellTypes from "./.mocks/all-cell-types";
import * as MultiColumnSort from "./.mocks/multi-column-sort";
import * as SearchInData from "./.mocks/search-in-data";
import * as LargeDataset from "./.mocks/large-dataset";
import * as WideDataSet from "./.mocks/wide-dataset";

function prepareVisualization(query, type, name, options) {
  return cy

@@ -23,10 +22,7 @@ function prepareVisualization(query, type, name, options) {
    cy.get("body").type("{alt}D");

    // do some pre-checks here to ensure that visualization was created and is visible
    cy.getByTestId("TableVisualization")
      .should("exist")
      .find("table")
      .should("exist");
    cy.getByTestId("TableVisualization").should("exist").find("table").should("exist");

    return cy.then(() => ({ queryId, visualizationId }));
  });

@@ -54,7 +50,7 @@ describe("Table", () => {
  });

  describe("Sorting data", () => {
    beforeEach(function() {
    beforeEach(function () {
      const { query, config } = MultiColumnSort;
      prepareVisualization(query, "TABLE", "Sort data", config).then(({ queryId, visualizationId }) => {
        this.queryId = queryId;
@@ -62,94 +58,30 @@ describe("Table", () => {
      });
    });

    it("sorts data by a single column", function() {
      cy.getByTestId("TableVisualization")
        .find("table th")
        .contains("c")
        .should("exist")
        .click();
    it("sorts data by a single column", function () {
      cy.getByTestId("TableVisualization").find("table th").contains("c").should("exist").click();
      cy.percySnapshot("Visualizations - Table (Single-column sort)", { widths: [viewportWidth] });
    });

    it("sorts data by a multiple columns", function() {
      cy.getByTestId("TableVisualization")
        .find("table th")
        .contains("a")
        .should("exist")
        .click();
    it("sorts data by a multiple columns", function () {
      cy.getByTestId("TableVisualization").find("table th").contains("a").should("exist").click();

      cy.get("body").type("{shift}", { release: false });
      cy.getByTestId("TableVisualization")
        .find("table th")
        .contains("b")
        .should("exist")
        .click();
      cy.getByTestId("TableVisualization").find("table th").contains("b").should("exist").click();

      cy.percySnapshot("Visualizations - Table (Multi-column sort)", { widths: [viewportWidth] });
    });

    it("sorts data in reverse order", function() {
      cy.getByTestId("TableVisualization")
        .find("table th")
        .contains("c")
        .should("exist")
        .click()
        .click();
    it("sorts data in reverse order", function () {
      cy.getByTestId("TableVisualization").find("table th").contains("c").should("exist").click().click();
      cy.percySnapshot("Visualizations - Table (Single-column reverse sort)", { widths: [viewportWidth] });
    });
  });

  describe("Fixing columns", () => {
    it("fixes the correct number of columns", () => {
      const { query, config } = WideDataSet;
      prepareVisualization(query, "TABLE", "All cell types", config);
      cy.getByTestId("EditVisualization").click();
      cy.contains("span", "Grid").click();
      cy.getByTestId("FixedColumns").click();
      cy.contains(".ant-select-item-option-content", "1").click();
      cy.contains("Save").click();
      // eslint-disable-next-line cypress/no-unnecessary-waiting
      cy.wait(500); //add some waiting to make sure table visualization is saved

      cy.get(".ant-table-thead")
        .find("th.ant-table-cell-fix-left")
        .then(fixedCols => {
          expect(fixedCols.length).to.equal(1);
        });

      cy.get(".ant-table-content").scrollTo("right", { duration: 1000 });
      cy.get(".ant-table-content").scrollTo("left", { duration: 1000 });
    });

    it("doesn't let user fix too many columns", () => {
      const { query, config } = MultiColumnSort;
      prepareVisualization(query, "TABLE", "Test data", config);
      cy.getByTestId("EditVisualization").click();
      cy.contains("span", "Grid").click();
      cy.getByTestId("FixedColumns").click();
      cy.get(".ant-select-item-option-content");
      cy.contains(".ant-select-item-option-content", "3").should("not.exist");
      cy.contains(".ant-select-item-option-content", "4").should("not.exist");
    });

    it("doesn't cause issues when freezing column off of page", () => {
      const { query, config } = WideDataSet;
      prepareVisualization(query, "TABLE", "Test data", config);
      cy.getByTestId("EditVisualization").click();
      cy.contains("span", "Grid").click();
      cy.getByTestId("FixedColumns").click();
      cy.contains(".ant-select-item-option-content", "4").click();
      cy.contains("Save").click();
    });
  });

  it("searches in multiple columns", () => {
    const { query, config } = SearchInData;
    prepareVisualization(query, "TABLE", "Search", config).then(({ visualizationId }) => {
      cy.getByTestId("TableVisualization")
        .find("table input")
        .should("exist")
        .type("test");
      cy.getByTestId("TableVisualization").find("table input").should("exist").type("test");
      cy.percySnapshot("Visualizations - Table (Search in data)", { widths: [viewportWidth] });
    });
  });

@@ -2,12 +2,12 @@

const { extend, get, merge, find } = Cypress._;

const post = options =>
const post = (options) =>
  cy
    .getCookie("csrf_token")
    .then(csrf => cy.request({ ...options, method: "POST", headers: { "X-CSRF-TOKEN": csrf.value } }));
    .then((csrf) => cy.request({ ...options, method: "POST", headers: { "X-CSRF-TOKEN": csrf.value } }));

Cypress.Commands.add("createDashboard", name => {
Cypress.Commands.add("createDashboard", (name) => {
  return post({ url: "api/dashboards", body: { name } }).then(({ body }) => body);
});

@@ -28,7 +28,7 @@ Cypress.Commands.add("createQuery", (data, shouldPublish = true) => {
  // eslint-disable-next-line cypress/no-assigning-return-values
  let request = post({ url: "/api/queries", body: merged }).then(({ body }) => body);
  if (shouldPublish) {
    request = request.then(query =>
    request = request.then((query) =>
      post({ url: `/api/queries/${query.id}`, body: { is_draft: false } }).then(() => query)
    );
  }

@@ -86,6 +86,7 @@ Cypress.Commands.add("addWidget", (dashboardId, visualizationId, options = {}) =
Cypress.Commands.add("createAlert", (queryId, options = {}, name) => {
  const defaultOptions = {
    column: "?column?",
    selector: "first",
    op: "greater than",
    rearm: 0,
    value: 1,

@@ -109,7 +110,7 @@ Cypress.Commands.add("createUser", ({ name, email, password }) => {
    url: "api/users?no_invite=yes",
    body: { name, email },
    failOnStatusCode: false,
  }).then(xhr => {
  }).then((xhr) => {
    const { status, body } = xhr;
    if (status < 200 || status > 400) {
      throw new Error(xhr);

@@ -146,7 +147,7 @@ Cypress.Commands.add("getDestinations", () => {
Cypress.Commands.add("addDestinationSubscription", (alertId, destinationName) => {
  return cy
    .getDestinations()
    .then(destinations => {
    .then((destinations) => {
      const destination = find(destinations, { name: destinationName });
      if (!destination) {
        throw new Error("Destination not found");

@@ -166,6 +167,6 @@ Cypress.Commands.add("addDestinationSubscription", (alertId, destinationName) =>
  });
});

Cypress.Commands.add("updateOrgSettings", settings => {
Cypress.Commands.add("updateOrgSettings", (settings) => {
  return post({ url: "api/settings/organization", body: settings }).then(({ body }) => body);
});

@@ -3,36 +3,26 @@
 * @param should Passed to should expression after plot points are captured
 */
export function assertPlotPreview(should = "exist") {
  cy.getByTestId("VisualizationPreview")
    .find("g.plot")
    .should("exist")
    .find("g.points")
    .should(should);
  cy.getByTestId("VisualizationPreview").find("g.overplot").should("exist").find("g.points").should(should);
}

export function createChartThroughUI(chartName, chartSpecificAssertionFn = () => {}) {
  cy.getByTestId("NewVisualization").click();
  cy.getByTestId("VisualizationType").selectAntdOption("VisualizationType.CHART");
  cy.getByTestId("VisualizationName")
    .clear()
    .type(chartName);
  cy.getByTestId("VisualizationName").clear().type(chartName);

  chartSpecificAssertionFn();

  cy.server();
  cy.route("POST", "**/api/visualizations").as("SaveVisualization");

  cy.getByTestId("EditVisualizationDialog")
    .contains("button", "Save")
    .click();
  cy.getByTestId("EditVisualizationDialog").contains("button", "Save").click();

  cy.getByTestId("QueryPageVisualizationTabs")
    .contains("span", chartName)
    .should("exist");
  cy.getByTestId("QueryPageVisualizationTabs").contains("span", chartName).should("exist");

  cy.wait("@SaveVisualization").should("have.property", "status", 200);

  return cy.get("@SaveVisualization").then(xhr => {
  return cy.get("@SaveVisualization").then((xhr) => {
    const { id, name, options } = xhr.response.body;
    return cy.wrap({ id, name, options });
  });

@@ -42,19 +32,13 @@ export function assertTabbedEditor(chartSpecificTabbedEditorAssertionFn = () =>
  cy.getByTestId("Chart.GlobalSeriesType").should("exist");

  cy.getByTestId("VisualizationEditor.Tabs.Series").click();
  cy.getByTestId("VisualizationEditor")
    .find("table")
    .should("exist");
  cy.getByTestId("VisualizationEditor").find("table").should("exist");

  cy.getByTestId("VisualizationEditor.Tabs.Colors").click();
  cy.getByTestId("VisualizationEditor")
    .find("table")
    .should("exist");
  cy.getByTestId("VisualizationEditor").find("table").should("exist");

  cy.getByTestId("VisualizationEditor.Tabs.DataLabels").click();
  cy.getByTestId("VisualizationEditor")
    .getByTestId("Chart.DataLabels.ShowDataLabels")
    .should("exist");
  cy.getByTestId("VisualizationEditor").getByTestId("Chart.DataLabels.ShowDataLabels").should("exist");

  chartSpecificTabbedEditorAssertionFn();

@@ -63,39 +47,29 @@ export function assertTabbedEditor(chartSpecificTabbedEditorAssertionFn = () =>

export function assertAxesAndAddLabels(xaxisLabel, yaxisLabel) {
  cy.getByTestId("VisualizationEditor.Tabs.XAxis").click();
  cy.getByTestId("Chart.XAxis.Type")
    .contains(".ant-select-selection-item", "Auto Detect")
    .should("exist");
  cy.getByTestId("Chart.XAxis.Type").contains(".ant-select-selection-item", "Auto Detect").should("exist");

  cy.getByTestId("Chart.XAxis.Name")
    .clear()
    .type(xaxisLabel);
  cy.getByTestId("Chart.XAxis.Name").clear().type(xaxisLabel);

  cy.getByTestId("VisualizationEditor.Tabs.YAxis").click();
  cy.getByTestId("Chart.LeftYAxis.Type")
    .contains(".ant-select-selection-item", "Linear")
    .should("exist");
  cy.getByTestId("Chart.LeftYAxis.Type").contains(".ant-select-selection-item", "Linear").should("exist");

  cy.getByTestId("Chart.LeftYAxis.Name")
    .clear()
    .type(yaxisLabel);
  cy.getByTestId("Chart.LeftYAxis.Name").clear().type(yaxisLabel);

  cy.getByTestId("Chart.LeftYAxis.TickFormat")
    .clear()
    .type("+");
  cy.getByTestId("Chart.LeftYAxis.TickFormat").clear().type("+");

  cy.getByTestId("VisualizationEditor.Tabs.General").click();
}

export function createDashboardWithCharts(title, chartGetters, widgetsAssertionFn = () => {}) {
  cy.createDashboard(title).then(dashboard => {
  cy.createDashboard(title).then((dashboard) => {
    const dashboardUrl = `/dashboards/${dashboard.id}`;
    const widgetGetters = chartGetters.map(chartGetter => `${chartGetter}Widget`);
    const widgetGetters = chartGetters.map((chartGetter) => `${chartGetter}Widget`);

    chartGetters.forEach((chartGetter, i) => {
      const position = { autoHeight: false, sizeY: 8, sizeX: 3, col: (i % 2) * 3 };
      cy.get(`@${chartGetter}`)
        .then(chart => cy.addWidget(dashboard.id, chart.id, { position }))
        .then((chart) => cy.addWidget(dashboard.id, chart.id, { position }))
        .as(widgetGetters[i]);
    });

@@ -1,12 +1,10 @@
export function expectTableToHaveLength(length) {
  cy.getByTestId("TableVisualization")
    .find("tbody tr.ant-table-row")
    .should("have.length", length);
  cy.getByTestId("TableVisualization").find("tbody tr").should("have.length", length);
}

export function expectFirstColumnToHaveMembers(values) {
  cy.getByTestId("TableVisualization")
    .find("tbody tr.ant-table-row td:first-child")
    .then($cell => Cypress.$.map($cell, item => Cypress.$(item).text()))
    .then(firstColumnCells => expect(firstColumnCells).to.have.members(values));
    .find("tbody tr td:first-child")
    .then(($cell) => Cypress.$.map($cell, (item) => Cypress.$(item).text()))
    .then((firstColumnCells) => expect(firstColumnCells).to.have.members(values));
}

@@ -1,24 +0,0 @@
services:
  .redash:
    build:
      context: .
      args:
        FRONTEND_BUILD_MODE: ${FRONTEND_BUILD_MODE:-2}
        INSTALL_GROUPS: ${INSTALL_GROUPS:-main,all_ds,dev}
    volumes:
      - $PWD:${SERVER_MOUNT:-/ignore}
    command: manage version
    environment:
      REDASH_LOG_LEVEL: INFO
      REDASH_REDIS_URL: redis://redis:6379/0
      REDASH_DATABASE_URL: postgresql://postgres@postgres/postgres
      REDASH_RATELIMIT_ENABLED: false
      REDASH_MAIL_DEFAULT_SENDER: redash@example.com
      REDASH_MAIL_SERVER: email
      REDASH_MAIL_PORT: 1025
      REDASH_ENFORCE_CSRF: true
      REDASH_COOKIE_SECRET: ${REDASH_COOKIE_SECRET}
      REDASH_SECRET_KEY: ${REDASH_SECRET_KEY}
      REDASH_PRODUCTION: ${REDASH_PRODUCTION:-true}
    env_file:
      - .env
@@ -10,6 +10,7 @@ x-redash-service: &redash-service
  env_file:
    - .env
x-redash-environment: &redash-environment
  REDASH_HOST: http://localhost:5001
  REDASH_LOG_LEVEL: "INFO"
  REDASH_REDIS_URL: "redis://redis:6379/0"
  REDASH_DATABASE_URL: "postgresql://postgres@postgres/postgres"

@@ -15,6 +15,7 @@ from redash import settings
from redash.utils.configuration import ConfigurationContainer
from redash.models.types import (
    EncryptedConfiguration,
    Configuration,
    MutableDict,
    MutableList,
)

@@ -44,14 +45,7 @@ def upgrade():
                )
            ),
        ),
        sa.Column(
            "options",
            ConfigurationContainer.as_mutable(
                EncryptedConfiguration(
                    sa.Text, settings.DATASOURCE_SECRET_KEY, FernetEngine
                )
            ),
        ),
        sa.Column("options", ConfigurationContainer.as_mutable(Configuration)),
    )

    conn = op.get_bind()

migrations/versions/9e8c841d1a30_fix_hash.py (new file, 64 lines)
@@ -0,0 +1,64 @@
"""fix_hash

Revision ID: 9e8c841d1a30
Revises: 7205816877ec
Create Date: 2024-10-05 18:55:35.730573

"""
import logging
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table
from sqlalchemy import select

from redash.query_runner import BaseQueryRunner, get_query_runner


# revision identifiers, used by Alembic.
revision = '9e8c841d1a30'
down_revision = '7205816877ec'
branch_labels = None
depends_on = None


def update_query_hash(record):
    should_apply_auto_limit = record['options'].get("apply_auto_limit", False) if record['options'] else False
    query_runner = get_query_runner(record['type'], {}) if record['type'] else BaseQueryRunner({})
    query_text = record['query']

    parameters_dict = {p["name"]: p.get("value") for p in record['options'].get('parameters', [])} if record.options else {}
    if any(parameters_dict):
        print(f"Query {record['query_id']} has parameters. Hash might be incorrect.")

    return query_runner.gen_query_hash(query_text, should_apply_auto_limit)


def upgrade():
    conn = op.get_bind()

    metadata = sa.MetaData(bind=conn)
    queries = sa.Table("queries", metadata, autoload=True)
    data_sources = sa.Table("data_sources", metadata, autoload=True)

    joined_table = queries.outerjoin(data_sources, queries.c.data_source_id == data_sources.c.id)

    query = select([
        queries.c.id.label("query_id"),
        queries.c.query,
        queries.c.query_hash,
        queries.c.options,
        data_sources.c.id.label("data_source_id"),
        data_sources.c.type
    ]).select_from(joined_table)

    for record in conn.execute(query):
        new_hash = update_query_hash(record)
        print(f"Updating hash for query {record['query_id']} from {record['query_hash']} to {new_hash}")
        conn.execute(
            queries.update()
            .where(queries.c.id == record['query_id'])
            .values(query_hash=new_hash))


def downgrade():
    pass

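Note: the migration above recomputes every stored query_hash through the query runner's gen_query_hash so hashes stay consistent with the apply_auto_limit option. As a rough illustration of this style of hashing (an assumption for illustration only, not Redash's verbatim implementation), normalize the query text and hash it together with the auto-limit flag:

    import hashlib
    import re

    def gen_query_hash_sketch(sql, apply_auto_limit=False):
        # Illustrative only: collapse whitespace and lowercase so that
        # formatting-only edits map to the same hash, then mix in the
        # auto-limit flag, which is why existing hashes needed fixing.
        normalized = re.sub(r"\s+", " ", sql.strip().lower())
        return hashlib.md5(f"{normalized}#{apply_auto_limit}".encode("utf-8")).hexdigest()
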
@@ -14,7 +14,10 @@ from sqlalchemy_utils.types.encrypted.encrypted_type import FernetEngine
from redash import settings
from redash.utils.configuration import ConfigurationContainer
from redash.models.base import key_type
from redash.models.types import EncryptedConfiguration
from redash.models.types import (
    EncryptedConfiguration,
    Configuration,
)


# revision identifiers, used by Alembic.

@@ -42,14 +45,7 @@ def upgrade():
                )
            ),
        ),
        sa.Column(
            "options",
            ConfigurationContainer.as_mutable(
                EncryptedConfiguration(
                    sa.Text, settings.DATASOURCE_SECRET_KEY, FernetEngine
                )
            ),
        ),
        sa.Column("options", ConfigurationContainer.as_mutable(Configuration)),
    )

    conn = op.get_bind()

package.json (12 lines changed)
@@ -1,6 +1,6 @@
{
  "name": "redash-client",
  "version": "24.08.0-dev",
  "version": "25.07.0-dev",
  "description": "The frontend part of Redash.",
  "main": "index.js",
  "scripts": {

@@ -50,11 +50,12 @@
    "antd": "^4.4.3",
    "axios": "0.27.2",
    "axios-auth-refresh": "3.3.6",
    "bootstrap": "^3.3.7",
    "bootstrap": "^3.4.1",
    "classnames": "^2.2.6",
    "d3": "^3.5.17",
    "debug": "^3.2.7",
    "dompurify": "^2.0.17",
    "elliptic": "^6.6.0",
    "font-awesome": "^4.7.0",
    "history": "^4.10.1",
    "hoist-non-react-statics": "^3.3.0",

@@ -63,7 +64,7 @@
    "mousetrap": "^1.6.1",
    "mustache": "^2.3.0",
    "numeral": "^2.0.6",
    "path-to-regexp": "^3.1.0",
    "path-to-regexp": "^3.3.0",
    "prop-types": "^15.6.1",
    "query-string": "^6.9.0",
    "react": "16.14.0",

@@ -142,6 +143,7 @@
    "react-refresh": "^0.14.0",
    "react-test-renderer": "^16.14.0",
    "request-cookies": "^1.1.0",
    "source-map-loader": "^1.1.3",
    "style-loader": "^2.0.0",
    "typescript": "^4.1.2",
    "url-loader": "^4.1.1",

@@ -179,8 +181,8 @@
    ]
  },
  "browser": {
    "fs": false,
    "path": false
    "fs": false,
    "path": false
  },
  "//": "browserslist set to 'Async functions' compatibility",
  "browserslist": [

poetry.lock (generated, 3082 lines changed; file diff suppressed because it is too large)
@@ -12,7 +12,7 @@ force-exclude = '''

[tool.poetry]
name = "redash"
version = "24.08.0-dev"
version = "25.07.0-dev"
description = "Make Your Company Data Driven. Connect to any data source, easily visualize, dashboard and share your data."
authors = ["Arik Fraimovich <arik@redash.io>"]
# to be added to/removed from the mailing list, please reach out to Arik via the above email or Discord

@@ -29,7 +29,7 @@ authlib = "0.15.5"
backoff = "2.2.1"
blinker = "1.6.2"
click = "8.1.3"
cryptography = "41.0.6"
cryptography = "43.0.1"
disposable-email-domains = ">=0.0.52"
flask = "2.3.2"
flask-limiter = "3.3.1"

@@ -46,7 +46,7 @@ greenlet = "2.0.2"
gunicorn = "22.0.0"
httplib2 = "0.19.0"
itsdangerous = "2.1.2"
jinja2 = "3.1.4"
jinja2 = "3.1.5"
jsonschema = "3.1.1"
markupsafe = "2.1.1"
maxminddb-geolite2 = "2018.703"

@@ -54,7 +54,7 @@ parsedatetime = "2.4"
passlib = "1.7.3"
psycopg2-binary = "2.9.6"
pyjwt = "2.4.0"
pyopenssl = "23.2.0"
pyopenssl = "24.2.1"
pypd = "1.1.0"
pysaml2 = "7.3.1"
pystache = "0.6.0"

@@ -65,11 +65,11 @@ pyyaml = "6.0.1"
redis = "4.6.0"
regex = "2023.8.8"
requests = "2.32.3"
restrictedpython = "6.2"
restrictedpython = "7.3"
rq = "1.16.1"
rq-scheduler = "0.13.1"
semver = "2.8.1"
sentry-sdk = "1.28.1"
sentry-sdk = "1.45.1"
sqlalchemy = "1.3.24"
sqlalchemy-searchable = "1.2.0"
sqlalchemy-utils = "0.38.3"

@@ -86,13 +86,16 @@ wtforms = "2.2.1"
xlsxwriter = "1.2.2"
tzlocal = "4.3.1"
pyodbc = "5.1.0"
debugpy = "^1.8.9"
paramiko = "3.4.1"
oracledb = "2.5.1"

[tool.poetry.group.all_ds]
optional = true

[tool.poetry.group.all_ds.dependencies]
atsd-client = "3.0.5"
azure-kusto-data = "0.0.35"
azure-kusto-data = "5.0.1"
boto3 = "1.28.8"
botocore = "1.31.8"
cassandra-driver = "3.21.0"

@@ -107,30 +110,30 @@ influxdb = "5.2.3"
influxdb-client = "1.38.0"
memsql = "3.2.0"
mysqlclient = "2.1.1"
numpy = "1.24.4"
nzalchemy = "^11.0.2"
nzpy = ">=1.15"
oauth2client = "4.1.3"
openpyxl = "3.0.7"
oracledb = "2.1.2"
pandas = "1.3.4"
phoenixdb = "0.7"
pinotdb = ">=0.4.5"
protobuf = "3.20.2"
pyathena = ">=1.5.0,<=1.11.5"
pyathena = "2.25.2"
pydgraph = "2.0.2"
pydruid = "0.5.7"
pyexasol = "0.12.0"
pyhive = "0.6.1"
pyignite = "0.6.1"
pymongo = { version = "4.6.3", extras = ["srv", "tls"] }
pymssql = "2.2.8"
pymssql = "^2.3.1"
pyodbc = "5.1.0"
python-arango = "6.1.0"
python-rapidjson = "1.1.0"
python-rapidjson = "1.20"
requests-aws-sign = "0.1.5"
sasl = ">=0.1.3"
simple-salesforce = "0.74.3"
snowflake-connector-python = "3.4.0"
snowflake-connector-python = "3.12.3"
td-client = "1.0.0"
thrift = ">=0.8.0"
thrift-sasl = ">=0.1.0"

@@ -156,7 +159,6 @@ jwcrypto = "1.5.6"
mock = "5.0.2"
pre-commit = "3.3.3"
ptpython = "3.0.23"
ptvsd = "4.3.2"
pytest-cov = "4.1.0"
watchdog = "3.0.0"
ruff = "0.0.289"

@@ -14,13 +14,14 @@ from redash.app import create_app  # noqa
from redash.destinations import import_destinations
from redash.query_runner import import_query_runners

__version__ = "24.08.0-dev"
__version__ = "25.07.0-dev"


if os.environ.get("REMOTE_DEBUG"):
    import ptvsd
    import debugpy

    ptvsd.enable_attach(address=("0.0.0.0", 5678))
    debugpy.listen(("0.0.0.0", 5678))
    debugpy.wait_for_client()


def setup_logging():

@@ -36,10 +36,14 @@ def create_app():
    from .metrics import request as request_metrics
    from .models import db, users
    from .utils import sentry
    from .version_check import reset_new_version_status

    sentry.init()
    app = Redash()

    # Check and update the cached version for use by the client
    reset_new_version_status()

    security.init_app(app)
    request_metrics.init_app(app)
    db.init_app(app)

@@ -5,6 +5,22 @@ from sqlalchemy.orm.exc import NoResultFound
manager = AppGroup(help="Queries management commands.")


@manager.command(name="rehash")
def rehash():
    from redash import models

    for q in models.Query.query.all():
        old_hash = q.query_hash
        q.update_query_hash()
        new_hash = q.query_hash

        if old_hash != new_hash:
            print(f"Query {q.id} has changed hash from {old_hash} to {new_hash}")
            models.db.session.add(q)

    models.db.session.commit()


@manager.command(name="add_tag")
@argument("query_id")
@argument("tag")

@@ -1,3 +1,5 @@
import html
import json
import logging
from copy import deepcopy

@@ -37,6 +39,129 @@ class Webex(BaseDestination):

    @staticmethod
    def formatted_attachments_template(subject, description, query_link, alert_link):
        # Attempt to parse the description to find a 2D array
        try:
            # Extract the part of the description that looks like a JSON array
            start_index = description.find("[")
            end_index = description.rfind("]") + 1
            json_array_str = description[start_index:end_index]

            # Decode HTML entities
            json_array_str = html.unescape(json_array_str)

            # Replace single quotes with double quotes for valid JSON
            json_array_str = json_array_str.replace("'", '"')

            # Load the JSON array
            data_array = json.loads(json_array_str)

            # Check if it's a 2D array
            if isinstance(data_array, list) and all(isinstance(i, list) for i in data_array):
                # Create a table for the Adaptive Card
                table_rows = []
                for row in data_array:
                    table_rows.append(
                        {
                            "type": "ColumnSet",
                            "columns": [
                                {"type": "Column", "items": [{"type": "TextBlock", "text": str(item), "wrap": True}]}
                                for item in row
                            ],
                        }
                    )

                # Create the body of the card with the table
                body = (
                    [
                        {
                            "type": "TextBlock",
                            "text": f"{subject}",
                            "weight": "bolder",
                            "size": "medium",
                            "wrap": True,
                        },
                        {
                            "type": "TextBlock",
                            "text": f"{description[:start_index]}",
                            "isSubtle": True,
                            "wrap": True,
                        },
                    ]
                    + table_rows
                    + [
                        {
                            "type": "TextBlock",
                            "text": f"Click [here]({query_link}) to check your query!",
                            "wrap": True,
                            "isSubtle": True,
                        },
                        {
                            "type": "TextBlock",
                            "text": f"Click [here]({alert_link}) to check your alert!",
                            "wrap": True,
                            "isSubtle": True,
                        },
                    ]
                )
            else:
                # Fallback to the original description if no valid 2D array is found
                body = [
                    {
                        "type": "TextBlock",
                        "text": f"{subject}",
                        "weight": "bolder",
                        "size": "medium",
                        "wrap": True,
                    },
                    {
                        "type": "TextBlock",
                        "text": f"{description}",
                        "isSubtle": True,
                        "wrap": True,
                    },
                    {
                        "type": "TextBlock",
                        "text": f"Click [here]({query_link}) to check your query!",
                        "wrap": True,
                        "isSubtle": True,
                    },
                    {
                        "type": "TextBlock",
                        "text": f"Click [here]({alert_link}) to check your alert!",
                        "wrap": True,
                        "isSubtle": True,
                    },
                ]
        except json.JSONDecodeError:
            # If parsing fails, fallback to the original description
            body = [
                {
                    "type": "TextBlock",
                    "text": f"{subject}",
                    "weight": "bolder",
                    "size": "medium",
                    "wrap": True,
                },
                {
                    "type": "TextBlock",
                    "text": f"{description}",
                    "isSubtle": True,
                    "wrap": True,
                },
                {
                    "type": "TextBlock",
                    "text": f"Click [here]({query_link}) to check your query!",
                    "wrap": True,
                    "isSubtle": True,
                },
                {
                    "type": "TextBlock",
                    "text": f"Click [here]({alert_link}) to check your alert!",
                    "wrap": True,
                    "isSubtle": True,
                },
            ]

        return [
            {
                "contentType": "application/vnd.microsoft.card.adaptive",

@@ -44,44 +169,7 @@ class Webex(BaseDestination):
                "$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
                "type": "AdaptiveCard",
                "version": "1.0",
                "body": [
                    {
                        "type": "ColumnSet",
                        "columns": [
                            {
                                "type": "Column",
                                "width": 4,
                                "items": [
                                    {
                                        "type": "TextBlock",
                                        "text": {subject},
                                        "weight": "bolder",
                                        "size": "medium",
                                        "wrap": True,
                                    },
                                    {
                                        "type": "TextBlock",
                                        "text": {description},
                                        "isSubtle": True,
                                        "wrap": True,
                                    },
                                    {
                                        "type": "TextBlock",
                                        "text": f"Click [here]({query_link}) to check your query!",
                                        "wrap": True,
                                        "isSubtle": True,
                                    },
                                    {
                                        "type": "TextBlock",
                                        "text": f"Click [here]({alert_link}) to check your alert!",
                                        "wrap": True,
                                        "isSubtle": True,
                                    },
                                ],
                            },
                        ],
                    }
                ],
                "body": body,
            },
        }
    ]

@@ -116,6 +204,10 @@ class Webex(BaseDestination):

        # destinations is guaranteed to be a comma-separated string
        for destination_id in destinations.split(","):
            destination_id = destination_id.strip()  # Remove any leading or trailing whitespace
            if not destination_id:  # Check if the destination_id is empty or blank
                continue  # Skip to the next iteration if it's empty or blank

            payload = deepcopy(template_payload)
            payload[payload_tag] = destination_id
            self.post_message(payload, headers)

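Note: the parsing block above pulls a 2D array out of the alert description by slicing from the first "[" to the last "]", unescaping HTML entities, and swapping single quotes for double quotes before json.loads. The same technique as a self-contained helper (names are illustrative; it inherits the hunk's caveat that the quote swap can corrupt strings containing apostrophes):

    import html
    import json

    def extract_embedded_table(description):
        # Slice the bracketed region, clean it up, and parse it;
        # return None unless the result is a list of lists.
        start, end = description.find("["), description.rfind("]") + 1
        if start == -1 or end == 0:
            return None
        candidate = html.unescape(description[start:end]).replace("'", '"')
        try:
            data = json.loads(candidate)
        except json.JSONDecodeError:
            return None
        if isinstance(data, list) and all(isinstance(row, list) for row in data):
            return data
        return None

    # extract_embedded_table("Top rows: [['a', 1], ['b', 2]]")
    # -> [["a", 1], ["b", 2]]
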
@@ -1,7 +1,7 @@
from flask import request
from funcy import project

from redash import models
from redash import models, utils
from redash.handlers.base import (
    BaseResource,
    get_object_or_404,

@@ -14,6 +14,10 @@ from redash.permissions import (
    view_only,
)
from redash.serializers import serialize_alert
from redash.tasks.alerts import (
    notify_subscriptions,
    should_notify,
)


class AlertResource(BaseResource):

@@ -43,6 +47,21 @@ class AlertResource(BaseResource):
        models.db.session.commit()


class AlertEvaluateResource(BaseResource):
    def post(self, alert_id):
        alert = get_object_or_404(models.Alert.get_by_id_and_org, alert_id, self.current_org)
        require_admin_or_owner(alert.user.id)

        new_state = alert.evaluate()
        if should_notify(alert, new_state):
            alert.state = new_state
            alert.last_triggered_at = utils.utcnow()
            models.db.session.commit()

            notify_subscriptions(alert, new_state, {})
        self.record_event({"action": "evaluate", "object_id": alert.id, "object_type": "alert"})


class AlertMuteResource(BaseResource):
    def post(self, alert_id):
        alert = get_object_or_404(models.Alert.get_by_id_and_org, alert_id, self.current_org)

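Note: with AlertEvaluateResource registered on /api/alerts/<alert_id>/eval (see the route hunk just below), an alert can be evaluated on demand; the frontend drives it through Alert.evaluate earlier in this diff. A hedged sketch of calling it directly over HTTP, where the base URL, alert id, and API key are placeholders for a concrete deployment:

    import requests

    BASE_URL = "https://redash.example.com"  # placeholder
    API_KEY = "..."  # placeholder user API key

    # Trigger an on-demand evaluation of alert 42.
    response = requests.post(
        f"{BASE_URL}/api/alerts/42/eval",
        headers={"Authorization": f"Key {API_KEY}"},
    )
    response.raise_for_status()
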
@@ -3,6 +3,7 @@ from flask_restful import Api
from werkzeug.wrappers import Response

from redash.handlers.alerts import (
    AlertEvaluateResource,
    AlertListResource,
    AlertMuteResource,
    AlertResource,

@@ -117,6 +118,7 @@ def json_representation(data, code, headers=None):

api.add_org_resource(AlertResource, "/api/alerts/<alert_id>", endpoint="alert")
api.add_org_resource(AlertMuteResource, "/api/alerts/<alert_id>/mute", endpoint="alert_mute")
api.add_org_resource(AlertEvaluateResource, "/api/alerts/<alert_id>/eval", endpoint="alert_eval")
api.add_org_resource(
    AlertSubscriptionListResource,
    "/api/alerts/<alert_id>/subscriptions",

@@ -15,6 +15,7 @@ from redash.authentication.account import (
)
from redash.handlers import routes
from redash.handlers.base import json_response, org_scoped_rule
from redash.version_check import get_latest_version

logger = logging.getLogger(__name__)

@@ -28,6 +29,7 @@ def get_google_auth_url(next_path):


def render_token_login_page(template, org_slug, token, invite):
    error_message = None
    try:
        user_id = validate_token(token)
        org = current_org._get_current_object()

@@ -39,19 +41,19 @@ def render_token_login_page(template, org_slug, token, invite):
            user_id,
            org_slug,
        )
        error_message = "Your invite link is invalid. Bad user id in token. Please ask for a new one."
    except SignatureExpired:
        logger.exception("Token signature has expired. Token: %s, org=%s", token, org_slug)
        error_message = "Your invite link has expired. Please ask for a new one."
    except BadSignature:
        logger.exception("Bad signature for the token: %s, org=%s", token, org_slug)
        error_message = "Your invite link is invalid. Bad signature. Please double-check the token."

    if error_message:
        return (
            render_template(
                "error.html",
                error_message="Invalid invite link. Please ask for a new one.",
            ),
            400,
        )
    except (SignatureExpired, BadSignature):
        logger.exception("Failed to verify invite token: %s, org=%s", token, org_slug)
        return (
            render_template(
                "error.html",
                error_message="Your invite link has expired. Please ask for a new one.",
                error_message=error_message,
            ),
            400,
        )

@@ -255,11 +257,15 @@ def number_format_config():

def client_config():
    if not current_user.is_api_user() and current_user.is_authenticated:
        client_config_inner = {
        client_config = {
            "newVersionAvailable": bool(get_latest_version()),
            "version": __version__,
        }
    else:
        client_config_inner = {}
        client_config = {}

    if current_user.has_permission("admin") and current_org.get_setting("beacon_consent") is None:
        client_config["showBeaconConsentMessage"] = True

    defaults = {
        "allowScriptsInUserInput": settings.ALLOW_SCRIPTS_IN_USER_INPUT,

@@ -279,12 +285,12 @@ def client_config():
        "tableCellMaxJSONSize": settings.TABLE_CELL_MAX_JSON_SIZE,
    }

    client_config_inner.update(defaults)
    client_config_inner.update({"basePath": base_href()})
    client_config_inner.update(date_time_format_config())
    client_config_inner.update(number_format_config())
    client_config.update(defaults)
    client_config.update({"basePath": base_href()})
    client_config.update(date_time_format_config())
    client_config.update(number_format_config())

    return client_config_inner
    return client_config


def messages():

@@ -1,12 +1,13 @@
from flask import g, redirect, render_template, request, url_for
from flask_login import login_user
from wtforms import Form, PasswordField, StringField, validators
from wtforms import BooleanField, Form, PasswordField, StringField, validators
from wtforms.fields.html5 import EmailField

from redash import settings
from redash.authentication.org_resolving import current_org
from redash.handlers.base import routes
from redash.models import Group, Organization, User, db
from redash.tasks.general import subscribe


class SetupForm(Form):

@@ -14,6 +15,8 @@ class SetupForm(Form):
    email = EmailField("Email Address", validators=[validators.Email()])
    password = PasswordField("Password", validators=[validators.Length(6)])
    org_name = StringField("Organization Name", validators=[validators.InputRequired()])
    security_notifications = BooleanField()
    newsletter = BooleanField()


def create_org(org_name, user_name, email, password):

@@ -54,6 +57,8 @@ def setup():
        return redirect("/")

    form = SetupForm(request.form)
    form.newsletter.data = True
    form.security_notifications.data = True

    if request.method == "POST" and form.validate():
        default_org, user = create_org(form.org_name.data, form.name.data, form.email.data, form.password.data)

@@ -61,6 +66,10 @@ def setup():
        g.org = default_org
        login_user(user)

        # sign up to the newsletter if needed
        if form.newsletter.data or form.security_notifications.data:
            subscribe.delay(form.data)

        return redirect(url_for("redash.index", org_slug=None))

    return render_template("setup.html", form=form)

@@ -5,7 +5,7 @@ from flask import g, has_request_context
from sqlalchemy.engine import Engine
from sqlalchemy.event import listens_for
from sqlalchemy.orm.util import _ORMJoin
from sqlalchemy.sql.selectable import Alias
from sqlalchemy.sql.selectable import Alias, Join

from redash import statsd_client

@@ -18,7 +18,7 @@ def _table_name_from_select_element(elt):
    if isinstance(t, Alias):
        t = t.original.froms[0]

    while isinstance(t, _ORMJoin):
    while isinstance(t, _ORMJoin) or isinstance(t, Join):
        t = t.left

    return t.name
@@ -46,6 +46,7 @@ from redash.models.parameterized_query import (
    QueryDetachedFromDataSourceError,
)
from redash.models.types import (
    Configuration,
    EncryptedConfiguration,
    JSONText,
    MutableDict,
@@ -386,6 +387,10 @@ class QueryResult(db.Model, BelongsToOrgMixin):


def should_schedule_next(previous_iteration, now, interval, time=None, day_of_week=None, failures=0):
    # if previous_iteration is None, it means the query has never been run before,
    # so we should schedule it immediately
    if previous_iteration is None:
        return True
    # if time exists then interval > 23 hours (82800s)
    # if day_of_week exists then interval > 6 days (518400s)
    if time is None:
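Note: the early return added above makes never-run queries eligible immediately. A quick sanity sketch (the interval value is illustrative, not taken from the diff):

    from redash.utils import utcnow

    # A query that has never run (previous_iteration is None) is scheduled right away,
    # regardless of interval, time, or day_of_week.
    assert should_schedule_next(None, utcnow(), interval=3600) is True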
@@ -601,6 +606,11 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
            if query.schedule.get("disabled"):
                continue

            # Skip queries that have None for all schedule values. It's unclear whether this
            # is something that can happen in practice, but we have a test case for it.
            if all(value is None for value in query.schedule.values()):
                continue

            if query.schedule["until"]:
                schedule_until = pytz.utc.localize(datetime.datetime.strptime(query.schedule["until"], "%Y-%m-%d"))

@@ -612,7 +622,7 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
            )

            if should_schedule_next(
                retrieved_at or now,
                retrieved_at,
                now,
                query.schedule["interval"],
                query.schedule["time"],
@@ -898,6 +908,7 @@ def next_state(op, value, threshold):
        # boolean value is Python specific and most likely will be confusing to
        # users.
        value = str(value).lower()
        value_is_number = False
    else:
        try:
            value = float(value)
@@ -915,6 +926,8 @@ def next_state(op, value, threshold):

    if op(value, threshold):
        new_state = Alert.TRIGGERED_STATE
    elif not value_is_number and op not in [OPERATORS.get("!="), OPERATORS.get("=="), OPERATORS.get("equals")]:
        new_state = Alert.UNKNOWN_STATE
    else:
        new_state = Alert.OK_STATE

@@ -926,6 +939,7 @@ class Alert(TimestampMixin, BelongsToOrgMixin, db.Model):
    UNKNOWN_STATE = "unknown"
    OK_STATE = "ok"
    TRIGGERED_STATE = "triggered"
    TEST_STATE = "test"

    id = primary_key("Alert")
    name = Column(db.String(255))
@@ -955,17 +969,38 @@ class Alert(TimestampMixin, BelongsToOrgMixin, db.Model):
        return super(Alert, cls).get_by_id_and_org(object_id, org, Query)

    def evaluate(self):
        data = self.query_rel.latest_query_data.data
        data = self.query_rel.latest_query_data.data if self.query_rel.latest_query_data else None
        new_state = self.UNKNOWN_STATE

        if data["rows"] and self.options["column"] in data["rows"][0]:
        if data and data["rows"] and self.options["column"] in data["rows"][0]:
            op = OPERATORS.get(self.options["op"], lambda v, t: False)

            value = data["rows"][0][self.options["column"]]
            if "selector" not in self.options:
                selector = "first"
            else:
                selector = self.options["selector"]

            try:
                if selector == "max":
                    max_val = float("-inf")
                    for i in range(len(data["rows"])):
                        max_val = max(max_val, float(data["rows"][i][self.options["column"]]))
                    value = max_val
                elif selector == "min":
                    min_val = float("inf")
                    for i in range(len(data["rows"])):
                        min_val = min(min_val, float(data["rows"][i][self.options["column"]]))
                    value = min_val
                else:
                    value = data["rows"][0][self.options["column"]]

            except ValueError:
                return self.UNKNOWN_STATE

            threshold = self.options["value"]

            new_state = next_state(op, value, threshold)
        else:
            new_state = self.UNKNOWN_STATE
            if value is not None:
                new_state = next_state(op, value, threshold)

        return new_state
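Note: the min/max selector loops above could equally be written with Python's builtin min()/max() over a generator. A behaviorally equivalent sketch (not the committed code):

    column = self.options["column"]
    if selector in ("min", "max"):
        pick = min if selector == "min" else max
        # float() raises ValueError for non-numeric cells, which the enclosing
        # try/except turns into UNKNOWN_STATE, matching the committed behavior.
        value = pick(float(row[column]) for row in data["rows"])
    else:
        value = data["rows"][0][column]  # "first" selector: no numeric cast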
@@ -988,11 +1023,11 @@ class Alert(TimestampMixin, BelongsToOrgMixin, db.Model):
        result_table = []  # A two-dimensional array which can be rendered as a table in Mustache
        for row in data["rows"]:
            result_table.append([row[col["name"]] for col in data["columns"]])

        context = {
            "ALERT_NAME": self.name,
            "ALERT_URL": "{host}/alerts/{alert_id}".format(host=host, alert_id=self.id),
            "ALERT_STATUS": self.state.upper(),
            "ALERT_SELECTOR": self.options["selector"],
            "ALERT_CONDITION": self.options["op"],
            "ALERT_THRESHOLD": self.options["value"],
            "QUERY_NAME": self.query_rel.name,
@@ -3,10 +3,21 @@ from sqlalchemy.ext.mutable import Mutable
from sqlalchemy.types import TypeDecorator
from sqlalchemy_utils import EncryptedType

from redash.models.base import db
from redash.utils import json_dumps, json_loads
from redash.utils.configuration import ConfigurationContainer

from .base import db


class Configuration(TypeDecorator):
    impl = db.Text

    def process_bind_param(self, value, dialect):
        return value.to_json()

    def process_result_value(self, value, dialect):
        return ConfigurationContainer.from_json(value)


class EncryptedConfiguration(EncryptedType):
    def process_bind_param(self, value, dialect):
@@ -59,7 +59,7 @@ def get_status():


def rq_job_ids():
    queues = Queue.all(connection=redis_connection)
    queues = Queue.all(connection=rq_redis_connection)

    started_jobs = [StartedJobRegistry(queue=q).get_job_ids() for q in queues]
    queued_jobs = [q.job_ids for q in queues]

@@ -288,7 +288,10 @@ class BaseSQLQueryRunner(BaseQueryRunner):
        return True

    def query_is_select_no_limit(self, query):
        parsed_query = sqlparse.parse(query)[0]
        parsed_query_list = sqlparse.parse(query)
        if len(parsed_query_list) == 0:
            return False
        parsed_query = parsed_query_list[0]
        last_keyword_idx = find_last_keyword_idx(parsed_query)
        # Either an invalid query or a query that is not a SELECT
        if last_keyword_idx == -1 or parsed_query.tokens[0].value.upper() != "SELECT":
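Note: the new length check matters because sqlparse returns an empty tuple for blank input, so indexing [0] on it raised IndexError before this change. A small illustration (assuming sqlparse is installed):

    import sqlparse

    assert sqlparse.parse("") == ()            # nothing to index -> the old code crashed here
    statement = sqlparse.parse("SELECT 1")[0]  # non-empty input parses normally
    assert statement.tokens[0].value.upper() == "SELECT"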
@@ -90,15 +90,26 @@ class Athena(BaseQueryRunner):
                    "title": "Athena cost per Tb scanned (USD)",
                    "default": 5,
                },
                "result_reuse_enable": {
                    "type": "boolean",
                    "title": "Reuse Athena query results",
                },
                "result_reuse_minutes": {
                    "type": "number",
                    "title": "Minutes to reuse Athena query results",
                    "default": 60,
                },
            },
            "required": ["region", "s3_staging_dir"],
            "extra_options": ["glue", "catalog_ids", "cost_per_tb"],
            "extra_options": ["glue", "catalog_ids", "cost_per_tb", "result_reuse_enable", "result_reuse_minutes"],
            "order": [
                "region",
                "s3_staging_dir",
                "schema",
                "work_group",
                "cost_per_tb",
                "result_reuse_enable",
                "result_reuse_minutes",
            ],
            "secret": ["aws_secret_key"],
        }
@@ -199,10 +210,20 @@ class Athena(BaseQueryRunner):
                logger.warning("Glue table doesn't have StorageDescriptor: %s", table_name)
                continue
            if table_name not in schema:
                column = [columns["Name"] for columns in table["StorageDescriptor"]["Columns"]]
                schema[table_name] = {"name": table_name, "columns": column}
                for partition in table.get("PartitionKeys", []):
                    schema[table_name]["columns"].append(partition["Name"])
                schema[table_name] = {"name": table_name, "columns": []}

                for column_data in table["StorageDescriptor"]["Columns"]:
                    column = {
                        "name": column_data["Name"],
                        "type": column_data["Type"] if "Type" in column_data else None,
                    }
                    schema[table_name]["columns"].append(column)
                for partition in table.get("PartitionKeys", []):
                    partition_column = {
                        "name": partition["Name"],
                        "type": partition["Type"] if "Type" in partition else None,
                    }
                    schema[table_name]["columns"].append(partition_column)
        return list(schema.values())

    def get_schema(self, get_stats=False):
@@ -212,7 +233,7 @@ class Athena(BaseQueryRunner):

        schema = {}
        query = """
        SELECT table_schema, table_name, column_name
        SELECT table_schema, table_name, column_name, data_type
        FROM information_schema.columns
        WHERE table_schema NOT IN ('information_schema')
        """
@@ -225,7 +246,7 @@ class Athena(BaseQueryRunner):
            table_name = "{0}.{1}".format(row["table_schema"], row["table_name"])
            if table_name not in schema:
                schema[table_name] = {"name": table_name, "columns": []}
            schema[table_name]["columns"].append(row["column_name"])
            schema[table_name]["columns"].append({"name": row["column_name"], "type": row["data_type"]})

        return list(schema.values())

@@ -237,6 +258,8 @@ class Athena(BaseQueryRunner):
            kms_key=self.configuration.get("kms_key", None),
            work_group=self.configuration.get("work_group", "primary"),
            formatter=SimpleFormatter(),
            result_reuse_enable=self.configuration.get("result_reuse_enable", False),
            result_reuse_minutes=self.configuration.get("result_reuse_minutes", 60),
            **self._get_iam_credentials(user=user),
        ).cursor()
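Note: the two new kwargs map straight onto PyAthena's connect() parameters, as the hunk above shows. A minimal sketch of the resulting connection, assuming a PyAthena version with Athena result-reuse support (region, bucket, and workgroup values are placeholders):

    from pyathena import connect

    cursor = connect(
        region_name="us-east-1",
        s3_staging_dir="s3://example-bucket/athena-results/",
        work_group="primary",
        result_reuse_enable=True,   # let Athena serve cached results
        result_reuse_minutes=60,    # ...for queries run within the last hour
    ).cursor()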
@@ -11,12 +11,12 @@ from redash.query_runner import (
from redash.utils import json_loads

try:
    from azure.kusto.data.exceptions import KustoServiceError
    from azure.kusto.data.request import (
    from azure.kusto.data import (
        ClientRequestProperties,
        KustoClient,
        KustoConnectionStringBuilder,
    )
    from azure.kusto.data.exceptions import KustoServiceError

    enabled = True
except ImportError:
@@ -37,6 +37,34 @@ TYPES_MAP = {
}


def _get_data_scanned(kusto_response):
    data_scanned = 0
    try:
        metadata_table = next(
            (table for table in kusto_response.tables if table.table_name == "QueryCompletionInformation"),
            None,
        )

        if metadata_table:
            resource_usage_json = next(
                (row["Payload"] for row in metadata_table.rows if row["EventTypeName"] == "QueryResourceConsumption"),
                "{}",
            )
            resource_usage = json_loads(resource_usage_json).get("resource_usage", {})

            data_scanned = (
                resource_usage["cache"]["shards"]["cold"]["hitbytes"]
                + resource_usage["cache"]["shards"]["cold"]["missbytes"]
                + resource_usage["cache"]["shards"]["hot"]["hitbytes"]
                + resource_usage["cache"]["shards"]["hot"]["missbytes"]
                + resource_usage["cache"]["shards"]["bypassbytes"]
            )

    except Exception:
        data_scanned = 0

    return int(data_scanned)
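Note: as a rough illustration of what _get_data_scanned expects, a trimmed QueryResourceConsumption payload might look like the following (field values are made up; only the cache-shard byte counters are read):

    payload = {
        "resource_usage": {
            "cache": {
                "shards": {
                    "cold": {"hitbytes": 1024, "missbytes": 2048},
                    "hot": {"hitbytes": 512, "missbytes": 0},
                    "bypassbytes": 0,
                }
            }
        }
    }
    # hit + miss bytes across hot and cold shards, plus bypassed bytes
    assert 1024 + 2048 + 512 + 0 + 0 == 3584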
class AzureKusto(BaseQueryRunner):
    should_annotate_query = False
    noop_query = "let noop = datatable (Noop:string)[1]; noop"
@@ -44,8 +72,6 @@ class AzureKusto(BaseQueryRunner):
    def __init__(self, configuration):
        super(AzureKusto, self).__init__(configuration)
        self.syntax = "custom"
        self.client_request_properties = ClientRequestProperties()
        self.client_request_properties.application = "redash"

    @classmethod
    def configuration_schema(cls):
@@ -60,12 +86,14 @@ class AzureKusto(BaseQueryRunner):
            },
            "azure_ad_tenant_id": {"type": "string", "title": "Azure AD Tenant Id"},
            "database": {"type": "string"},
            "msi": {"type": "boolean", "title": "Use Managed Service Identity"},
            "user_msi": {
                "type": "string",
                "title": "User-assigned managed identity client ID",
            },
        },
        "required": [
            "cluster",
            "azure_ad_client_id",
            "azure_ad_client_secret",
            "azure_ad_tenant_id",
            "database",
        ],
        "order": [
@@ -91,18 +119,48 @@ class AzureKusto(BaseQueryRunner):
        return "Azure Data Explorer (Kusto)"

    def run_query(self, query, user):
        kcsb = KustoConnectionStringBuilder.with_aad_application_key_authentication(
            connection_string=self.configuration["cluster"],
            aad_app_id=self.configuration["azure_ad_client_id"],
            app_key=self.configuration["azure_ad_client_secret"],
            authority_id=self.configuration["azure_ad_tenant_id"],
        )
        cluster = self.configuration["cluster"]
        msi = self.configuration.get("msi", False)
        # Managed Service Identity (MSI)
        if msi:
            # If a user-assigned managed identity is used, the client ID must be provided
            if self.configuration.get("user_msi"):
                kcsb = KustoConnectionStringBuilder.with_aad_managed_service_identity_authentication(
                    cluster,
                    client_id=self.configuration["user_msi"],
                )
            else:
                kcsb = KustoConnectionStringBuilder.with_aad_managed_service_identity_authentication(cluster)
        # Service Principal auth
        else:
            aad_app_id = self.configuration.get("azure_ad_client_id")
            app_key = self.configuration.get("azure_ad_client_secret")
            authority_id = self.configuration.get("azure_ad_tenant_id")

            if not (aad_app_id and app_key and authority_id):
                raise ValueError(
                    "Azure AD Client ID, Client Secret, and Tenant ID are required for Service Principal authentication."
                )

            kcsb = KustoConnectionStringBuilder.with_aad_application_key_authentication(
                connection_string=cluster,
                aad_app_id=aad_app_id,
                app_key=app_key,
                authority_id=authority_id,
            )

        client = KustoClient(kcsb)

        request_properties = ClientRequestProperties()
        request_properties.application = "redash"

        if user:
            request_properties.user = user.email
            request_properties.set_option("request_description", user.email)

        db = self.configuration["database"]
        try:
            response = client.execute(db, query, self.client_request_properties)
            response = client.execute(db, query, request_properties)

            result_cols = response.primary_results[0].columns
            result_rows = response.primary_results[0].rows
@@ -123,14 +181,15 @@ class AzureKusto(BaseQueryRunner):
                rows.append(row.to_dict())

            error = None
            data = {"columns": columns, "rows": rows}
            data = {
                "columns": columns,
                "rows": rows,
                "metadata": {"data_scanned": _get_data_scanned(response)},
            }

        except KustoServiceError as err:
            data = None
            try:
                error = err.args[1][0]["error"]["@message"]
            except (IndexError, KeyError):
                error = err.args[1]
            error = str(err)

        return data, error

@@ -143,7 +202,10 @@ class AzureKusto(BaseQueryRunner):
            self._handle_run_query_error(error)

        schema_as_json = json_loads(results["rows"][0]["DatabaseSchema"])
        tables_list = schema_as_json["Databases"][self.configuration["database"]]["Tables"].values()
        tables_list = [
            *(schema_as_json["Databases"][self.configuration["database"]]["Tables"].values()),
            *(schema_as_json["Databases"][self.configuration["database"]]["MaterializedViews"].values()),
        ]

        schema = {}

@@ -154,7 +216,9 @@ class AzureKusto(BaseQueryRunner):
            schema[table_name] = {"name": table_name, "columns": []}

            for column in table["OrderedColumns"]:
                schema[table_name]["columns"].append(column["Name"])
                schema[table_name]["columns"].append(
                    {"name": column["Name"], "type": TYPES_MAP.get(column["CslType"], None)}
                )

        return list(schema.values())
@@ -7,11 +7,12 @@ from base64 import b64decode
from redash import settings
from redash.query_runner import (
    TYPE_BOOLEAN,
    TYPE_DATE,
    TYPE_DATETIME,
    TYPE_FLOAT,
    TYPE_INTEGER,
    TYPE_STRING,
    BaseQueryRunner,
    BaseSQLQueryRunner,
    InterruptException,
    JobTimeoutException,
    register,
@@ -37,6 +38,8 @@ types_map = {
    "BOOLEAN": TYPE_BOOLEAN,
    "STRING": TYPE_STRING,
    "TIMESTAMP": TYPE_DATETIME,
    "DATETIME": TYPE_DATETIME,
    "DATE": TYPE_DATE,
}


@@ -83,7 +86,7 @@ def _get_query_results(jobs, project_id, location, job_id, start_index):
    ).execute()
    logging.debug("query_reply %s", query_reply)
    if not query_reply["jobComplete"]:
        time.sleep(10)
        time.sleep(1)
        return _get_query_results(jobs, project_id, location, job_id, start_index)

    return query_reply
@@ -95,7 +98,7 @@ def _get_total_bytes_processed_for_resp(bq_response):
    return int(bq_response.get("totalBytesProcessed", "0"))


class BigQuery(BaseQueryRunner):
class BigQuery(BaseSQLQueryRunner):
    noop_query = "SELECT 1"

    def __init__(self, configuration):
@@ -301,7 +304,7 @@ class BigQuery(BaseQueryRunner):
        datasets = self._get_project_datasets(project_id)

        query_base = """
        SELECT table_schema, table_name, field_path
        SELECT table_schema, table_name, field_path, data_type
        FROM `{dataset_id}`.INFORMATION_SCHEMA.COLUMN_FIELD_PATHS
        WHERE table_schema NOT IN ('information_schema')
        """
@@ -322,7 +325,7 @@ class BigQuery(BaseQueryRunner):
        table_name = "{0}.{1}".format(row["table_schema"], row["table_name"])
        if table_name not in schema:
            schema[table_name] = {"name": table_name, "columns": []}
        schema[table_name]["columns"].append(row["field_path"])
        schema[table_name]["columns"].append({"name": row["field_path"], "type": row["data_type"]})

        return list(schema.values())
@@ -91,8 +91,8 @@ class BaseElasticSearch(BaseQueryRunner):

        logger.setLevel(logging.DEBUG)

        self.server_url = self.configuration["server"]
        if self.server_url[-1] == "/":
        self.server_url = self.configuration.get("server", "")
        if self.server_url and self.server_url[-1] == "/":
            self.server_url = self.server_url[:-1]

        basic_auth_user = self.configuration.get("basic_auth_user", None)
File diff suppressed because it is too large
@@ -117,19 +117,31 @@ def parse_results(results: list, flatten: bool = False) -> list:

        parsed_row = _parse_dict(row, flatten)
        for column_name, value in parsed_row.items():
            columns.append(
                {
                    "name": column_name,
                    "friendly_name": column_name,
                    "type": TYPES_MAP.get(type(value), TYPE_STRING),
                }
            )
            if _get_column_by_name(columns, column_name) is None:
                columns.append(
                    {
                        "name": column_name,
                        "friendly_name": column_name,
                        "type": TYPES_MAP.get(type(value), TYPE_STRING),
                    }
                )

        rows.append(parsed_row)

    return rows, columns


def _sorted_fields(fields):
    ord = {}
    for k, v in fields.items():
        if isinstance(v, int):
            ord[k] = v
        else:
            ord[k] = len(fields)

    return sorted(ord, key=ord.get)
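Note: a small illustration of _sorted_fields — fields mapped to integers keep their explicit positions, while non-integer values (e.g. nested projection documents) are assigned len(fields) and therefore sort last:

    fields = {"b": 2, "a": 1, "meta": {"$slice": 5}}
    # "meta" gets position len(fields) == 3, after the explicit 1 and 2
    assert _sorted_fields(fields) == ["a", "b", "meta"]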
class MongoDB(BaseQueryRunner):
    should_annotate_query = False

@@ -176,7 +188,7 @@ class MongoDB(BaseQueryRunner):

        self.syntax = "json"

        self.db_name = self.configuration["dbName"]
        self.db_name = self.configuration.get("dbName", "")

        self.is_replica_set = (
            True if "replicaSetName" in self.configuration and self.configuration["replicaSetName"] else False
@@ -364,7 +376,7 @@ class MongoDB(BaseQueryRunner):

        if f:
            ordered_columns = []
            for k in sorted(f, key=f.get):
            for k in _sorted_fields(f):
                column = _get_column_by_name(columns, k)
                if column:
                    ordered_columns.append(column)
@@ -152,7 +152,7 @@ class Mysql(BaseSQLQueryRunner):
            col.table_name as table_name,
            col.column_name as column_name
        FROM `information_schema`.`columns` col
        WHERE col.table_schema NOT IN ('information_schema', 'performance_schema', 'mysql', 'sys');
        WHERE LOWER(col.table_schema) NOT IN ('information_schema', 'performance_schema', 'mysql', 'sys');
        """

        results, error = self.run_query(query, None)
@@ -388,12 +388,13 @@ class Redshift(PostgreSQL):
            SELECT DISTINCT table_name,
                            table_schema,
                            column_name,
                            data_type,
                            ordinal_position AS pos
            FROM svv_columns
            WHERE table_schema NOT IN ('pg_internal','pg_catalog','information_schema')
            AND table_schema NOT LIKE 'pg_temp_%'
        )
        SELECT table_name, table_schema, column_name
        SELECT table_name, table_schema, column_name, data_type
        FROM tables
        WHERE
            HAS_SCHEMA_PRIVILEGE(table_schema, 'USAGE') AND
@@ -55,12 +55,13 @@ class Script(BaseQueryRunner):
    def __init__(self, configuration):
        super(Script, self).__init__(configuration)

        path = self.configuration.get("path", "")
        # If path is * allow any execution path
        if self.configuration["path"] == "*":
        if path == "*":
            return

        # Poor man's protection against running scripts from outside the scripts directory
        if self.configuration["path"].find("../") > -1:
        if path.find("../") > -1:
            raise ValueError("Scripts can only be run from the configured scripts directory")

    def test_connection(self):
@@ -28,7 +28,7 @@ class Sqlite(BaseSQLQueryRunner):
    def __init__(self, configuration):
        super(Sqlite, self).__init__(configuration)

        self._dbpath = self.configuration["dbpath"]
        self._dbpath = self.configuration.get("dbpath", "")

    def _get_tables(self, schema):
        query_table = "select tbl_name from sqlite_master where type='table'"
@@ -1,6 +1,6 @@
import functools

from flask import session
from flask import request, session
from flask_login import current_user
from flask_talisman import talisman
from flask_wtf.csrf import CSRFProtect, generate_csrf
@@ -35,6 +35,15 @@ def init_app(app):

    @app.before_request
    def check_csrf():
        # BEGIN workaround until https://github.com/lepture/flask-wtf/pull/419 is merged
        if request.blueprint in csrf._exempt_blueprints:
            return

        view = app.view_functions.get(request.endpoint)
        if view is not None and f"{view.__module__}.{view.__name__}" in csrf._exempt_views:
            return
        # END workaround

        if not current_user.is_authenticated or "user_id" in session:
            csrf.protect()
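Note: the workaround re-implements the exemption checks that csrf.protect() would normally perform itself, so the usual flask-wtf exemption API keeps working. A minimal sketch (blueprint and view names are illustrative):

    from flask import Blueprint

    webhooks = Blueprint("webhooks", __name__)
    csrf.exempt(webhooks)   # the whole blueprint skips CSRF checks

    @csrf.exempt            # ...or exempt a single view
    def health_check():
        return "ok"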
@@ -50,6 +50,7 @@ QUERY_RESULTS_EXPIRED_TTL_ENABLED = parse_boolean(os.environ.get("REDASH_QUERY_R
QUERY_RESULTS_EXPIRED_TTL = int(os.environ.get("REDASH_QUERY_RESULTS_EXPIRED_TTL", "86400"))

SCHEMAS_REFRESH_SCHEDULE = int(os.environ.get("REDASH_SCHEMAS_REFRESH_SCHEDULE", 30))
SCHEMAS_REFRESH_TIMEOUT = int(os.environ.get("REDASH_SCHEMAS_REFRESH_TIMEOUT", 300))

AUTH_TYPE = os.environ.get("REDASH_AUTH_TYPE", "api_key")
INVITATION_TOKEN_MAX_AGE = int(os.environ.get("REDASH_INVITATION_TOKEN_MAX_AGE", 60 * 60 * 24 * 7))
@@ -412,6 +413,7 @@ PAGE_SIZE_OPTIONS = list(
TABLE_CELL_MAX_JSON_SIZE = int(os.environ.get("REDASH_TABLE_CELL_MAX_JSON_SIZE", 50000))

# Features:
VERSION_CHECK = parse_boolean(os.environ.get("REDASH_VERSION_CHECK", "true"))
FEATURE_DISABLE_REFRESH_QUERIES = parse_boolean(os.environ.get("REDASH_FEATURE_DISABLE_REFRESH_QUERIES", "false"))
FEATURE_SHOW_QUERY_RESULTS_COUNT = parse_boolean(os.environ.get("REDASH_FEATURE_SHOW_QUERY_RESULTS_COUNT", "true"))
FEATURE_ALLOW_CUSTOM_JS_VISUALIZATIONS = parse_boolean(

@@ -45,6 +45,7 @@ HIDE_PLOTLY_MODE_BAR = parse_boolean(os.environ.get("HIDE_PLOTLY_MODE_BAR", "fal
DISABLE_PUBLIC_URLS = parse_boolean(os.environ.get("REDASH_DISABLE_PUBLIC_URLS", "false"))

settings = {
    "beacon_consent": None,
    "auth_password_login_enabled": PASSWORD_LOGIN_ENABLED,
    "auth_saml_enabled": SAML_LOGIN_ENABLED,
    "auth_saml_type": SAML_LOGIN_TYPE,
@@ -7,6 +7,7 @@ from redash.tasks.general import (
    record_event,
    send_mail,
    sync_user_details,
    version_check,
)
from redash.tasks.queries import (
    cleanup_query_results,

@@ -5,6 +5,7 @@ from redash import mail, models, settings
from redash.models import users
from redash.query_runner import NotSupported
from redash.tasks.worker import Queue
from redash.version_check import run_version_check
from redash.worker import get_job_logger, job

logger = get_job_logger(__name__)
@@ -29,6 +30,27 @@ def record_event(raw_event):
        logger.exception("Failed posting to %s", hook)


def version_check():
    run_version_check()


@job("default")
def subscribe(form):
    logger.info(
        "Subscribing to: [security notifications=%s], [newsletter=%s]",
        form["security_notifications"],
        form["newsletter"],
    )
    data = {
        "admin_name": form["name"],
        "admin_email": form["email"],
        "org_name": form["org_name"],
        "security_notifications": form["security_notifications"],
        "newsletter": form["newsletter"],
    }
    requests.post("https://version.redash.io/subscribe", json=data)


@job("emails")
def send_mail(to, subject, html, text):
    try:
@@ -50,7 +72,7 @@ def test_connection(data_source_id):
    return True


@job("schemas", queue_class=Queue, at_front=True, timeout=300, ttl=90)
@job("schemas", queue_class=Queue, at_front=True, timeout=settings.SCHEMAS_REFRESH_TIMEOUT, ttl=90)
def get_schema(data_source_id, refresh):
    try:
        data_source = models.DataSource.get_by_id(data_source_id)
@@ -1,5 +1,7 @@
import signal
import sys
import time
from collections import deque

import redis
from rq import get_current_job
@@ -145,6 +147,30 @@ def _resolve_user(user_id, is_api_key, query_id):
    return None


def _get_size_iterative(dict_obj):
    """Iteratively finds the size of objects in bytes."""
    seen = set()
    size = 0
    objects = deque([dict_obj])

    while objects:
        current = objects.popleft()
        if id(current) in seen:
            continue
        seen.add(id(current))
        size += sys.getsizeof(current)

        if isinstance(current, dict):
            objects.extend(current.keys())
            objects.extend(current.values())
        elif hasattr(current, "__dict__"):
            objects.append(current.__dict__)
        elif hasattr(current, "__iter__") and not isinstance(current, (str, bytes, bytearray)):
            objects.extend(current)

    return size
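Note: unlike len(data), which only counts top-level keys, the helper walks the whole object graph. A quick sketch of the difference (byte counts are platform-dependent, so only the inequality is asserted):

    data = {"columns": [{"name": "id"}], "rows": [{"id": n} for n in range(1000)]}
    assert len(data) == 2                          # just the two top-level keys
    assert _get_size_iterative(data) > len(data)   # approximate deep size in bytes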
class QueryExecutor:
    def __init__(self, query, data_source_id, user_id, is_api_key, metadata, is_scheduled_query):
        self.job = get_current_job()
@@ -195,7 +221,7 @@ class QueryExecutor:
            "job=execute_query query_hash=%s ds_id=%d data_length=%s error=[%s]",
            self.query_hash,
            self.data_source_id,
            data and len(data),
            data and _get_size_iterative(data),
            error,
        )
@@ -157,7 +157,7 @@ def remove_ghost_locks():
    logger.info("Locks found: {}, Locks removed: {}".format(len(locks), count))


@job("schemas")
@job("schemas", timeout=settings.SCHEMAS_REFRESH_TIMEOUT)
def refresh_schema(data_source_id):
    ds = models.DataSource.get_by_id(data_source_id)
    logger.info("task=refresh_schema state=start ds_id=%s", ds.id)
@@ -8,7 +8,7 @@ from rq_scheduler import Scheduler

from redash import rq_redis_connection, settings
from redash.tasks.failure_report import send_aggregated_errors
from redash.tasks.general import sync_user_details
from redash.tasks.general import sync_user_details, version_check
from redash.tasks.queries import (
    cleanup_query_results,
    empty_schedules,
@@ -79,6 +79,9 @@ def periodic_job_definitions():
        },
    ]

    if settings.VERSION_CHECK:
        jobs.append({"func": version_check, "interval": timedelta(days=1)})

    if settings.QUERY_RESULTS_CLEANUP_ENABLED:
        jobs.append({"func": cleanup_query_results, "interval": timedelta(minutes=5)})
@@ -6,7 +6,7 @@ import sys
from rq import Queue as BaseQueue
from rq.job import Job as BaseJob
from rq.job import JobStatus
from rq.timeouts import HorseMonitorTimeoutException, UnixSignalDeathPenalty
from rq.timeouts import HorseMonitorTimeoutException
from rq.utils import utcnow
from rq.worker import (
    HerokuWorker,  # HerokuWorker implements graceful shutdown on SIGTERM
@@ -113,30 +113,44 @@ class HardLimitingWorker(BaseWorker):
        )
        self.kill_horse()

    def monitor_work_horse(self, job, queue):
    def monitor_work_horse(self, job: "Job", queue: "Queue"):
        """The worker will monitor the work horse and make sure that it
        either executes successfully or the status of the job is set to
        failed.

        Args:
            job (Job): the job being monitored
            queue (Queue): the queue the job was dequeued from
        """
        self.monitor_started = utcnow()
        retpid = ret_val = rusage = None
        job.started_at = utcnow()
        while True:
            try:
                with UnixSignalDeathPenalty(self.job_monitoring_interval, HorseMonitorTimeoutException):
                    retpid, ret_val = os.waitpid(self._horse_pid, 0)
                with self.death_penalty_class(self.job_monitoring_interval, HorseMonitorTimeoutException):
                    retpid, ret_val, rusage = self.wait_for_horse()
                break
            except HorseMonitorTimeoutException:
                # Horse has not exited yet and is still running.
                # Send a heartbeat to keep the worker alive.
                self.heartbeat(self.job_monitoring_interval + 5)
                self.set_current_job_working_time((utcnow() - job.started_at).total_seconds())

                job.refresh()
                # Kill the job from this side if something is really wrong (interpreter lock/etc).
                if job.timeout != -1 and self.current_job_working_time > (job.timeout + 60):  # type: ignore
                    self.heartbeat(self.job_monitoring_interval + 60)
                    self.kill_horse()
                    self.wait_for_horse()
                    break

                self.maintain_heartbeats(job)

                if job.is_cancelled:
                    self.stop_executing_job(job)

                if self.soft_limit_exceeded(job):
                    self.enforce_hard_limit(job)

            except OSError as e:
                # In case we encountered an OSError due to EINTR (which is
                # caused by a SIGINT or SIGTERM signal during
@@ -149,29 +163,32 @@ class HardLimitingWorker(BaseWorker):
                # Send a heartbeat to keep the worker alive.
                self.heartbeat()

        self.set_current_job_working_time(0)
        self._horse_pid = 0  # Set horse PID to 0, horse has finished working
        if ret_val == os.EX_OK:  # The process exited normally.
            return

        job_status = job.get_status()

        if job_status is None:  # Job completed and its ttl has expired
            return
        if job_status not in [JobStatus.FINISHED, JobStatus.FAILED]:
        elif self._stopped_job_id == job.id:
            # Work-horse killed deliberately
            self.log.warning("Job stopped by user, moving job to FailedJobRegistry")
            if job.stopped_callback:
                job.execute_stopped_callback(self.death_penalty_class)
            self.handle_job_failure(job, queue=queue, exc_string="Job stopped by user, work-horse terminated.")
        elif job_status not in [JobStatus.FINISHED, JobStatus.FAILED]:
            if not job.ended_at:
                job.ended_at = utcnow()

            # Unhandled failure: move the job to the failed queue
            self.log.warning(
                (
                    "Moving job to FailedJobRegistry "
                    "(work-horse terminated unexpectedly; waitpid returned {})"  # fmt: skip
                ).format(ret_val)
            )
            signal_msg = f" (signal {os.WTERMSIG(ret_val)})" if ret_val and os.WIFSIGNALED(ret_val) else ""
            exc_string = f"Work-horse terminated unexpectedly; waitpid returned {ret_val}{signal_msg}; "
            self.log.warning("Moving job to FailedJobRegistry (%s)", exc_string)

            self.handle_job_failure(
                job,
                queue=queue,
                exc_string="Work-horse process was terminated unexpectedly "
                "(waitpid returned %s)" % ret_val,  # fmt: skip
            )
            self.handle_work_horse_killed(job, retpid, ret_val, rusage)
            self.handle_job_failure(job, queue=queue, exc_string=exc_string)
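Note: the new signal_msg relies on the standard os helpers for decoding a waitpid() status; roughly, on POSIX systems (the status value below is illustrative):

    import os

    status = 9                       # e.g. what waitpid() reports for a SIGKILLed child
    assert os.WIFSIGNALED(status)    # terminated by a signal...
    assert os.WTERMSIG(status) == 9  # ...namely SIGKILL
    assert os.EX_OK == 0             # only a zero status counts as a clean exit above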
class RedashWorker(StatsdRecordingWorker, HardLimitingWorker):

@@ -42,6 +42,20 @@
    {{ render_field(form.email) }}
    {{ render_field(form.password) }}

    <div class="checkbox">
      <label>
        {{ form.security_notifications() }}
        Subscribe to Security Notifications
      </label>
    </div>

    <div class="checkbox">
      <label>
        {{ form.newsletter() }}
        Subscribe to newsletter (version updates, no more than once a month)
      </label>
    </div>

    <h4 class="m-t-25">General</h4>

    {{ render_field(form.org_name, help_block="Used in email notifications and the UI.") }}
@@ -6,6 +6,7 @@ import decimal
import hashlib
import io
import json
import math
import os
import random
import re
@@ -120,6 +121,17 @@ def json_loads(data, *args, **kwargs):
    return json.loads(data, *args, **kwargs)


# Convert NaN, Inf, and -Inf to None, as they are not valid JSON values.
def _sanitize_data(data):
    if isinstance(data, dict):
        return {k: _sanitize_data(v) for k, v in data.items()}
    if isinstance(data, list):
        return [_sanitize_data(v) for v in data]
    if isinstance(data, float) and (math.isnan(data) or math.isinf(data)):
        return None
    return data


def json_dumps(data, *args, **kwargs):
    """A custom JSON dumping function which passes all parameters to the
    json.dumps function."""
@@ -128,7 +140,7 @@ def json_dumps(data, *args, **kwargs):
    # A float value of nan or inf in Python should render as None/null in JSON.
    # Using allow_nan = True would make Python render nan as NaN, leading to a parse error in the front-end.
    kwargs.setdefault("allow_nan", False)
    return json.dumps(data, *args, **kwargs)
    return json.dumps(_sanitize_data(data), *args, **kwargs)
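Note: the effect of the sanitizer in one line — values that would previously raise (because allow_nan is False) now serialize as null:

    from redash.utils import json_dumps

    assert json_dumps({"x": float("nan"), "y": [1.0, float("inf")]}) == '{"x": null, "y": [1.0, null]}'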
def mustache_render(template, context=None, **kwargs):

@@ -33,7 +33,7 @@ from sqlalchemy.orm import mapperlib
from sqlalchemy.orm.properties import ColumnProperty
from sqlalchemy.orm.query import _ColumnEntity
from sqlalchemy.orm.util import AliasedInsp
from sqlalchemy.sql.expression import asc, desc
from sqlalchemy.sql.expression import asc, desc, nullslast


def get_query_descriptor(query, entity, attr):
@@ -225,7 +225,7 @@ class QuerySorter:
    def assign_order_by(self, entity, attr, func):
        expr = get_query_descriptor(self.query, entity, attr)
        if expr is not None:
            return self.query.order_by(func(expr))
            return self.query.order_by(nullslast(func(expr)))
        if not self.silent:
            raise QuerySorterException("Could not sort query with expression '%s'" % attr)
        return self.query
redash/version_check.py (new file, 103 lines)
@@ -0,0 +1,103 @@
import logging

import requests
import semver

from redash import __version__ as current_version
from redash import redis_connection
from redash.models import Organization, db

REDIS_KEY = "new_version_available"


def usage_data():
    counts_query = """
    SELECT 'users_count' as name, count(0) as value
    FROM users
    WHERE disabled_at is null

    UNION ALL

    SELECT 'queries_count' as name, count(0) as value
    FROM queries
    WHERE is_archived is false

    UNION ALL

    SELECT 'alerts_count' as name, count(0) as value
    FROM alerts

    UNION ALL

    SELECT 'dashboards_count' as name, count(0) as value
    FROM dashboards
    WHERE is_archived is false

    UNION ALL

    SELECT 'widgets_count' as name, count(0) as value
    FROM widgets
    WHERE visualization_id is not null

    UNION ALL

    SELECT 'textbox_count' as name, count(0) as value
    FROM widgets
    WHERE visualization_id is null
    """

    data_sources_query = "SELECT type, count(0) FROM data_sources GROUP by 1"
    visualizations_query = "SELECT type, count(0) FROM visualizations GROUP by 1"
    destinations_query = "SELECT type, count(0) FROM notification_destinations GROUP by 1"

    data = {name: value for (name, value) in db.session.execute(counts_query)}
    data["data_sources"] = {name: value for (name, value) in db.session.execute(data_sources_query)}
    data["visualization_types"] = {name: value for (name, value) in db.session.execute(visualizations_query)}
    data["destination_types"] = {name: value for (name, value) in db.session.execute(destinations_query)}

    return data


def run_version_check():
    logging.info("Performing version check.")
    logging.info("Current version: %s", current_version)

    data = {"current_version": current_version}

    if Organization.query.first().get_setting("beacon_consent"):
        data["usage"] = usage_data()

    try:
        response = requests.post(
            "https://version.redash.io/api/report?channel=stable",
            json=data,
            timeout=3.0,
        )
        latest_version = response.json()["release"]["version"]

        _compare_and_update(latest_version)
    except requests.RequestException:
        logging.exception("Failed checking for new version.")
    except (ValueError, KeyError):
        logging.exception("Failed checking for new version (probably bad/non-JSON response).")


def reset_new_version_status():
    latest_version = get_latest_version()
    if latest_version:
        _compare_and_update(latest_version)


def get_latest_version():
    return redis_connection.get(REDIS_KEY)


def _compare_and_update(latest_version):
    # TODO: support alpha channel (allow setting which channel to check & parse build number)
    is_newer = semver.compare(current_version, latest_version) == -1
    logging.info("Latest version: %s (newer: %s)", latest_version, is_newer)

    if is_newer:
        redis_connection.set(REDIS_KEY, latest_version)
    else:
        redis_connection.delete(REDIS_KEY)
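Note: semver.compare returns -1/0/1, so the == -1 check means exactly "the running version is older". For instance (assuming the semver package):

    import semver

    assert semver.compare("10.0.0", "10.1.0") == -1  # current is older -> a newer release exists
    assert semver.compare("10.1.0", "10.1.0") == 0   # up to date -> the Redis key is cleared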
@@ -1,4 +1,9 @@
import datetime

from mock import patch

from redash.models import Alert, AlertSubscription, db
from redash.utils import utcnow
from tests import BaseTestCase


@@ -39,6 +44,26 @@ class TestAlertResourcePost(BaseTestCase):
        self.assertEqual(rv.status_code, 200)


class TestAlertEvaluateResource(BaseTestCase):
    @patch("redash.handlers.alerts.notify_subscriptions")
    def test_evaluates_alert_and_notifies(self, mock_notify_subscriptions):
        query = self.factory.create_query(
            data_source=self.factory.create_data_source(group=self.factory.create_group())
        )
        retrieved_at = utcnow() - datetime.timedelta(days=1)
        query_result = self.factory.create_query_result(
            retrieved_at=retrieved_at,
            query_text=query.query_text,
            query_hash=query.query_hash,
        )
        query.latest_query_data = query_result
        alert = self.factory.create_alert(query_rel=query)
        rv = self.make_request("post", "/api/alerts/{}/eval".format(alert.id))

        self.assertEqual(rv.status_code, 200)
        mock_notify_subscriptions.assert_called()


class TestAlertResourceDelete(BaseTestCase):
    def test_removes_alert_and_subscriptions(self):
        subscription = self.factory.create_alert_subscription()
@@ -261,15 +261,19 @@ def test_webex_notify_calls_requests_post():
    alert.name = "Test Alert"
    alert.custom_subject = "Test custom subject"
    alert.custom_body = "Test custom body"

    alert.render_template = mock.Mock(return_value={"Rendered": "template"})

    query = mock.Mock()
    query.id = 1

    user = mock.Mock()
    app = mock.Mock()
    host = "https://localhost:5000"
    options = {"webex_bot_token": "abcd", "to_room_ids": "1234"}
    options = {
        "webex_bot_token": "abcd",
        "to_room_ids": "1234,5678",
        "to_person_emails": "example1@test.com,example2@test.com",
    }
    metadata = {"Scheduled": False}

    new_state = Alert.TRIGGERED_STATE
@@ -277,7 +281,7 @@ def test_webex_notify_calls_requests_post():

    with mock.patch("redash.destinations.webex.requests.post") as mock_post:
        mock_response = mock.Mock()
        mock_response.status_code = 204
        mock_response.status_code = 200
        mock_post.return_value = mock_response

        destination.notify(alert, query, user, new_state, app, host, metadata, options)
@@ -285,13 +289,111 @@ def test_webex_notify_calls_requests_post():
        query_link = f"{host}/queries/{query.id}"
        alert_link = f"{host}/alerts/{alert.id}"

        formatted_attachments = Webex.formatted_attachments_template(
        expected_attachments = Webex.formatted_attachments_template(
            alert.custom_subject, alert.custom_body, query_link, alert_link
        )

        expected_payload_room = {
            "markdown": alert.custom_subject + "\n" + alert.custom_body,
            "attachments": expected_attachments,
            "roomId": "1234",
        }

        expected_payload_email = {
            "markdown": alert.custom_subject + "\n" + alert.custom_body,
            "attachments": expected_attachments,
            "toPersonEmail": "example1@test.com",
        }

        # Check that requests.post was called for both roomId and toPersonEmail destinations
        mock_post.assert_any_call(
            destination.api_base_url,
            json=expected_payload_room,
            headers={"Authorization": "Bearer abcd"},
            timeout=5.0,
        )

        mock_post.assert_any_call(
            destination.api_base_url,
            json=expected_payload_email,
            headers={"Authorization": "Bearer abcd"},
            timeout=5.0,
        )

        assert mock_response.status_code == 200
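Note: the test above exercises what is presumably a fan-out in the Webex destination — comma-separated room IDs and person emails each become one POST. A sketch of that presumed splitting logic (hypothetical illustration, not the actual implementation):

    to_room_ids = "1234,5678"
    to_person_emails = "example1@test.com,example2@test.com"
    targets = [{"roomId": r} for r in to_room_ids.split(",") if r] + [
        {"toPersonEmail": e} for e in to_person_emails.split(",") if e
    ]
    assert len(targets) == 4  # four payloads -> four requests.post calls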
def test_webex_notify_handles_blank_entries():
    alert = mock.Mock(spec_set=["id", "name", "custom_subject", "custom_body", "render_template"])
    alert.id = 1
    alert.name = "Test Alert"
    alert.custom_subject = "Test custom subject"
    alert.custom_body = "Test custom body"
    alert.render_template = mock.Mock(return_value={"Rendered": "template"})

    query = mock.Mock()
    query.id = 1

    user = mock.Mock()
    app = mock.Mock()
    host = "https://localhost:5000"
    options = {
        "webex_bot_token": "abcd",
        "to_room_ids": "",
        "to_person_emails": "",
    }
    metadata = {"Scheduled": False}

    new_state = Alert.TRIGGERED_STATE
    destination = Webex(options)

    with mock.patch("redash.destinations.webex.requests.post") as mock_post:
        destination.notify(alert, query, user, new_state, app, host, metadata, options)

        # Ensure no API calls are made when destinations are blank
        mock_post.assert_not_called()
def test_webex_notify_handles_2d_array():
    alert = mock.Mock(spec_set=["id", "name", "custom_subject", "custom_body", "render_template"])
    alert.id = 1
    alert.name = "Test Alert"
    alert.custom_subject = "Test custom subject"
    alert.custom_body = "Test custom body with table [['Col1', 'Col2'], ['Val1', 'Val2']]"
    alert.render_template = mock.Mock(return_value={"Rendered": "template"})

    query = mock.Mock()
    query.id = 1

    user = mock.Mock()
    app = mock.Mock()
    host = "https://localhost:5000"
    options = {
        "webex_bot_token": "abcd",
        "to_room_ids": "1234",
    }
    metadata = {"Scheduled": False}

    new_state = Alert.TRIGGERED_STATE
    destination = Webex(options)

    with mock.patch("redash.destinations.webex.requests.post") as mock_post:
        mock_response = mock.Mock()
        mock_response.status_code = 200
        mock_post.return_value = mock_response

        destination.notify(alert, query, user, new_state, app, host, metadata, options)

        query_link = f"{host}/queries/{query.id}"
        alert_link = f"{host}/alerts/{alert.id}"

        expected_attachments = Webex.formatted_attachments_template(
            alert.custom_subject, alert.custom_body, query_link, alert_link
        )

        expected_payload = {
            "markdown": alert.custom_subject + "\n" + alert.custom_body,
            "attachments": formatted_attachments,
            "attachments": expected_attachments,
            "roomId": "1234",
        }

@@ -302,7 +404,60 @@ def test_webex_notify_calls_requests_post():
            timeout=5.0,
        )

        assert mock_response.status_code == 204
        assert mock_response.status_code == 200
def test_webex_notify_handles_1d_array():
    alert = mock.Mock(spec_set=["id", "name", "custom_subject", "custom_body", "render_template"])
    alert.id = 1
    alert.name = "Test Alert"
    alert.custom_subject = "Test custom subject"
    alert.custom_body = "Test custom body with 1D array, however unlikely ['Col1', 'Col2']"
    alert.render_template = mock.Mock(return_value={"Rendered": "template"})

    query = mock.Mock()
    query.id = 1

    user = mock.Mock()
    app = mock.Mock()
    host = "https://localhost:5000"
    options = {
        "webex_bot_token": "abcd",
        "to_room_ids": "1234",
    }
    metadata = {"Scheduled": False}

    new_state = Alert.TRIGGERED_STATE
    destination = Webex(options)

    with mock.patch("redash.destinations.webex.requests.post") as mock_post:
        mock_response = mock.Mock()
        mock_response.status_code = 200
        mock_post.return_value = mock_response

        destination.notify(alert, query, user, new_state, app, host, metadata, options)

        query_link = f"{host}/queries/{query.id}"
        alert_link = f"{host}/alerts/{alert.id}"

        expected_attachments = Webex.formatted_attachments_template(
            alert.custom_subject, alert.custom_body, query_link, alert_link
        )

        expected_payload = {
            "markdown": alert.custom_subject + "\n" + alert.custom_body,
            "attachments": expected_attachments,
            "roomId": "1234",
        }

        mock_post.assert_called_once_with(
            destination.api_base_url,
            json=expected_payload,
            headers={"Authorization": "Bearer abcd"},
            timeout=5.0,
        )

        assert mock_response.status_code == 200


def test_datadog_notify_calls_requests_post():
@@ -49,7 +49,9 @@ class TestAlertEvaluate(BaseTestCase):
    def create_alert(self, results, column="foo", value="1"):
        result = self.factory.create_query_result(data=results)
        query = self.factory.create_query(latest_query_data_id=result.id)
        alert = self.factory.create_alert(query_rel=query, options={"op": "equals", "column": column, "value": value})
        alert = self.factory.create_alert(
            query_rel=query, options={"selector": "first", "op": "equals", "column": column, "value": value}
        )
        return alert

    def test_evaluate_triggers_alert_when_equal(self):
@@ -69,6 +71,57 @@ class TestAlertEvaluate(BaseTestCase):
        alert = self.create_alert(results)
        self.assertEqual(alert.evaluate(), Alert.UNKNOWN_STATE)

    def test_evaluates_correctly_with_first_selector(self):
        results = {"rows": [{"foo": 1}, {"foo": 2}], "columns": [{"name": "foo", "type": "INTEGER"}]}
        alert = self.create_alert(results)
        alert.options["selector"] = "first"
        self.assertEqual(alert.evaluate(), Alert.TRIGGERED_STATE)
        results = {
            "rows": [{"foo": "test"}, {"foo": "test"}, {"foo": "test"}],
            "columns": [{"name": "foo", "type": "STRING"}],
        }
        alert = self.create_alert(results)
        alert.options["selector"] = "first"
        alert.options["op"] = "<"
        self.assertEqual(alert.evaluate(), Alert.UNKNOWN_STATE)

    def test_evaluates_correctly_with_min_selector(self):
        results = {"rows": [{"foo": 2}, {"foo": 1}], "columns": [{"name": "foo", "type": "INTEGER"}]}
        alert = self.create_alert(results)
        alert.options["selector"] = "min"
        self.assertEqual(alert.evaluate(), Alert.TRIGGERED_STATE)
        results = {
            "rows": [{"foo": "test"}, {"foo": "test"}, {"foo": "test"}],
            "columns": [{"name": "foo", "type": "STRING"}],
        }
        alert = self.create_alert(results)
        alert.options["selector"] = "min"
        self.assertEqual(alert.evaluate(), Alert.UNKNOWN_STATE)

    def test_evaluates_correctly_with_max_selector(self):
        results = {"rows": [{"foo": 1}, {"foo": 2}], "columns": [{"name": "foo", "type": "INTEGER"}]}
        alert = self.create_alert(results)
        alert.options["selector"] = "max"
        self.assertEqual(alert.evaluate(), Alert.OK_STATE)
        results = {
            "rows": [{"foo": "test"}, {"foo": "test"}, {"foo": "test"}],
            "columns": [{"name": "foo", "type": "STRING"}],
        }
        alert = self.create_alert(results)
        alert.options["selector"] = "max"
        self.assertEqual(alert.evaluate(), Alert.UNKNOWN_STATE)

    def test_evaluate_alerts_without_query_rel(self):
        query = self.factory.create_query(latest_query_data_id=None)
        alert = self.factory.create_alert(
            query_rel=query, options={"selector": "first", "op": "equals", "column": "foo", "value": "1"}
        )
        self.assertEqual(alert.evaluate(), Alert.UNKNOWN_STATE)

    def test_evaluate_return_unknown_when_value_is_none(self):
        alert = self.create_alert(get_results(None))
        self.assertEqual(alert.evaluate(), Alert.UNKNOWN_STATE)


class TestNextState(TestCase):
    def test_numeric_value(self):
@@ -94,7 +147,9 @@ class TestAlertRenderTemplate(BaseTestCase):
    def create_alert(self, results, column="foo", value="5"):
        result = self.factory.create_query_result(data=results)
        query = self.factory.create_query(latest_query_data_id=result.id)
        alert = self.factory.create_alert(query_rel=query, options={"op": "equals", "column": column, "value": value})
        alert = self.factory.create_alert(
            query_rel=query, options={"selector": "first", "op": "equals", "column": column, "value": value}
        )
        return alert

    def test_render_custom_alert_template(self):
@@ -102,6 +157,7 @@ class TestAlertRenderTemplate(BaseTestCase):
        custom_alert = """
<pre>
ALERT_STATUS {{ALERT_STATUS}}
ALERT_SELECTOR {{ALERT_SELECTOR}}
ALERT_CONDITION {{ALERT_CONDITION}}
ALERT_THRESHOLD {{ALERT_THRESHOLD}}
ALERT_NAME {{ALERT_NAME}}
@@ -116,6 +172,7 @@ class TestAlertRenderTemplate(BaseTestCase):
        expected = """
<pre>
ALERT_STATUS UNKNOWN
ALERT_SELECTOR first
ALERT_CONDITION equals
ALERT_THRESHOLD 5
ALERT_NAME %s
@@ -75,7 +75,9 @@ class TestGlueSchema(TestCase):
            {"DatabaseName": "test1"},
        )
        with self.stubber:
-            assert query_runner.get_schema() == [{"columns": ["row_id"], "name": "test1.jdbc_table"}]
+            assert query_runner.get_schema() == [
+                {"columns": [{"name": "row_id", "type": "int"}], "name": "test1.jdbc_table"}
+            ]

    def test_partitioned_table(self):
        """
@@ -124,7 +126,12 @@ class TestGlueSchema(TestCase):
            {"DatabaseName": "test1"},
        )
        with self.stubber:
-            assert query_runner.get_schema() == [{"columns": ["sk", "category"], "name": "test1.partitioned_table"}]
+            assert query_runner.get_schema() == [
+                {
+                    "columns": [{"name": "sk", "type": "int"}, {"name": "category", "type": "int"}],
+                    "name": "test1.partitioned_table",
+                }
+            ]

    def test_view(self):
        query_runner = Athena({"glue": True, "region": "mars-east-1"})
@@ -156,7 +163,7 @@ class TestGlueSchema(TestCase):
            {"DatabaseName": "test1"},
        )
        with self.stubber:
-            assert query_runner.get_schema() == [{"columns": ["sk"], "name": "test1.view"}]
+            assert query_runner.get_schema() == [{"columns": [{"name": "sk", "type": "int"}], "name": "test1.view"}]

    def test_dodgy_table_does_not_break_schema_listing(self):
        """
@@ -196,7 +203,9 @@ class TestGlueSchema(TestCase):
            {"DatabaseName": "test1"},
        )
        with self.stubber:
-            assert query_runner.get_schema() == [{"columns": ["region"], "name": "test1.csv"}]
+            assert query_runner.get_schema() == [
+                {"columns": [{"name": "region", "type": "string"}], "name": "test1.csv"}
+            ]

    def test_no_storage_descriptor_table(self):
        """
@@ -312,6 +321,6 @@ class TestGlueSchema(TestCase):
        )
        with self.stubber:
            assert query_runner.get_schema() == [
-                {"columns": ["row_id"], "name": "test1.jdbc_table"},
-                {"columns": ["row_id"], "name": "test2.jdbc_table"},
+                {"columns": [{"name": "row_id", "type": "int"}], "name": "test1.jdbc_table"},
+                {"columns": [{"name": "row_id", "type": "int"}], "name": "test2.jdbc_table"},
            ]
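Every hunk above makes the same change: schema columns returned by the Glue-backed Athena runner switch from bare name strings to name/type dicts. A sketch of that mapping over one Glue table, assuming boto3's glue.get_tables response shape (StorageDescriptor columns plus PartitionKeys):

    def columns_from_glue_table(table):
        # Data columns first, then partition keys, each with its Glue type.
        storage = table.get("StorageDescriptor", {})
        columns = storage.get("Columns", []) + table.get("PartitionKeys", [])
        return [{"name": c["Name"], "type": c.get("Type")} for c in columns]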
tests/query_runner/test_azure_kusto.py (new file, 42 lines)
@@ -0,0 +1,42 @@
from unittest import TestCase
from unittest.mock import patch

from redash.query_runner.azure_kusto import AzureKusto


class TestAzureKusto(TestCase):
    def setUp(self):
        self.configuration = {
            "cluster": "https://example.kusto.windows.net",
            "database": "sample_db",
            "azure_ad_client_id": "client_id",
            "azure_ad_client_secret": "client_secret",
            "azure_ad_tenant_id": "tenant_id",
        }
        self.kusto = AzureKusto(self.configuration)

    @patch.object(AzureKusto, "run_query")
    def test_get_schema(self, mock_run_query):
        mock_response = {
            "rows": [
                {
                    "DatabaseSchema": '{"Databases":{"sample_db":{"Tables":{"Table1":{"Name":"Table1","OrderedColumns":[{"Name":"Column1","Type":"System.String","CslType":"string"},{"Name":"Column2","Type":"System.DateTime","CslType":"datetime"}]}},"MaterializedViews":{"View1":{"Name":"View1","OrderedColumns":[{"Name":"Column1","Type":"System.String","CslType":"string"},{"Name":"Column2","Type":"System.DateTime","CslType":"datetime"}]}}}}}'
                }
            ]
        }
        mock_run_query.return_value = (mock_response, None)

        expected_schema = [
            {
                "name": "Table1",
                "columns": [{"name": "Column1", "type": "string"}, {"name": "Column2", "type": "datetime"}],
            },
            {
                "name": "View1",
                "columns": [{"name": "Column1", "type": "string"}, {"name": "Column2", "type": "datetime"}],
            },
        ]

        schema = self.kusto.get_schema()
        self.assertEqual(schema, expected_schema)
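The mocked DatabaseSchema blob mirrors what Kusto's `.show database schema as json` command returns. A sketch of the parsing this test exercises, as a hypothetical standalone helper rather than the runner's exact code:

    import json

    def parse_kusto_schema(database_schema_json, database):
        # Flatten Tables and MaterializedViews into Redash schema entries.
        db = json.loads(database_schema_json)["Databases"][database]
        schema = []
        for group in ("Tables", "MaterializedViews"):
            for table in db.get(group, {}).values():
                columns = [{"name": c["Name"], "type": c["CslType"]} for c in table["OrderedColumns"]]
                schema.append({"name": table["Name"], "columns": columns})
        return schema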
@@ -5,6 +5,7 @@ from freezegun import freeze_time
from mock import patch
from pytz import utc

+from redash.query_runner import TYPE_INTEGER, TYPE_STRING
from redash.query_runner.mongodb import (
    MongoDB,
    _get_column_by_name,
@@ -15,7 +16,7 @@ from redash.utils import json_dumps, parse_human_time


@patch("redash.query_runner.mongodb.pymongo.MongoClient")
-class TestUserPassOverride(TestCase):
+class TestMongoDB(TestCase):
    def test_username_password_present_overrides_username_from_uri(self, mongo_client):
        config = {
            "connectionString": "mongodb://localhost:27017/test",
@@ -37,6 +38,73 @@ class TestUserPassOverride(TestCase):
        self.assertNotIn("username", mongo_client.call_args.kwargs)
        self.assertNotIn("password", mongo_client.call_args.kwargs)

+    def test_run_query_with_fields(self, mongo_client):
+        query = {"collection": "test", "query": {"age": 10}, "fields": {"_id": 1, "name": 2}}
+        return_value = [{"_id": "6569ee53d53db7930aaa0cc0", "name": "test2"}]
+        expected = {
+            "columns": [
+                {"name": "_id", "friendly_name": "_id", "type": TYPE_STRING},
+                {"name": "name", "friendly_name": "name", "type": TYPE_STRING},
+            ],
+            "rows": return_value,
+        }
+
+        mongo_client().__getitem__().__getitem__().find.return_value = return_value
+        self._test_query(query, return_value, expected)
+
+    def test_run_query_with_func(self, mongo_client):
+        query = {
+            "collection": "test",
+            "query": {"age": 10},
+            "fields": {"_id": 1, "name": 4, "link": {"$concat": ["hoge_", "$name"]}},
+        }
+        return_value = [{"_id": "6569ee53d53db7930aaa0cc0", "name": "test2", "link": "hoge_test2"}]
+        expected = {
+            "columns": [
+                {"name": "_id", "friendly_name": "_id", "type": TYPE_STRING},
+                {"name": "link", "friendly_name": "link", "type": TYPE_STRING},
+                {"name": "name", "friendly_name": "name", "type": TYPE_STRING},
+            ],
+            "rows": return_value,
+        }
+
+        mongo_client().__getitem__().__getitem__().find.return_value = return_value
+        self._test_query(query, return_value, expected)
+
+    def test_run_query_with_aggregate(self, mongo_client):
+        query = {
+            "collection": "test",
+            "aggregate": [
+                {"$unwind": "$tags"},
+                {"$group": {"_id": "$tags", "count": {"$sum": 1}}},
+                {"$sort": [{"name": "count", "direction": -1}, {"name": "_id", "direction": -1}]},
+            ],
+        }
+        return_value = [{"_id": "foo", "count": 10}, {"_id": "bar", "count": 9}]
+        expected = {
+            "columns": [
+                {"name": "_id", "friendly_name": "_id", "type": TYPE_STRING},
+                {"name": "count", "friendly_name": "count", "type": TYPE_INTEGER},
+            ],
+            "rows": return_value,
+        }
+
+        mongo_client().__getitem__().__getitem__().aggregate.return_value = return_value
+        self._test_query(query, return_value, expected)
+
+    def _test_query(self, query, return_value, expected):
+        config = {
+            "connectionString": "mongodb://localhost:27017/test",
+            "username": "test_user",
+            "password": "test_pass",
+            "dbName": "test",
+        }
+        mongo_qr = MongoDB(config)
+
+        result, err = mongo_qr.run_query(json_dumps(query), None)
+        self.assertIsNone(err)
+        self.assertEqual(expected, result)
+

class TestParseQueryJson(TestCase):
    def test_ignores_non_isodate_fields(self):
@@ -130,6 +198,7 @@ class TestMongoResults(TestCase):
        for i, row in enumerate(rows):
            self.assertDictEqual(row, raw_results[i])

+        self.assertEqual(3, len(columns))
        self.assertIsNotNone(_get_column_by_name(columns, "column"))
        self.assertIsNotNone(_get_column_by_name(columns, "column2"))
        self.assertIsNotNone(_get_column_by_name(columns, "column3"))
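The new MongoDB tests drive run_query through a fully mocked client, so the query-document shape is the real subject. A sketch of how such a document could dispatch to pymongo, under the shape the tests assume (the real runner additionally rewrites its list-style "$sort" into pymongo's (name, direction) tuples, omitted here):

    from pymongo import MongoClient

    def run_mongo_query(connection_string, db_name, q):
        # "aggregate" takes precedence; otherwise a plain find with an
        # optional projection ("fields").
        collection = MongoClient(connection_string)[db_name][q["collection"]]
        if "aggregate" in q:
            return list(collection.aggregate(q["aggregate"]))
        return list(collection.find(q.get("query", {}), q.get("fields")))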
@@ -25,3 +25,19 @@ class TestBuildSchema(TestCase):
        self.assertListEqual(schema["main.users"]["columns"], ["id", "name"])
        self.assertIn('public."main.users"', schema.keys())
        self.assertListEqual(schema['public."main.users"']["columns"], ["id"])
+
+    def test_build_schema_with_data_types(self):
+        results = {
+            "rows": [
+                {"table_schema": "main", "table_name": "users", "column_name": "id", "data_type": "integer"},
+                {"table_schema": "main", "table_name": "users", "column_name": "name", "data_type": "varchar"},
+            ]
+        }
+
+        schema = {}
+
+        build_schema(results, schema)
+
+        self.assertListEqual(
+            schema["main.users"]["columns"], [{"name": "id", "type": "integer"}, {"name": "name", "type": "varchar"}]
+        )
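A sketch of the behavior the new test pins down, as a hypothetical standalone build_schema (the real helper lives alongside the SQL-based query runners and also covers the quoted-identifier cases in the earlier assertions):

    def build_schema(results, schema):
        # Group rows by "<table_schema>.<table_name>" and collect
        # name/type column dicts in row order.
        for row in results["rows"]:
            table_name = "{}.{}".format(row["table_schema"], row["table_name"])
            table = schema.setdefault(table_name, {"name": table_name, "columns": []})
            table["columns"].append({"name": row["column_name"], "type": row["data_type"]})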
@@ -216,6 +216,20 @@ class QueryOutdatedQueriesTest(BaseTestCase):

        self.assertEqual(list(models.Query.outdated_queries()), [query2])

+    def test_enqueues_scheduled_query_without_latest_query_data(self):
+        """
+        Queries with a schedule but no latest_query_data will still be reported by Query.outdated_queries()
+        """
+        query = self.factory.create_query(
+            schedule=self.schedule(interval="60"),
+            data_source=self.factory.create_data_source(),
+        )
+
+        outdated_queries = models.Query.outdated_queries()
+        self.assertEqual(query.latest_query_data, None)
+        self.assertEqual(len(outdated_queries), 1)
+        self.assertIn(query, outdated_queries)
+
    def test_enqueues_query_with_correct_data_source(self):
        """
        Queries from different data sources will be reported by
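The scheduling rule this test asserts reduces to: a scheduled query that has never produced a result is immediately due. A sketch of that check, with hypothetical names:

    def is_due(latest_retrieved_at, now, interval_seconds):
        # Never-run scheduled queries are due right away; otherwise wait
        # out the interval since the last retrieval.
        if latest_retrieved_at is None:
            return True
        return (now - latest_retrieved_at).total_seconds() >= interval_seconds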
tests/test_monitor.py (new file, 23 lines)
@@ -0,0 +1,23 @@
from unittest.mock import MagicMock, patch

from redash import rq_redis_connection
from redash.monitor import rq_job_ids


def test_rq_job_ids_uses_rq_redis_connection():
    mock_queue = MagicMock()
    mock_queue.job_ids = []

    mock_registry = MagicMock()
    mock_registry.get_job_ids.return_value = []

    with patch("redash.monitor.Queue") as mock_Queue, patch(
        "redash.monitor.StartedJobRegistry"
    ) as mock_StartedJobRegistry:
        mock_Queue.all.return_value = [mock_queue]
        mock_StartedJobRegistry.return_value = mock_registry

        rq_job_ids()

        mock_Queue.all.assert_called_once_with(connection=rq_redis_connection)
        mock_StartedJobRegistry.assert_called_once_with(queue=mock_queue)
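The test only pins down how the RQ objects are constructed; a sketch of the function under test, assuming rq's Queue.all and StartedJobRegistry APIs:

    from rq import Queue
    from rq.registry import StartedJobRegistry

    from redash import rq_redis_connection

    def rq_job_ids():
        # Queued plus started job ids across all queues, resolved on the
        # dedicated RQ Redis connection rather than the default one.
        queues = Queue.all(connection=rq_redis_connection)
        queued = [jid for q in queues for jid in q.job_ids]
        started = [jid for q in queues for jid in StartedJobRegistry(queue=q).get_job_ids()]
        return queued + started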
tests/utils/test_json_dumps.py (new file, 31 lines)
@@ -0,0 +1,31 @@
from redash.utils import json_dumps, json_loads
from tests import BaseTestCase


class TestJsonDumps(BaseTestCase):
    """
    NaN, Inf, and -Inf are sanitized to None.
    """

    def test_data_with_nan_is_sanitized(self):
        input_data = {
            "columns": [
                {"name": "_col0", "friendly_name": "_col0", "type": "float"},
                {"name": "_col1", "friendly_name": "_col1", "type": "float"},
                {"name": "_col2", "friendly_name": "_col2", "type": "float"},
                {"name": "_col3", "friendly_name": "_col3", "type": "float"},
            ],
            "rows": [{"_col0": 1.0, "_col1": float("nan"), "_col2": float("inf"), "_col3": float("-inf")}],
        }
        expected_output_data = {
            "columns": [
                {"name": "_col0", "friendly_name": "_col0", "type": "float"},
                {"name": "_col1", "friendly_name": "_col1", "type": "float"},
                {"name": "_col2", "friendly_name": "_col2", "type": "float"},
                {"name": "_col3", "friendly_name": "_col3", "type": "float"},
            ],
            "rows": [{"_col0": 1.0, "_col1": None, "_col2": None, "_col3": None}],
        }
        json_data = json_dumps(input_data)
        actual_output_data = json_loads(json_data)
        self.assertEqual(actual_output_data, expected_output_data)
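NaN and the infinities are valid Python floats but not valid strict JSON, which is why json_dumps must sanitize them. A minimal stdlib-only sketch of the behavior the test describes (not Redash's actual implementation):

    import json
    import math

    def _sanitize(value):
        # Replace NaN/Inf/-Inf with None, recursing through containers.
        if isinstance(value, float) and not math.isfinite(value):
            return None
        if isinstance(value, dict):
            return {k: _sanitize(v) for k, v in value.items()}
        if isinstance(value, list):
            return [_sanitize(v) for v in value]
        return value

    def json_dumps_sketch(data):
        return json.dumps(_sanitize(data), allow_nan=False)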
@@ -46,7 +46,7 @@
    "@types/jest": "^26.0.18",
    "@types/leaflet": "^1.5.19",
    "@types/numeral": "0.0.28",
-    "@types/plotly.js": "^1.54.22",
+    "@types/plotly.js": "^2.35.2",
    "@types/react": "^17.0.0",
    "@types/react-dom": "^17.0.0",
    "@types/tinycolor2": "^1.4.2",
@@ -91,7 +91,7 @@
    "leaflet.markercluster": "^1.1.0",
    "lodash": "^4.17.10",
    "numeral": "^2.0.6",
-    "plotly.js": "1.58.5",
+    "plotly.js": "2.35.3",
    "react-pivottable": "^0.9.0",
    "react-sortable-hoc": "^1.10.1",
    "tinycolor2": "^1.4.1",
@@ -1,6 +1,6 @@
import { values } from "lodash";

-// The following colors will be used if you pick "Automatic" color
+// Define color palettes
export const BaseColors = {
  Blue: "#356AFF",
  Red: "#E92828",
@@ -28,11 +28,78 @@ export const AdditionalColors = {
  "Pink 2": "#C63FA9",
};

-export const ColorPaletteArray = values(BaseColors);
+const Viridis = {
+  1: "#440154",
+  2: "#48186a",
+  3: "#472d7b",
+  4: "#424086",
+  5: "#3b528b",
+  6: "#33638d",
+  7: "#2c728e",
+  8: "#26828e",
+  9: "#21918c",
+  10: "#1fa088",
+  11: "#28ae80",
+  12: "#3fbc73",
+  13: "#5ec962",
+  14: "#84d44b",
+  15: "#addc30",
+  16: "#d8e219",
+  17: "#fde725",
+};

-const ColorPalette = {
+const Tableau = {
+  1: "#4e79a7",
+  2: "#f28e2c",
+  3: "#e15759",
+  4: "#76b7b2",
+  5: "#59a14f",
+  6: "#edc949",
+  7: "#af7aa1",
+  8: "#ff9da7",
+  9: "#9c755f",
+  10: "#bab0ab",
+};
+
+const D3Category10 = {
+  1: "#1f77b4",
+  2: "#ff7f0e",
+  3: "#2ca02c",
+  4: "#d62728",
+  5: "#9467bd",
+  6: "#8c564b",
+  7: "#e377c2",
+  8: "#7f7f7f",
+  9: "#bcbd22",
+  10: "#17becf",
+};
+
+let ColorPalette = {
  ...BaseColors,
  ...AdditionalColors,
};

+export const ColorPaletteArray = values(ColorPalette);
+
export default ColorPalette;
+
+export const AllColorPalettes = {
+  Redash: ColorPalette,
+  Viridis: Viridis,
+  "Tableau 10": Tableau,
+  "D3 Category 10": D3Category10,
+};
+
+export const AllColorPaletteArrays = {
+  Redash: ColorPaletteArray,
+  Viridis: values(Viridis),
+  "Tableau 10": values(Tableau),
+  "D3 Category 10": values(D3Category10),
+};
+
+export const ColorPaletteTypes = {
+  Redash: "discrete",
+  Viridis: "continuous",
+  "Tableau 10": "discrete",
+  "D3 Category 10": "discrete",
+};
@@ -3,16 +3,18 @@ import React, { useMemo, useCallback } from "react";
import Table from "antd/lib/table";
import ColorPicker from "@/components/ColorPicker";
import { EditorPropTypes } from "@/visualizations/prop-types";
-import ColorPalette from "@/visualizations/ColorPalette";
+import { AllColorPalettes } from "@/visualizations/ColorPalette";
import getChartData from "../getChartData";
+import { Section, Select } from "@/components/visualizations/editor";

export default function DefaultColorsSettings({ options, data, onOptionsChange }: any) {
  const colors = useMemo(
    () => ({
      Automatic: null,
-      ...ColorPalette,
+      // @ts-expect-error ts-migrate(7053) FIXME: Element implicitly has an 'any' type because expre... Remove this comment to see the full error message
+      ...AllColorPalettes[options.color_scheme],
    }),
-    []
+    [options.color_scheme]
  );

  const series = useMemo(
@@ -67,8 +69,25 @@ export default function DefaultColorsSettings({ options, data, onOptionsChange }
    },
  ];

-  // @ts-expect-error ts-migrate(2322) FIXME: Type 'boolean[]' is not assignable to type 'object... Remove this comment to see the full error message
-  return <Table showHeader={false} dataSource={series} columns={columns} pagination={false} />;
+  return (
+    <React.Fragment>
+      {/* @ts-expect-error ts-migrate(2745) FIXME: This JSX tag's 'children' prop expects type 'never... Remove this comment to see the full error message */}
+      <Section>
+        <Select
+          label="Color Scheme"
+          defaultValue={options.color_scheme}
+          data-test="ColorScheme"
+          onChange={(val: any) => onOptionsChange({ color_scheme: val })}>
+          {Object.keys(AllColorPalettes).map(option => (
+            // @ts-expect-error ts-migrate(2339) FIXME: Property 'Option' does not exist on type '({ class... Remove this comment to see the full error message
+            <Select.Option data-test={`ColorOption${option}`} key={option} value={option}>{option}</Select.Option>
+          ))}
+        </Select>
+      </Section>
+      {/* @ts-expect-error ts-migrate(2322) FIXME: Type 'boolean[]' is not assignable to type 'object... Remove this comment to see the full error message */}
+      <Table showHeader={false} dataSource={series} columns={columns} pagination={false} />
+    </React.Fragment>
+  );
}

DefaultColorsSettings.propTypes = EditorPropTypes;
@@ -3,7 +3,7 @@ import React, { useMemo } from "react";
import { Section, Select, Checkbox, InputNumber, ContextHelp, Input } from "@/components/visualizations/editor";
import { UpdateOptionsStrategy } from "@/components/visualizations/editor/createTabbedEditor";
import { EditorPropTypes } from "@/visualizations/prop-types";

+import { AllColorPalettes } from "@/visualizations/ColorPalette";
import ChartTypeSelect from "./ChartTypeSelect";
import ColumnMappingSelect from "./ColumnMappingSelect";
import { useDebouncedCallback } from "use-debounce/lib";
@@ -3,8 +3,9 @@ import React, { useMemo, useCallback } from "react";
import Table from "antd/lib/table";
import ColorPicker from "@/components/ColorPicker";
import { EditorPropTypes } from "@/visualizations/prop-types";
-import ColorPalette from "@/visualizations/ColorPalette";
+import { AllColorPalettes } from "@/visualizations/ColorPalette";
import getChartData from "../getChartData";
+import { Section, Select } from "@/components/visualizations/editor";

function getUniqueValues(chartData: any) {
  const uniqueValuesNames = new Set();
@@ -20,9 +21,10 @@ export default function PieColorsSettings({ options, data, onOptionsChange }: an
  const colors = useMemo(
    () => ({
      Automatic: null,
-      ...ColorPalette,
+      // @ts-expect-error ts-migrate(7053) FIXME: Element implicitly has an 'any' type because expre... Remove this comment to see the full error message
+      ...AllColorPalettes[options.color_scheme],
    }),
-    []
+    [options.color_scheme]
  );

  const series = useMemo(
@@ -78,7 +80,24 @@ export default function PieColorsSettings({ options, data, onOptionsChange }: an
    },
  ];

-  return <Table showHeader={false} dataSource={series} columns={columns} pagination={false} />;
+  return (
+    <React.Fragment>
+      {/* @ts-expect-error ts-migrate(2745) FIXME: This JSX tag's 'children' prop expects type 'never... Remove this comment to see the full error message */}
+      <Section>
+        <Select
+          label="Color Scheme"
+          defaultValue={options.color_scheme}
+          data-test="ColorScheme"
+          onChange={(val: any) => onOptionsChange({ color_scheme: val })}>
+          {Object.keys(AllColorPalettes).map(option => (
+            // @ts-expect-error ts-migrate(2339) FIXME: Property 'Option' does not exist on type '({ class... Remove this comment to see the full error message
+            <Select.Option data-test={`ColorOption${option}`} key={option} value={option}>{option}</Select.Option>
+          ))}
+        </Select>
+      </Section>
+      <Table showHeader={false} dataSource={series} columns={columns} pagination={false} />
+    </React.Fragment>
+  );
}

PieColorsSettings.propTypes = EditorPropTypes;
@@ -17,6 +17,7 @@ const DEFAULT_OPTIONS = {
  sizemode: "diameter",
  coefficient: 1,
  piesort: true,
+  color_scheme: "Redash",

  // showDataLabels: false, // depends on chart type
  numberFormat: "0,0[.]00000",
@@ -14,7 +14,8 @@
    "columnMapping": {
      "x": "x",
      "y": "y"
-    }
+    },
+    "color_scheme": "Redash"
  },
  "data": [
    {
@@ -47,7 +48,8 @@
      "textfont": { "color": ["#ffffff", "#ffffff", "#333333", "#ffffff"] },
      "name": "a",
      "direction": "counterclockwise",
-      "domain": { "x": [0, 0.98], "y": [0, 0.9] }
+      "domain": { "x": [0, 0.98], "y": [0, 0.9] },
+      "color_scheme": "Redash"
    }
  ]
}
Some files were not shown because too many files have changed in this diff.