mirror of
https://github.com/getredash/redash.git
synced 2025-12-19 17:37:19 -05:00
Compare commits
136 Commits
24.09.0-de
...
master
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
43ee21ac20 | ||
|
|
262d46f465 | ||
|
|
bc68b1c38b | ||
|
|
4353a82c7a | ||
|
|
761eb0b68b | ||
|
|
9743820efe | ||
|
|
9d49e0457f | ||
|
|
b5781a8ebe | ||
|
|
b6f4159be9 | ||
|
|
d5fbf547cf | ||
|
|
772b160a79 | ||
|
|
bac2160e2a | ||
|
|
c5aa5da6a2 | ||
|
|
9503cc9fb8 | ||
|
|
b353057f9a | ||
|
|
8747d02bbe | ||
|
|
5b463b0d83 | ||
|
|
ea589ad477 | ||
|
|
617124850b | ||
|
|
1cc200843c | ||
|
|
e0410e2ffe | ||
|
|
7e39b3668d | ||
|
|
92f15a3ccb | ||
|
|
9a1d33381c | ||
|
|
56c06adc24 | ||
|
|
5e8915afe5 | ||
|
|
b8ebf49436 | ||
|
|
59951eda3d | ||
|
|
777153e7a0 | ||
|
|
47b1309f13 | ||
|
|
120250152f | ||
|
|
ac81f0b223 | ||
|
|
7838058953 | ||
|
|
f95156e924 | ||
|
|
74de676bdf | ||
|
|
2762f1fc85 | ||
|
|
438efd0826 | ||
|
|
e586ab708b | ||
|
|
24ca5135aa | ||
|
|
fae354fcce | ||
|
|
4ae372f022 | ||
|
|
0b5907f12b | ||
|
|
00a97d9266 | ||
|
|
35afe880a1 | ||
|
|
a6298f2753 | ||
|
|
e69283f488 | ||
|
|
09ed3c4b81 | ||
|
|
f5e2a4c0fc | ||
|
|
4e200b4a08 | ||
|
|
5ae1f70d9e | ||
|
|
3f781d262b | ||
|
|
a34c1591e3 | ||
|
|
9f76fda18c | ||
|
|
d8ae679937 | ||
|
|
f3b0b60abd | ||
|
|
df8be91a07 | ||
|
|
c9ddd2a7d6 | ||
|
|
6b1e910126 | ||
|
|
14550a9a6c | ||
|
|
b80c5f6a7c | ||
|
|
e46d44f208 | ||
|
|
a1a4bc9d3e | ||
|
|
0900178d24 | ||
|
|
5d31429ca8 | ||
|
|
2f35ceb803 | ||
|
|
8e6c02ecde | ||
|
|
231fd36d46 | ||
|
|
0b6a53a079 | ||
|
|
6167edf97c | ||
|
|
4ed0ad3c9c | ||
|
|
2375f0b05f | ||
|
|
eced377ae4 | ||
|
|
84262fe143 | ||
|
|
612eb8c630 | ||
|
|
866fb48afb | ||
|
|
353776e8e1 | ||
|
|
594e2f24ef | ||
|
|
3275a9e459 | ||
|
|
3bad8c8e8c | ||
|
|
d0af4499d6 | ||
|
|
4357ea56ae | ||
|
|
5df5ca87a2 | ||
|
|
8387fe6fcb | ||
|
|
e95de2ee4c | ||
|
|
71902e5933 | ||
|
|
53eab14cef | ||
|
|
925bb91d8e | ||
|
|
ec2ca6f986 | ||
|
|
96ea0194e8 | ||
|
|
2776992101 | ||
|
|
85f001982e | ||
|
|
d03a2c4096 | ||
|
|
8c5890482a | ||
|
|
10ce280a96 | ||
|
|
0dd7ac3d2e | ||
|
|
4ee53a9445 | ||
|
|
c08292d90e | ||
|
|
3142131cdd | ||
|
|
530c1a0734 | ||
|
|
52dc1769a1 | ||
|
|
b9583c0b48 | ||
|
|
89d7f54e90 | ||
|
|
d884da2b0b | ||
|
|
f7d485082c | ||
|
|
130ab1fe1a | ||
|
|
2ff83679fe | ||
|
|
de49b73855 | ||
|
|
c12e68f5d1 | ||
|
|
baa9bbd505 | ||
|
|
349cd5d031 | ||
|
|
49277d27f8 | ||
|
|
2aae5705c9 | ||
|
|
38d0579660 | ||
|
|
673ba769c7 | ||
|
|
b922730482 | ||
|
|
ba973eb1fe | ||
|
|
d8dde6c544 | ||
|
|
d359a716a7 | ||
|
|
ba4293912b | ||
|
|
ee359120ee | ||
|
|
04a25f4327 | ||
|
|
7c22756e66 | ||
|
|
a03668f5b2 | ||
|
|
e4a841a0c5 | ||
|
|
38dc31a49b | ||
|
|
c42b15125c | ||
|
|
590d39bc8d | ||
|
|
79bbb248bb | ||
|
|
5cf0b7b038 | ||
|
|
fb1a056561 | ||
|
|
75e1ce4c9c | ||
|
|
d6c6e3bb7a | ||
|
|
821c1a9488 | ||
|
|
76eeea1f64 | ||
|
|
2ab07f9fc3 | ||
|
|
a85b9d7801 |
@@ -18,7 +18,7 @@ services:
|
||||
image: redis:7-alpine
|
||||
restart: unless-stopped
|
||||
postgres:
|
||||
image: pgautoupgrade/pgautoupgrade:latest
|
||||
image: postgres:18-alpine
|
||||
command: "postgres -c fsync=off -c full_page_writes=off -c synchronous_commit=OFF"
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
|
||||
@@ -66,7 +66,7 @@ services:
|
||||
image: redis:7-alpine
|
||||
restart: unless-stopped
|
||||
postgres:
|
||||
image: pgautoupgrade/pgautoupgrade:latest
|
||||
image: postgres:18-alpine
|
||||
command: "postgres -c fsync=off -c full_page_writes=off -c synchronous_commit=OFF"
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
|
||||
26
.github/workflows/ci.yml
vendored
26
.github/workflows/ci.yml
vendored
@@ -3,7 +3,7 @@ on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
pull_request_target:
|
||||
pull_request:
|
||||
branches:
|
||||
- master
|
||||
env:
|
||||
@@ -60,15 +60,17 @@ jobs:
|
||||
mkdir -p /tmp/test-results/unit-tests
|
||||
docker cp tests:/app/coverage.xml ./coverage.xml
|
||||
docker cp tests:/app/junit.xml /tmp/test-results/unit-tests/results.xml
|
||||
- name: Upload coverage reports to Codecov
|
||||
uses: codecov/codecov-action@v3
|
||||
# - name: Upload coverage reports to Codecov
|
||||
# uses: codecov/codecov-action@v3
|
||||
# with:
|
||||
# token: ${{ secrets.CODECOV_TOKEN }}
|
||||
- name: Store Test Results
|
||||
uses: actions/upload-artifact@v3
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: test-results
|
||||
name: backend-test-results
|
||||
path: /tmp/test-results
|
||||
- name: Store Coverage Results
|
||||
uses: actions/upload-artifact@v3
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: coverage
|
||||
path: coverage.xml
|
||||
@@ -94,9 +96,9 @@ jobs:
|
||||
- name: Run Lint
|
||||
run: yarn lint:ci
|
||||
- name: Store Test Results
|
||||
uses: actions/upload-artifact@v3
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: test-results
|
||||
name: frontend-test-results
|
||||
path: /tmp/test-results
|
||||
|
||||
frontend-unit-tests:
|
||||
@@ -132,9 +134,9 @@ jobs:
|
||||
COMPOSE_PROJECT_NAME: cypress
|
||||
CYPRESS_INSTALL_BINARY: 0
|
||||
PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: 1
|
||||
PERCY_TOKEN: ${{ secrets.PERCY_TOKEN }}
|
||||
CYPRESS_PROJECT_ID: ${{ secrets.CYPRESS_PROJECT_ID }}
|
||||
CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
|
||||
# PERCY_TOKEN: ${{ secrets.PERCY_TOKEN }}
|
||||
# CYPRESS_PROJECT_ID: ${{ secrets.CYPRESS_PROJECT_ID }}
|
||||
# CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
|
||||
steps:
|
||||
- if: github.event.pull_request.mergeable == 'false'
|
||||
name: Exit if PR is not mergeable
|
||||
@@ -169,7 +171,7 @@ jobs:
|
||||
- name: Copy Code Coverage Results
|
||||
run: docker cp cypress:/usr/src/app/coverage ./coverage || true
|
||||
- name: Store Coverage Results
|
||||
uses: actions/upload-artifact@v3
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: coverage
|
||||
path: coverage
|
||||
|
||||
3
.github/workflows/periodic-snapshot.yml
vendored
3
.github/workflows/periodic-snapshot.yml
vendored
@@ -2,7 +2,7 @@ name: Periodic Snapshot
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '10 0 1 * *' # 10 minutes after midnight on the first of every month
|
||||
- cron: '10 0 1 * *' # 10 minutes after midnight on the first day of every month
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
bump:
|
||||
@@ -24,6 +24,7 @@ permissions:
|
||||
jobs:
|
||||
bump-version-and-tag:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.ref_name == github.event.repository.default_branch
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
|
||||
117
.github/workflows/preview-image.yml
vendored
117
.github/workflows/preview-image.yml
vendored
@@ -4,6 +4,15 @@ on:
|
||||
tags:
|
||||
- '*-dev'
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
dockerRepository:
|
||||
description: 'Docker repository'
|
||||
required: true
|
||||
default: 'preview'
|
||||
type: choice
|
||||
options:
|
||||
- preview
|
||||
- redash
|
||||
|
||||
env:
|
||||
NODE_VERSION: 18
|
||||
@@ -23,6 +32,9 @@ jobs:
|
||||
elif [[ "${{ secrets.DOCKER_PASS }}" == '' ]]; then
|
||||
echo 'Docker password is empty. Skipping build+push'
|
||||
echo skip=true >> "$GITHUB_OUTPUT"
|
||||
elif [[ "${{ vars.DOCKER_REPOSITORY }}" == '' ]]; then
|
||||
echo 'Docker repository is empty. Skipping build+push'
|
||||
echo skip=true >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
echo 'Docker user and password are set and branch is `master`.'
|
||||
echo 'Building + pushing `preview` image.'
|
||||
@@ -30,7 +42,20 @@ jobs:
|
||||
fi
|
||||
|
||||
build-docker-image:
|
||||
runs-on: ubuntu-22.04
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
arch:
|
||||
- amd64
|
||||
- arm64
|
||||
include:
|
||||
- arch: amd64
|
||||
os: ubuntu-22.04
|
||||
- arch: arm64
|
||||
os: ubuntu-22.04-arm
|
||||
outputs:
|
||||
VERSION_TAG: ${{ steps.version.outputs.VERSION_TAG }}
|
||||
needs:
|
||||
- build-skip-check
|
||||
if: needs.build-skip-check.outputs.skip == 'false'
|
||||
@@ -45,11 +70,6 @@ jobs:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
cache: 'yarn'
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
with:
|
||||
platforms: arm64
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
@@ -60,6 +80,8 @@ jobs:
|
||||
password: ${{ secrets.DOCKER_PASS }}
|
||||
|
||||
- name: Install Dependencies
|
||||
env:
|
||||
PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: true
|
||||
run: |
|
||||
npm install --global --force yarn@1.22.22
|
||||
yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
|
||||
@@ -72,23 +94,92 @@ jobs:
|
||||
VERSION_TAG=$(jq -r .version package.json)
|
||||
echo "VERSION_TAG=$VERSION_TAG" >> "$GITHUB_OUTPUT"
|
||||
|
||||
# TODO: We can use GitHub Actions's matrix option to reduce the build time.
|
||||
- name: Build and push preview image to Docker Hub
|
||||
id: build-preview
|
||||
uses: docker/build-push-action@v4
|
||||
if: ${{ github.event.inputs.dockerRepository == 'preview' || !github.event.workflow_run }}
|
||||
with:
|
||||
push: true
|
||||
tags: |
|
||||
redash/redash:preview
|
||||
redash/preview:${{ steps.version.outputs.VERSION_TAG }}
|
||||
${{ vars.DOCKER_REPOSITORY }}/redash
|
||||
${{ vars.DOCKER_REPOSITORY }}/preview
|
||||
context: .
|
||||
build-args: |
|
||||
test_all_deps=true
|
||||
cache-from: type=gha,scope=multi-platform
|
||||
cache-to: type=gha,mode=max,scope=multi-platform
|
||||
platforms: linux/amd64,linux/arm64
|
||||
outputs: type=image,push-by-digest=true,push=true
|
||||
cache-from: type=gha,scope=${{ matrix.arch }}
|
||||
cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
|
||||
env:
|
||||
DOCKER_CONTENT_TRUST: true
|
||||
|
||||
- name: Build and push release image to Docker Hub
|
||||
id: build-release
|
||||
uses: docker/build-push-action@v4
|
||||
if: ${{ github.event.inputs.dockerRepository == 'redash' }}
|
||||
with:
|
||||
tags: |
|
||||
${{ vars.DOCKER_REPOSITORY }}/redash:${{ steps.version.outputs.VERSION_TAG }}
|
||||
context: .
|
||||
build-args: |
|
||||
test_all_deps=true
|
||||
outputs: type=image,push-by-digest=false,push=true
|
||||
cache-from: type=gha,scope=${{ matrix.arch }}
|
||||
cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
|
||||
env:
|
||||
DOCKER_CONTENT_TRUST: true
|
||||
|
||||
- name: "Failure: output container logs to console"
|
||||
if: failure()
|
||||
run: docker compose logs
|
||||
|
||||
- name: Export digest
|
||||
run: |
|
||||
mkdir -p ${{ runner.temp }}/digests
|
||||
if [[ "${{ github.event.inputs.dockerRepository }}" == 'preview' || !github.event.workflow_run ]]; then
|
||||
digest="${{ steps.build-preview.outputs.digest}}"
|
||||
else
|
||||
digest="${{ steps.build-release.outputs.digest}}"
|
||||
fi
|
||||
touch "${{ runner.temp }}/digests/${digest#sha256:}"
|
||||
|
||||
- name: Upload digest
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: digests-${{ matrix.arch }}
|
||||
path: ${{ runner.temp }}/digests/*
|
||||
if-no-files-found: error
|
||||
|
||||
merge-docker-image:
|
||||
runs-on: ubuntu-22.04
|
||||
needs: build-docker-image
|
||||
steps:
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ vars.DOCKER_USER }}
|
||||
password: ${{ secrets.DOCKER_PASS }}
|
||||
|
||||
- name: Download digests
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
path: ${{ runner.temp }}/digests
|
||||
pattern: digests-*
|
||||
merge-multiple: true
|
||||
|
||||
- name: Create and push manifest for the preview image
|
||||
if: ${{ github.event.inputs.dockerRepository == 'preview' || !github.event.workflow_run }}
|
||||
working-directory: ${{ runner.temp }}/digests
|
||||
run: |
|
||||
docker buildx imagetools create -t ${{ vars.DOCKER_REPOSITORY }}/redash:preview \
|
||||
$(printf '${{ vars.DOCKER_REPOSITORY }}/redash:preview@sha256:%s ' *)
|
||||
docker buildx imagetools create -t ${{ vars.DOCKER_REPOSITORY }}/preview:${{ needs.build-docker-image.outputs.VERSION_TAG }} \
|
||||
$(printf '${{ vars.DOCKER_REPOSITORY }}/preview:${{ needs.build-docker-image.outputs.VERSION_TAG }}@sha256:%s ' *)
|
||||
|
||||
- name: Create and push manifest for the release image
|
||||
if: ${{ github.event.inputs.dockerRepository == 'redash' }}
|
||||
working-directory: ${{ runner.temp }}/digests
|
||||
run: |
|
||||
docker buildx imagetools create -t ${{ vars.DOCKER_REPOSITORY }}/redash:${{ needs.build-docker-image.outputs.VERSION_TAG }} \
|
||||
$(printf '${{ vars.DOCKER_REPOSITORY }}/redash:${{ needs.build-docker-image.outputs.VERSION_TAG }}@sha256:%s ' *)
|
||||
|
||||
36
.github/workflows/restyled.yml
vendored
Normal file
36
.github/workflows/restyled.yml
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
name: Restyled
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
restyled:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.sha }}
|
||||
|
||||
- uses: restyled-io/actions/setup@v4
|
||||
- id: restyler
|
||||
uses: restyled-io/actions/run@v4
|
||||
with:
|
||||
fail-on-differences: true
|
||||
|
||||
- if: |
|
||||
!cancelled() &&
|
||||
steps.restyler.outputs.success == 'true' &&
|
||||
github.event.pull_request.head.repo.full_name == github.repository
|
||||
uses: peter-evans/create-pull-request@v6
|
||||
with:
|
||||
base: ${{ steps.restyler.outputs.restyled-base }}
|
||||
branch: ${{ steps.restyler.outputs.restyled-head }}
|
||||
title: ${{ steps.restyler.outputs.restyled-title }}
|
||||
body: ${{ steps.restyler.outputs.restyled-body }}
|
||||
labels: "restyled"
|
||||
reviewers: ${{ github.event.pull_request.user.login }}
|
||||
delete-branch: true
|
||||
43
Dockerfile
43
Dockerfile
@@ -27,7 +27,15 @@ RUN if [ "x$skip_frontend_build" = "x" ] ; then yarn --frozen-lockfile --network
|
||||
|
||||
COPY --chown=redash client /frontend/client
|
||||
COPY --chown=redash webpack.config.js /frontend/
|
||||
RUN if [ "x$skip_frontend_build" = "x" ] ; then yarn build; else mkdir -p /frontend/client/dist && touch /frontend/client/dist/multi_org.html && touch /frontend/client/dist/index.html; fi
|
||||
RUN <<EOF
|
||||
if [ "x$skip_frontend_build" = "x" ]; then
|
||||
yarn build
|
||||
else
|
||||
mkdir -p /frontend/client/dist
|
||||
touch /frontend/client/dist/multi_org.html
|
||||
touch /frontend/client/dist/index.html
|
||||
fi
|
||||
EOF
|
||||
|
||||
FROM python:3.10-slim-bookworm
|
||||
|
||||
@@ -67,24 +75,27 @@ RUN apt-get update && \
|
||||
|
||||
ARG TARGETPLATFORM
|
||||
ARG databricks_odbc_driver_url=https://databricks-bi-artifacts.s3.us-east-2.amazonaws.com/simbaspark-drivers/odbc/2.6.26/SimbaSparkODBC-2.6.26.1045-Debian-64bit.zip
|
||||
RUN if [ "$TARGETPLATFORM" = "linux/amd64" ]; then \
|
||||
curl https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor -o /usr/share/keyrings/microsoft-prod.gpg \
|
||||
&& curl https://packages.microsoft.com/config/debian/12/prod.list > /etc/apt/sources.list.d/mssql-release.list \
|
||||
&& apt-get update \
|
||||
&& ACCEPT_EULA=Y apt-get install -y --no-install-recommends msodbcsql18 \
|
||||
&& apt-get clean \
|
||||
&& rm -rf /var/lib/apt/lists/* \
|
||||
&& curl "$databricks_odbc_driver_url" --location --output /tmp/simba_odbc.zip \
|
||||
&& chmod 600 /tmp/simba_odbc.zip \
|
||||
&& unzip /tmp/simba_odbc.zip -d /tmp/simba \
|
||||
&& dpkg -i /tmp/simba/*.deb \
|
||||
&& printf "[Simba]\nDriver = /opt/simba/spark/lib/64/libsparkodbc_sb64.so" >> /etc/odbcinst.ini \
|
||||
&& rm /tmp/simba_odbc.zip \
|
||||
&& rm -rf /tmp/simba; fi
|
||||
RUN <<EOF
|
||||
if [ "$TARGETPLATFORM" = "linux/amd64" ]; then
|
||||
curl https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor -o /usr/share/keyrings/microsoft-prod.gpg
|
||||
curl https://packages.microsoft.com/config/debian/12/prod.list > /etc/apt/sources.list.d/mssql-release.list
|
||||
apt-get update
|
||||
ACCEPT_EULA=Y apt-get install -y --no-install-recommends msodbcsql18
|
||||
apt-get clean
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
curl "$databricks_odbc_driver_url" --location --output /tmp/simba_odbc.zip
|
||||
chmod 600 /tmp/simba_odbc.zip
|
||||
unzip /tmp/simba_odbc.zip -d /tmp/simba
|
||||
dpkg -i /tmp/simba/*.deb
|
||||
printf "[Simba]\nDriver = /opt/simba/spark/lib/64/libsparkodbc_sb64.so" >> /etc/odbcinst.ini
|
||||
rm /tmp/simba_odbc.zip
|
||||
rm -rf /tmp/simba
|
||||
fi
|
||||
EOF
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
ENV POETRY_VERSION=1.8.3
|
||||
ENV POETRY_VERSION=2.1.4
|
||||
ENV POETRY_HOME=/etc/poetry
|
||||
ENV POETRY_VIRTUALENVS_CREATE=false
|
||||
RUN curl -sSL https://install.python-poetry.org | python3 -
|
||||
|
||||
13
Makefile
13
Makefile
@@ -1,10 +1,14 @@
|
||||
.PHONY: compose_build up test_db create_database clean clean-all down tests lint backend-unit-tests frontend-unit-tests test build watch start redis-cli bash
|
||||
.PHONY: compose_build up test_db create_database clean down tests lint backend-unit-tests frontend-unit-tests test build watch start redis-cli bash
|
||||
|
||||
compose_build: .env
|
||||
COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker compose build
|
||||
|
||||
up:
|
||||
COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker compose up -d --build
|
||||
docker compose up -d redis postgres --remove-orphans
|
||||
docker compose exec -u postgres postgres psql postgres --csv \
|
||||
-1tqc "SELECT table_name FROM information_schema.tables WHERE table_name = 'organizations'" 2> /dev/null \
|
||||
| grep -q "organizations" || make create_database
|
||||
COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker compose up -d --build --remove-orphans
|
||||
|
||||
test_db:
|
||||
@for i in `seq 1 5`; do \
|
||||
@@ -28,11 +32,6 @@ clean:
|
||||
docker image prune --force
|
||||
docker volume prune --force
|
||||
|
||||
clean-all: clean
|
||||
docker image rm --force \
|
||||
redash/redash:10.1.0.b50633 redis:7-alpine maildev/maildev:latest \
|
||||
pgautoupgrade/pgautoupgrade:15-alpine3.8 pgautoupgrade/pgautoupgrade:latest
|
||||
|
||||
down:
|
||||
docker compose down
|
||||
|
||||
|
||||
@@ -46,7 +46,7 @@ server() {
|
||||
MAX_REQUESTS=${MAX_REQUESTS:-1000}
|
||||
MAX_REQUESTS_JITTER=${MAX_REQUESTS_JITTER:-100}
|
||||
TIMEOUT=${REDASH_GUNICORN_TIMEOUT:-60}
|
||||
exec /usr/local/bin/gunicorn -b 0.0.0.0:5000 --name redash -w${REDASH_WEB_WORKERS:-4} redash.wsgi:app --max-requests $MAX_REQUESTS --max-requests-jitter $MAX_REQUESTS_JITTER --timeout $TIMEOUT
|
||||
exec /usr/local/bin/gunicorn -b 0.0.0.0:5000 --name redash -w${REDASH_WEB_WORKERS:-4} redash.wsgi:app --max-requests $MAX_REQUESTS --max-requests-jitter $MAX_REQUESTS_JITTER --timeout $TIMEOUT --limit-request-line ${REDASH_GUNICORN_LIMIT_REQUEST_LINE:-0}
|
||||
}
|
||||
|
||||
create_db() {
|
||||
@@ -67,7 +67,7 @@ help() {
|
||||
echo ""
|
||||
echo "shell -- open shell"
|
||||
echo "dev_server -- start Flask development server with debugger and auto reload"
|
||||
echo "debug -- start Flask development server with remote debugger via ptvsd"
|
||||
echo "debug -- start Flask development server with remote debugger via debugpy"
|
||||
echo "create_db -- create database tables"
|
||||
echo "manage -- CLI to manage redash"
|
||||
echo "tests -- run tests"
|
||||
|
||||
@@ -15,7 +15,7 @@ body {
|
||||
display: table;
|
||||
width: 100%;
|
||||
padding: 10px;
|
||||
height: calc(100vh - 116px);
|
||||
height: calc(100% - 116px);
|
||||
}
|
||||
|
||||
@media (min-width: 992px) {
|
||||
|
||||
BIN
client/app/assets/images/db-logos/duckdb.png
Normal file
BIN
client/app/assets/images/db-logos/duckdb.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 3.0 KiB |
@@ -20,7 +20,7 @@ html {
|
||||
|
||||
html,
|
||||
body {
|
||||
min-height: 100vh;
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
body {
|
||||
@@ -35,7 +35,7 @@ body {
|
||||
}
|
||||
|
||||
#application-root {
|
||||
min-height: 100vh;
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
#application-root,
|
||||
|
||||
@@ -10,7 +10,7 @@
|
||||
vertical-align: middle;
|
||||
display: inline-block;
|
||||
width: 1px;
|
||||
height: 100vh;
|
||||
height: 100%;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -8,7 +8,7 @@ body.fixed-layout {
|
||||
padding-bottom: 0;
|
||||
|
||||
width: 100vw;
|
||||
height: 100vh;
|
||||
height: 100%;
|
||||
|
||||
.application-layout-content > div {
|
||||
display: flex;
|
||||
@@ -90,7 +90,7 @@ body.fixed-layout {
|
||||
.embed__vis {
|
||||
display: flex;
|
||||
flex-flow: column;
|
||||
height: calc(~'100vh - 25px');
|
||||
height: calc(~'100% - 25px');
|
||||
|
||||
> .embed-heading {
|
||||
flex: 0 0 auto;
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import React from "react";
|
||||
import { clientConfig } from "@/services/auth";
|
||||
import Link from "@/components/Link";
|
||||
import { clientConfig, currentUser } from "@/services/auth";
|
||||
import frontendVersion from "@/version.json";
|
||||
|
||||
export default function VersionInfo() {
|
||||
@@ -9,6 +10,15 @@ export default function VersionInfo() {
|
||||
Version: {clientConfig.version}
|
||||
{frontendVersion !== clientConfig.version && ` (${frontendVersion.substring(0, 8)})`}
|
||||
</div>
|
||||
{clientConfig.newVersionAvailable && currentUser.hasPermission("super_admin") && (
|
||||
<div className="m-t-10">
|
||||
{/* eslint-disable react/jsx-no-target-blank */}
|
||||
<Link href="https://version.redash.io/" className="update-available" target="_blank" rel="noopener">
|
||||
Update Available <i className="fa fa-external-link m-l-5" aria-hidden="true" />
|
||||
<span className="sr-only">(opens in a new tab)</span>
|
||||
</Link>
|
||||
</div>
|
||||
)}
|
||||
</React.Fragment>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -7,10 +7,10 @@ body #application-root {
|
||||
flex-direction: row;
|
||||
justify-content: stretch;
|
||||
padding-bottom: 0 !important;
|
||||
height: 100vh;
|
||||
height: 100%;
|
||||
|
||||
.application-layout-side-menu {
|
||||
height: 100vh;
|
||||
height: 100%;
|
||||
position: relative;
|
||||
|
||||
@media @mobileBreakpoint {
|
||||
@@ -47,6 +47,10 @@ body #application-root {
|
||||
}
|
||||
}
|
||||
|
||||
body > section {
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
body.fixed-layout #application-root {
|
||||
.application-layout-content {
|
||||
padding-bottom: 0;
|
||||
|
||||
79
client/app/components/BeaconConsent.jsx
Normal file
79
client/app/components/BeaconConsent.jsx
Normal file
@@ -0,0 +1,79 @@
|
||||
import React, { useState } from "react";
|
||||
import Card from "antd/lib/card";
|
||||
import Button from "antd/lib/button";
|
||||
import Typography from "antd/lib/typography";
|
||||
import { clientConfig } from "@/services/auth";
|
||||
import Link from "@/components/Link";
|
||||
import HelpTrigger from "@/components/HelpTrigger";
|
||||
import DynamicComponent from "@/components/DynamicComponent";
|
||||
import OrgSettings from "@/services/organizationSettings";
|
||||
|
||||
const Text = Typography.Text;
|
||||
|
||||
function BeaconConsent() {
|
||||
const [hide, setHide] = useState(false);
|
||||
|
||||
if (!clientConfig.showBeaconConsentMessage || hide) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const hideConsentCard = () => {
|
||||
clientConfig.showBeaconConsentMessage = false;
|
||||
setHide(true);
|
||||
};
|
||||
|
||||
const confirmConsent = (confirm) => {
|
||||
let message = "🙏 Thank you.";
|
||||
|
||||
if (!confirm) {
|
||||
message = "Settings Saved.";
|
||||
}
|
||||
|
||||
OrgSettings.save({ beacon_consent: confirm }, message)
|
||||
// .then(() => {
|
||||
// // const settings = get(response, 'settings');
|
||||
// // this.setState({ settings, formValues: { ...settings } });
|
||||
// })
|
||||
.finally(hideConsentCard);
|
||||
};
|
||||
|
||||
return (
|
||||
<DynamicComponent name="BeaconConsent">
|
||||
<div className="m-t-10 tiled">
|
||||
<Card
|
||||
title={
|
||||
<>
|
||||
Would you be ok with sharing anonymous usage data with the Redash team?{" "}
|
||||
<HelpTrigger type="USAGE_DATA_SHARING" />
|
||||
</>
|
||||
}
|
||||
bordered={false}
|
||||
>
|
||||
<Text>Help Redash improve by automatically sending anonymous usage data:</Text>
|
||||
<div className="m-t-5">
|
||||
<ul>
|
||||
<li> Number of users, queries, dashboards, alerts, widgets and visualizations.</li>
|
||||
<li> Types of data sources, alert destinations and visualizations.</li>
|
||||
</ul>
|
||||
</div>
|
||||
<Text>All data is aggregated and will never include any sensitive or private data.</Text>
|
||||
<div className="m-t-5">
|
||||
<Button type="primary" className="m-r-5" onClick={() => confirmConsent(true)}>
|
||||
Yes
|
||||
</Button>
|
||||
<Button type="default" onClick={() => confirmConsent(false)}>
|
||||
No
|
||||
</Button>
|
||||
</div>
|
||||
<div className="m-t-15">
|
||||
<Text type="secondary">
|
||||
You can change this setting anytime from the <Link href="settings/general">Settings</Link> page.
|
||||
</Text>
|
||||
</div>
|
||||
</Card>
|
||||
</div>
|
||||
</DynamicComponent>
|
||||
);
|
||||
}
|
||||
|
||||
export default BeaconConsent;
|
||||
@@ -23,6 +23,7 @@ export const TYPES = mapValues(
|
||||
VALUE_SOURCE_OPTIONS: ["/user-guide/querying/query-parameters#Value-Source-Options", "Guide: Value Source Options"],
|
||||
SHARE_DASHBOARD: ["/user-guide/dashboards/sharing-dashboards", "Guide: Sharing and Embedding Dashboards"],
|
||||
AUTHENTICATION_OPTIONS: ["/user-guide/users/authentication-options", "Guide: Authentication Options"],
|
||||
USAGE_DATA_SHARING: ["/open-source/admin-guide/usage-data", "Help: Anonymous Usage Data Sharing"],
|
||||
DS_ATHENA: ["/data-sources/amazon-athena-setup", "Guide: Help Setting up Amazon Athena"],
|
||||
DS_BIGQUERY: ["/data-sources/bigquery-setup", "Guide: Help Setting up BigQuery"],
|
||||
DS_URL: ["/data-sources/querying-urls", "Guide: Help Setting up URL"],
|
||||
@@ -100,7 +101,7 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
|
||||
clearTimeout(this.iframeLoadingTimeout);
|
||||
}
|
||||
|
||||
loadIframe = url => {
|
||||
loadIframe = (url) => {
|
||||
clearTimeout(this.iframeLoadingTimeout);
|
||||
this.setState({ loading: true, error: false });
|
||||
|
||||
@@ -115,8 +116,8 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
|
||||
clearTimeout(this.iframeLoadingTimeout);
|
||||
};
|
||||
|
||||
onPostMessageReceived = event => {
|
||||
if (!some(allowedDomains, domain => startsWith(event.origin, domain))) {
|
||||
onPostMessageReceived = (event) => {
|
||||
if (!some(allowedDomains, (domain) => startsWith(event.origin, domain))) {
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -133,7 +134,7 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
|
||||
return helpTriggerType ? helpTriggerType[0] : this.props.href;
|
||||
};
|
||||
|
||||
openDrawer = e => {
|
||||
openDrawer = (e) => {
|
||||
// keep "open in new tab" behavior
|
||||
if (!e.shiftKey && !e.ctrlKey && !e.metaKey) {
|
||||
e.preventDefault();
|
||||
@@ -143,7 +144,7 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
|
||||
}
|
||||
};
|
||||
|
||||
closeDrawer = event => {
|
||||
closeDrawer = (event) => {
|
||||
if (event) {
|
||||
event.preventDefault();
|
||||
}
|
||||
@@ -160,7 +161,7 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
|
||||
const tooltip = get(types, `${this.props.type}[1]`, this.props.title);
|
||||
const className = cx("help-trigger", this.props.className);
|
||||
const url = this.state.currentUrl;
|
||||
const isAllowedDomain = some(allowedDomains, domain => startsWith(url || targetUrl, domain));
|
||||
const isAllowedDomain = some(allowedDomains, (domain) => startsWith(url || targetUrl, domain));
|
||||
const shouldRenderAsLink = this.props.renderAsLink || !isAllowedDomain;
|
||||
|
||||
return (
|
||||
@@ -179,13 +180,15 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
|
||||
)}
|
||||
</>
|
||||
) : null
|
||||
}>
|
||||
}
|
||||
>
|
||||
<Link
|
||||
href={url || this.getUrl()}
|
||||
className={className}
|
||||
rel="noopener noreferrer"
|
||||
target="_blank"
|
||||
onClick={shouldRenderAsLink ? () => {} : this.openDrawer}>
|
||||
onClick={shouldRenderAsLink ? () => {} : this.openDrawer}
|
||||
>
|
||||
{this.props.children}
|
||||
</Link>
|
||||
</Tooltip>
|
||||
@@ -196,7 +199,8 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
|
||||
visible={this.state.visible}
|
||||
className={cx("help-drawer", drawerClassName)}
|
||||
destroyOnClose
|
||||
width={400}>
|
||||
width={400}
|
||||
>
|
||||
<div className="drawer-wrapper">
|
||||
<div className="drawer-menu">
|
||||
{url && (
|
||||
|
||||
@@ -69,7 +69,7 @@ UserPreviewCard.defaultProps = {
|
||||
// DataSourcePreviewCard
|
||||
|
||||
export function DataSourcePreviewCard({ dataSource, withLink, children, ...props }) {
|
||||
const imageUrl = `static/images/db-logos/${dataSource.type}.png`;
|
||||
const imageUrl = `/static/images/db-logos/${dataSource.type}.png`;
|
||||
const title = withLink ? <Link href={"data_sources/" + dataSource.id}>{dataSource.name}</Link> : dataSource.name;
|
||||
return (
|
||||
<PreviewCard {...props} imageUrl={imageUrl} title={title}>
|
||||
|
||||
@@ -51,7 +51,7 @@
|
||||
right: 0;
|
||||
background: linear-gradient(to bottom, transparent, transparent 2px, #f6f8f9 2px, #f6f8f9 5px),
|
||||
linear-gradient(to left, #b3babf, #b3babf 1px, transparent 1px, transparent);
|
||||
background-size: calc((100% + 15px) / 6) 5px;
|
||||
background-size: calc((100% + 15px) / 12) 5px;
|
||||
background-position: -7px 1px;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,121 +9,85 @@ const DYNAMIC_DATE_OPTIONS = [
|
||||
name: "This week",
|
||||
value: getDynamicDateRangeFromString("d_this_week"),
|
||||
label: () =>
|
||||
getDynamicDateRangeFromString("d_this_week")
|
||||
.value()[0]
|
||||
.format("MMM D") +
|
||||
getDynamicDateRangeFromString("d_this_week").value()[0].format("MMM D") +
|
||||
" - " +
|
||||
getDynamicDateRangeFromString("d_this_week")
|
||||
.value()[1]
|
||||
.format("MMM D"),
|
||||
getDynamicDateRangeFromString("d_this_week").value()[1].format("MMM D"),
|
||||
},
|
||||
{
|
||||
name: "This month",
|
||||
value: getDynamicDateRangeFromString("d_this_month"),
|
||||
label: () =>
|
||||
getDynamicDateRangeFromString("d_this_month")
|
||||
.value()[0]
|
||||
.format("MMMM"),
|
||||
label: () => getDynamicDateRangeFromString("d_this_month").value()[0].format("MMMM"),
|
||||
},
|
||||
{
|
||||
name: "This year",
|
||||
value: getDynamicDateRangeFromString("d_this_year"),
|
||||
label: () =>
|
||||
getDynamicDateRangeFromString("d_this_year")
|
||||
.value()[0]
|
||||
.format("YYYY"),
|
||||
label: () => getDynamicDateRangeFromString("d_this_year").value()[0].format("YYYY"),
|
||||
},
|
||||
{
|
||||
name: "Last week",
|
||||
value: getDynamicDateRangeFromString("d_last_week"),
|
||||
label: () =>
|
||||
getDynamicDateRangeFromString("d_last_week")
|
||||
.value()[0]
|
||||
.format("MMM D") +
|
||||
getDynamicDateRangeFromString("d_last_week").value()[0].format("MMM D") +
|
||||
" - " +
|
||||
getDynamicDateRangeFromString("d_last_week")
|
||||
.value()[1]
|
||||
.format("MMM D"),
|
||||
getDynamicDateRangeFromString("d_last_week").value()[1].format("MMM D"),
|
||||
},
|
||||
{
|
||||
name: "Last month",
|
||||
value: getDynamicDateRangeFromString("d_last_month"),
|
||||
label: () =>
|
||||
getDynamicDateRangeFromString("d_last_month")
|
||||
.value()[0]
|
||||
.format("MMMM"),
|
||||
label: () => getDynamicDateRangeFromString("d_last_month").value()[0].format("MMMM"),
|
||||
},
|
||||
{
|
||||
name: "Last year",
|
||||
value: getDynamicDateRangeFromString("d_last_year"),
|
||||
label: () =>
|
||||
getDynamicDateRangeFromString("d_last_year")
|
||||
.value()[0]
|
||||
.format("YYYY"),
|
||||
label: () => getDynamicDateRangeFromString("d_last_year").value()[0].format("YYYY"),
|
||||
},
|
||||
{
|
||||
name: "Last 7 days",
|
||||
value: getDynamicDateRangeFromString("d_last_7_days"),
|
||||
label: () =>
|
||||
getDynamicDateRangeFromString("d_last_7_days")
|
||||
.value()[0]
|
||||
.format("MMM D") + " - Today",
|
||||
label: () => getDynamicDateRangeFromString("d_last_7_days").value()[0].format("MMM D") + " - Today",
|
||||
},
|
||||
{
|
||||
name: "Last 14 days",
|
||||
value: getDynamicDateRangeFromString("d_last_14_days"),
|
||||
label: () =>
|
||||
getDynamicDateRangeFromString("d_last_14_days")
|
||||
.value()[0]
|
||||
.format("MMM D") + " - Today",
|
||||
label: () => getDynamicDateRangeFromString("d_last_14_days").value()[0].format("MMM D") + " - Today",
|
||||
},
|
||||
{
|
||||
name: "Last 30 days",
|
||||
value: getDynamicDateRangeFromString("d_last_30_days"),
|
||||
label: () =>
|
||||
getDynamicDateRangeFromString("d_last_30_days")
|
||||
.value()[0]
|
||||
.format("MMM D") + " - Today",
|
||||
label: () => getDynamicDateRangeFromString("d_last_30_days").value()[0].format("MMM D") + " - Today",
|
||||
},
|
||||
{
|
||||
name: "Last 60 days",
|
||||
value: getDynamicDateRangeFromString("d_last_60_days"),
|
||||
label: () =>
|
||||
getDynamicDateRangeFromString("d_last_60_days")
|
||||
.value()[0]
|
||||
.format("MMM D") + " - Today",
|
||||
label: () => getDynamicDateRangeFromString("d_last_60_days").value()[0].format("MMM D") + " - Today",
|
||||
},
|
||||
{
|
||||
name: "Last 90 days",
|
||||
value: getDynamicDateRangeFromString("d_last_90_days"),
|
||||
label: () =>
|
||||
getDynamicDateRangeFromString("d_last_90_days")
|
||||
.value()[0]
|
||||
.format("MMM D") + " - Today",
|
||||
label: () => getDynamicDateRangeFromString("d_last_90_days").value()[0].format("MMM D") + " - Today",
|
||||
},
|
||||
{
|
||||
name: "Last 12 months",
|
||||
value: getDynamicDateRangeFromString("d_last_12_months"),
|
||||
label: null,
|
||||
},
|
||||
{
|
||||
name: "Last 10 years",
|
||||
value: getDynamicDateRangeFromString("d_last_10_years"),
|
||||
label: null,
|
||||
},
|
||||
];
|
||||
|
||||
const DYNAMIC_DATETIME_OPTIONS = [
|
||||
{
|
||||
name: "Today",
|
||||
value: getDynamicDateRangeFromString("d_today"),
|
||||
label: () =>
|
||||
getDynamicDateRangeFromString("d_today")
|
||||
.value()[0]
|
||||
.format("MMM D"),
|
||||
label: () => getDynamicDateRangeFromString("d_today").value()[0].format("MMM D"),
|
||||
},
|
||||
{
|
||||
name: "Yesterday",
|
||||
value: getDynamicDateRangeFromString("d_yesterday"),
|
||||
label: () =>
|
||||
getDynamicDateRangeFromString("d_yesterday")
|
||||
.value()[0]
|
||||
.format("MMM D"),
|
||||
label: () => getDynamicDateRangeFromString("d_yesterday").value()[0].format("MMM D"),
|
||||
},
|
||||
...DYNAMIC_DATE_OPTIONS,
|
||||
];
|
||||
|
||||
@@ -96,7 +96,7 @@ function EmptyState({
|
||||
}, []);
|
||||
|
||||
// Show if `onboardingMode=false` or any requested step not completed
|
||||
const shouldShow = !onboardingMode || some(keys(isAvailable), step => isAvailable[step] && !isCompleted[step]);
|
||||
const shouldShow = !onboardingMode || some(keys(isAvailable), (step) => isAvailable[step] && !isCompleted[step]);
|
||||
|
||||
if (!shouldShow) {
|
||||
return null;
|
||||
@@ -181,7 +181,7 @@ function EmptyState({
|
||||
];
|
||||
|
||||
const stepsItems = getStepsItems ? getStepsItems(defaultStepsItems) : defaultStepsItems;
|
||||
const imageSource = illustrationPath ? illustrationPath : "static/images/illustrations/" + illustration + ".svg";
|
||||
const imageSource = illustrationPath ? illustrationPath : "/static/images/illustrations/" + illustration + ".svg";
|
||||
|
||||
return (
|
||||
<div className="empty-state-wrapper">
|
||||
@@ -196,7 +196,7 @@ function EmptyState({
|
||||
</div>
|
||||
<div className="empty-state__steps">
|
||||
<h4>Let's get started</h4>
|
||||
<ol>{stepsItems.map(item => item.node)}</ol>
|
||||
<ol>{stepsItems.map((item) => item.node)}</ol>
|
||||
{helpMessage}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -10,6 +10,10 @@ export interface PaginationOptions {
|
||||
itemsPerPage?: number;
|
||||
}
|
||||
|
||||
export interface SearchOptions {
|
||||
isServerSideFTS?: boolean;
|
||||
}
|
||||
|
||||
export interface Controller<I, P = any> {
|
||||
params: P; // TODO: Find out what params is (except merging with props)
|
||||
|
||||
@@ -18,7 +22,7 @@ export interface Controller<I, P = any> {
|
||||
|
||||
// search
|
||||
searchTerm?: string;
|
||||
updateSearch: (searchTerm: string) => void;
|
||||
updateSearch: (searchTerm: string, searchOptions?: SearchOptions) => void;
|
||||
|
||||
// tags
|
||||
selectedTags: string[];
|
||||
@@ -28,6 +32,7 @@ export interface Controller<I, P = any> {
|
||||
orderByField?: string;
|
||||
orderByReverse: boolean;
|
||||
toggleSorting: (orderByField: string) => void;
|
||||
setSorting: (orderByField: string, orderByReverse: boolean) => void;
|
||||
|
||||
// pagination
|
||||
page: number;
|
||||
@@ -93,7 +98,7 @@ export interface ItemsListWrappedComponentProps<I, P = any> {
|
||||
export function wrap<I, P = any>(
|
||||
WrappedComponent: React.ComponentType<ItemsListWrappedComponentProps<I>>,
|
||||
createItemsSource: () => ItemsSource,
|
||||
createStateStorage: () => StateStorage
|
||||
createStateStorage: ( { ...props }) => StateStorage
|
||||
) {
|
||||
class ItemsListWrapper extends React.Component<ItemsListWrapperProps, ItemsListWrapperState<I, P>> {
|
||||
private _itemsSource: ItemsSource;
|
||||
@@ -116,7 +121,7 @@ export function wrap<I, P = any>(
|
||||
constructor(props: ItemsListWrapperProps) {
|
||||
super(props);
|
||||
|
||||
const stateStorage = createStateStorage();
|
||||
const stateStorage = createStateStorage({ ...props });
|
||||
const itemsSource = createItemsSource();
|
||||
this._itemsSource = itemsSource;
|
||||
|
||||
@@ -139,11 +144,33 @@ export function wrap<I, P = any>(
|
||||
this.props.onError!(error);
|
||||
|
||||
const initialState = this.getState({ ...itemsSource.getState(), isLoaded: false });
|
||||
const { updatePagination, toggleSorting, updateSearch, updateSelectedTags, update, handleError } = itemsSource;
|
||||
const { updatePagination, toggleSorting, setSorting, updateSearch, updateSelectedTags, update, handleError } = itemsSource;
|
||||
|
||||
let isRunningUpdateSearch = false;
|
||||
let pendingUpdateSearchParams: any[] | null = null;
|
||||
const debouncedUpdateSearch = debounce(async (...params) => {
|
||||
// Avoid running multiple updateSerch concurrently.
|
||||
// If an updateSearch is already running, we save the params for the latest call.
|
||||
// When the current updateSearch is finished, we call debouncedUpdateSearch again with the saved params.
|
||||
if (isRunningUpdateSearch) {
|
||||
pendingUpdateSearchParams = params;
|
||||
return;
|
||||
}
|
||||
isRunningUpdateSearch = true;
|
||||
await updateSearch(...params);
|
||||
isRunningUpdateSearch = false;
|
||||
if (pendingUpdateSearchParams) {
|
||||
const pendingParams = pendingUpdateSearchParams;
|
||||
pendingUpdateSearchParams = null;
|
||||
debouncedUpdateSearch(...pendingParams);
|
||||
}
|
||||
}, 200);
|
||||
|
||||
this.state = {
|
||||
...initialState,
|
||||
toggleSorting, // eslint-disable-line react/no-unused-state
|
||||
updateSearch: debounce(updateSearch, 200), // eslint-disable-line react/no-unused-state
|
||||
setSorting, // eslint-disable-line react/no-unused-state
|
||||
updateSearch: debouncedUpdateSearch, // eslint-disable-line react/no-unused-state
|
||||
updateSelectedTags, // eslint-disable-line react/no-unused-state
|
||||
updatePagination, // eslint-disable-line react/no-unused-state
|
||||
update, // eslint-disable-line react/no-unused-state
|
||||
|
||||
@@ -39,14 +39,12 @@ export class ItemsSource {
|
||||
const customParams = {};
|
||||
const context = {
|
||||
...this.getCallbackContext(),
|
||||
setCustomParams: params => {
|
||||
setCustomParams: (params) => {
|
||||
extend(customParams, params);
|
||||
},
|
||||
};
|
||||
return this._beforeUpdate().then(() => {
|
||||
const fetchToken = Math.random()
|
||||
.toString(36)
|
||||
.substr(2);
|
||||
const fetchToken = Math.random().toString(36).substr(2);
|
||||
this._currentFetchToken = fetchToken;
|
||||
return this._fetcher
|
||||
.fetch(changes, state, context)
|
||||
@@ -59,7 +57,7 @@ export class ItemsSource {
|
||||
return this._afterUpdate();
|
||||
}
|
||||
})
|
||||
.catch(error => this.handleError(error));
|
||||
.catch((error) => this.handleError(error));
|
||||
});
|
||||
}
|
||||
|
||||
@@ -124,28 +122,35 @@ export class ItemsSource {
|
||||
});
|
||||
};
|
||||
|
||||
toggleSorting = orderByField => {
|
||||
toggleSorting = (orderByField) => {
|
||||
this._sorter.toggleField(orderByField);
|
||||
this._savedOrderByField = this._sorter.field;
|
||||
this._changed({ sorting: true });
|
||||
};
|
||||
|
||||
updateSearch = searchTerm => {
|
||||
setSorting = (orderByField, orderByReverse) => {
|
||||
this._sorter.setField(orderByField);
|
||||
this._sorter.setReverse(orderByReverse);
|
||||
this._savedOrderByField = this._sorter.field;
|
||||
this._changed({ sorting: true });
|
||||
};
|
||||
|
||||
updateSearch = (searchTerm, options) => {
|
||||
// here we update state directly, but later `fetchData` will update it properly
|
||||
this._searchTerm = searchTerm;
|
||||
// in search mode ignore the ordering and use the ranking order
|
||||
// provided by the server-side FTS backend instead, unless it was
|
||||
// requested by the user by actively ordering in search mode
|
||||
if (searchTerm === "") {
|
||||
if (searchTerm === "" || !options?.isServerSideFTS) {
|
||||
this._sorter.setField(this._savedOrderByField); // restore ordering
|
||||
} else {
|
||||
this._sorter.setField(null);
|
||||
}
|
||||
this._paginator.setPage(1);
|
||||
this._changed({ search: true, pagination: { page: true } });
|
||||
return this._changed({ search: true, pagination: { page: true } });
|
||||
};
|
||||
|
||||
updateSelectedTags = selectedTags => {
|
||||
updateSelectedTags = (selectedTags) => {
|
||||
this._selectedTags = selectedTags;
|
||||
this._paginator.setPage(1);
|
||||
this._changed({ tags: true, pagination: { page: true } });
|
||||
@@ -153,7 +158,7 @@ export class ItemsSource {
|
||||
|
||||
update = () => this._changed();
|
||||
|
||||
handleError = error => {
|
||||
handleError = (error) => {
|
||||
if (isFunction(this.onError)) {
|
||||
this.onError(error);
|
||||
}
|
||||
@@ -172,7 +177,7 @@ export class ResourceItemsSource extends ItemsSource {
|
||||
processResults: (results, context) => {
|
||||
let processItem = getItemProcessor(context);
|
||||
processItem = isFunction(processItem) ? processItem : identity;
|
||||
return map(results, item => processItem(item, context));
|
||||
return map(results, (item) => processItem(item, context));
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
@@ -44,7 +44,7 @@ export const Columns = {
|
||||
date(overrides) {
|
||||
return extend(
|
||||
{
|
||||
render: text => formatDate(text),
|
||||
render: (text) => formatDate(text),
|
||||
},
|
||||
overrides
|
||||
);
|
||||
@@ -52,7 +52,7 @@ export const Columns = {
|
||||
dateTime(overrides) {
|
||||
return extend(
|
||||
{
|
||||
render: text => formatDateTime(text),
|
||||
render: (text) => formatDateTime(text),
|
||||
},
|
||||
overrides
|
||||
);
|
||||
@@ -62,7 +62,7 @@ export const Columns = {
|
||||
{
|
||||
width: "1%",
|
||||
className: "text-nowrap",
|
||||
render: text => durationHumanize(text),
|
||||
render: (text) => durationHumanize(text),
|
||||
},
|
||||
overrides
|
||||
);
|
||||
@@ -70,7 +70,7 @@ export const Columns = {
|
||||
timeAgo(overrides, timeAgoCustomProps = undefined) {
|
||||
return extend(
|
||||
{
|
||||
render: value => <TimeAgo date={value} {...timeAgoCustomProps} />,
|
||||
render: (value) => <TimeAgo date={value} {...timeAgoCustomProps} />,
|
||||
},
|
||||
overrides
|
||||
);
|
||||
@@ -110,6 +110,7 @@ export default class ItemsTable extends React.Component {
|
||||
orderByField: PropTypes.string,
|
||||
orderByReverse: PropTypes.bool,
|
||||
toggleSorting: PropTypes.func,
|
||||
setSorting: PropTypes.func,
|
||||
"data-test": PropTypes.string,
|
||||
rowKey: PropTypes.oneOfType([PropTypes.string, PropTypes.func]),
|
||||
};
|
||||
@@ -127,18 +128,15 @@ export default class ItemsTable extends React.Component {
|
||||
};
|
||||
|
||||
prepareColumns() {
|
||||
const { orderByField, orderByReverse, toggleSorting } = this.props;
|
||||
const { orderByField, orderByReverse } = this.props;
|
||||
const orderByDirection = orderByReverse ? "descend" : "ascend";
|
||||
|
||||
return map(
|
||||
map(
|
||||
filter(this.props.columns, column => (isFunction(column.isAvailable) ? column.isAvailable() : true)),
|
||||
column => extend(column, { orderByField: column.orderByField || column.field })
|
||||
filter(this.props.columns, (column) => (isFunction(column.isAvailable) ? column.isAvailable() : true)),
|
||||
(column) => extend(column, { orderByField: column.orderByField || column.field })
|
||||
),
|
||||
(column, index) => {
|
||||
// Bind click events only to sortable columns
|
||||
const onHeaderCell = column.sorter ? () => ({ onClick: () => toggleSorting(column.orderByField) }) : null;
|
||||
|
||||
// Wrap render function to pass correct arguments
|
||||
const render = isFunction(column.render) ? (text, row) => column.render(text, row.item) : identity;
|
||||
|
||||
@@ -146,14 +144,13 @@ export default class ItemsTable extends React.Component {
|
||||
key: "column" + index,
|
||||
dataIndex: ["item", column.field],
|
||||
defaultSortOrder: column.orderByField === orderByField ? orderByDirection : null,
|
||||
onHeaderCell,
|
||||
render,
|
||||
});
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
getRowKey = record => {
|
||||
getRowKey = (record) => {
|
||||
const { rowKey } = this.props;
|
||||
if (rowKey) {
|
||||
if (isFunction(rowKey)) {
|
||||
@@ -172,22 +169,43 @@ export default class ItemsTable extends React.Component {
|
||||
|
||||
// Bind events only if `onRowClick` specified
|
||||
const onTableRow = isFunction(this.props.onRowClick)
|
||||
? row => ({
|
||||
onClick: event => {
|
||||
? (row) => ({
|
||||
onClick: (event) => {
|
||||
this.props.onRowClick(event, row.item);
|
||||
},
|
||||
})
|
||||
: null;
|
||||
|
||||
const onChange = (pagination, filters, sorter, extra) => {
|
||||
const action = extra?.action;
|
||||
if (action === "sort") {
|
||||
const propsColumn = this.props.columns.find((column) => column.field === sorter.field[1]);
|
||||
if (!propsColumn.sorter) {
|
||||
return;
|
||||
}
|
||||
let orderByField = propsColumn.orderByField;
|
||||
const orderByReverse = sorter.order === "descend";
|
||||
|
||||
if (orderByReverse === undefined) {
|
||||
orderByField = null;
|
||||
}
|
||||
if (this.props.setSorting) {
|
||||
this.props.setSorting(orderByField, orderByReverse);
|
||||
} else {
|
||||
this.props.toggleSorting(orderByField);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const { showHeader } = this.props;
|
||||
if (this.props.loading) {
|
||||
if (isEmpty(tableDataProps.dataSource)) {
|
||||
tableDataProps.columns = tableDataProps.columns.map(column => ({
|
||||
tableDataProps.columns = tableDataProps.columns.map((column) => ({
|
||||
...column,
|
||||
sorter: false,
|
||||
render: () => <Skeleton active paragraph={false} />,
|
||||
}));
|
||||
tableDataProps.dataSource = range(10).map(key => ({ key: `${key}` }));
|
||||
tableDataProps.dataSource = range(10).map((key) => ({ key: `${key}` }));
|
||||
} else {
|
||||
tableDataProps.loading = { indicator: null };
|
||||
}
|
||||
@@ -200,6 +218,7 @@ export default class ItemsTable extends React.Component {
|
||||
rowKey={this.getRowKey}
|
||||
pagination={false}
|
||||
onRow={onTableRow}
|
||||
onChange={onChange}
|
||||
data-test={this.props["data-test"]}
|
||||
{...tableDataProps}
|
||||
/>
|
||||
|
||||
@@ -47,20 +47,30 @@ function SchemaItem({ item, expanded, onToggle, onSelect, ...props }) {
|
||||
return (
|
||||
<div {...props}>
|
||||
<div className="schema-list-item">
|
||||
<PlainButton className="table-name" onClick={onToggle}>
|
||||
<i className="fa fa-table m-r-5" aria-hidden="true" />
|
||||
<strong>
|
||||
<span title={item.name}>{tableDisplayName}</span>
|
||||
{!isNil(item.size) && <span> ({item.size})</span>}
|
||||
</strong>
|
||||
</PlainButton>
|
||||
<Tooltip
|
||||
title={item.description}
|
||||
mouseEnterDelay={0}
|
||||
mouseLeaveDelay={0}
|
||||
placement="rightTop"
|
||||
trigger={item.description ? "hover" : ""}
|
||||
overlayStyle={{ whiteSpace: "pre-line" }}
|
||||
>
|
||||
<PlainButton className="table-name" onClick={onToggle}>
|
||||
<i className="fa fa-table m-r-5" aria-hidden="true" />
|
||||
<strong>
|
||||
<span title={item.name}>{tableDisplayName}</span>
|
||||
{!isNil(item.size) && <span> ({item.size})</span>}
|
||||
</strong>
|
||||
</PlainButton>
|
||||
</Tooltip>
|
||||
<Tooltip
|
||||
title="Insert table name into query text"
|
||||
mouseEnterDelay={0}
|
||||
mouseLeaveDelay={0}
|
||||
placement="topRight"
|
||||
arrowPointAtCenter>
|
||||
<PlainButton className="copy-to-editor" onClick={e => handleSelect(e, item.name)}>
|
||||
arrowPointAtCenter
|
||||
>
|
||||
<PlainButton className="copy-to-editor" onClick={(e) => handleSelect(e, item.name)}>
|
||||
<i className="fa fa-angle-double-right" aria-hidden="true" />
|
||||
</PlainButton>
|
||||
</Tooltip>
|
||||
@@ -70,16 +80,23 @@ function SchemaItem({ item, expanded, onToggle, onSelect, ...props }) {
|
||||
{item.loading ? (
|
||||
<div className="table-open">Loading...</div>
|
||||
) : (
|
||||
map(item.columns, column => {
|
||||
map(item.columns, (column) => {
|
||||
const columnName = get(column, "name");
|
||||
const columnType = get(column, "type");
|
||||
const columnDescription = get(column, "description");
|
||||
return (
|
||||
<Tooltip
|
||||
title="Insert column name into query text"
|
||||
title={"Insert column name into query text" + (columnDescription ? "\n" + columnDescription : "")}
|
||||
mouseEnterDelay={0}
|
||||
mouseLeaveDelay={0}
|
||||
placement="rightTop">
|
||||
<PlainButton key={columnName} className="table-open-item" onClick={e => handleSelect(e, columnName)}>
|
||||
placement="rightTop"
|
||||
overlayStyle={{ whiteSpace: "pre-line" }}
|
||||
>
|
||||
<PlainButton
|
||||
key={columnName}
|
||||
className="table-open-item"
|
||||
onClick={(e) => handleSelect(e, columnName)}
|
||||
>
|
||||
<div>
|
||||
{columnName} {columnType && <span className="column-type">{columnType}</span>}
|
||||
</div>
|
||||
@@ -168,7 +185,7 @@ export function SchemaList({ loading, schema, expandedFlags, onTableExpand, onIt
|
||||
}
|
||||
|
||||
export function applyFilterOnSchema(schema, filterString) {
|
||||
const filters = filter(filterString.toLowerCase().split(/\s+/), s => s.length > 0);
|
||||
const filters = filter(filterString.toLowerCase().split(/\s+/), (s) => s.length > 0);
|
||||
|
||||
// Empty string: return original schema
|
||||
if (filters.length === 0) {
|
||||
@@ -181,9 +198,9 @@ export function applyFilterOnSchema(schema, filterString) {
|
||||
const columnFilter = filters[0];
|
||||
return filter(
|
||||
schema,
|
||||
item =>
|
||||
(item) =>
|
||||
includes(item.name.toLowerCase(), nameFilter) ||
|
||||
some(item.columns, column => includes(get(column, "name").toLowerCase(), columnFilter))
|
||||
some(item.columns, (column) => includes(get(column, "name").toLowerCase(), columnFilter))
|
||||
);
|
||||
}
|
||||
|
||||
@@ -191,11 +208,11 @@ export function applyFilterOnSchema(schema, filterString) {
|
||||
const nameFilter = filters[0];
|
||||
const columnFilter = filters[1];
|
||||
return filter(
|
||||
map(schema, item => {
|
||||
map(schema, (item) => {
|
||||
if (includes(item.name.toLowerCase(), nameFilter)) {
|
||||
item = {
|
||||
...item,
|
||||
columns: filter(item.columns, column => includes(get(column, "name").toLowerCase(), columnFilter)),
|
||||
columns: filter(item.columns, (column) => includes(get(column, "name").toLowerCase(), columnFilter)),
|
||||
};
|
||||
return item.columns.length > 0 ? item : null;
|
||||
}
|
||||
@@ -243,7 +260,7 @@ export default function SchemaBrowser({
|
||||
placeholder="Search schema..."
|
||||
aria-label="Search schema"
|
||||
disabled={schema.length === 0}
|
||||
onChange={event => handleFilterChange(event.target.value)}
|
||||
onChange={(event) => handleFilterChange(event.target.value)}
|
||||
/>
|
||||
|
||||
<Tooltip title="Refresh Schema">
|
||||
|
||||
@@ -59,6 +59,7 @@ function wrapComponentWithSettings(WrappedComponent) {
|
||||
"dateTimeFormat",
|
||||
"integerFormat",
|
||||
"floatFormat",
|
||||
"nullValue",
|
||||
"booleanValues",
|
||||
"tableCellMaxJSONSize",
|
||||
"allowCustomJSVisualizations",
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
export default {
|
||||
columns: 6, // grid columns count
|
||||
columns: 12, // grid columns count
|
||||
rowHeight: 50, // grid row height (incl. bottom padding)
|
||||
margins: 15, // widget margins
|
||||
mobileBreakPoint: 800,
|
||||
// defaults for widgets
|
||||
defaultSizeX: 3,
|
||||
defaultSizeX: 6,
|
||||
defaultSizeY: 3,
|
||||
minSizeX: 1,
|
||||
maxSizeX: 6,
|
||||
minSizeY: 1,
|
||||
minSizeX: 2,
|
||||
maxSizeX: 12,
|
||||
minSizeY: 2,
|
||||
maxSizeY: 1000,
|
||||
};
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<html lang="en" translate="no">
|
||||
<head>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<meta charset="UTF-8" />
|
||||
|
||||
@@ -81,12 +81,19 @@ function DashboardListExtraActions(props) {
|
||||
}
|
||||
|
||||
function DashboardList({ controller }) {
|
||||
let usedListColumns = listColumns;
|
||||
if (controller.params.currentPage === "favorites") {
|
||||
usedListColumns = [
|
||||
...usedListColumns,
|
||||
Columns.dateTime.sortable({ title: "Starred At", field: "starred_at", width: "1%" }),
|
||||
];
|
||||
}
|
||||
const {
|
||||
areExtraActionsAvailable,
|
||||
listColumns: tableColumns,
|
||||
Component: ExtraActionsComponent,
|
||||
selectedItems,
|
||||
} = useItemsListExtraActions(controller, listColumns, DashboardListExtraActions);
|
||||
} = useItemsListExtraActions(controller, usedListColumns, DashboardListExtraActions);
|
||||
|
||||
return (
|
||||
<div className="page-dashboard-list">
|
||||
@@ -139,9 +146,9 @@ function DashboardList({ controller }) {
|
||||
showPageSizeSelect
|
||||
totalCount={controller.totalItemsCount}
|
||||
pageSize={controller.itemsPerPage}
|
||||
onPageSizeChange={itemsPerPage => controller.updatePagination({ itemsPerPage })}
|
||||
onPageSizeChange={(itemsPerPage) => controller.updatePagination({ itemsPerPage })}
|
||||
page={controller.page}
|
||||
onChange={page => controller.updatePagination({ page })}
|
||||
onChange={(page) => controller.updatePagination({ page })}
|
||||
/>
|
||||
</div>
|
||||
</React.Fragment>
|
||||
@@ -170,10 +177,10 @@ const DashboardListPage = itemsList(
|
||||
}[currentPage];
|
||||
},
|
||||
getItemProcessor() {
|
||||
return item => new Dashboard(item);
|
||||
return (item) => new Dashboard(item);
|
||||
},
|
||||
}),
|
||||
() => new UrlStateStorage({ orderByField: "created_at", orderByReverse: true })
|
||||
({ ...props }) => new UrlStateStorage({ orderByField: props.orderByField ?? "created_at", orderByReverse: true })
|
||||
);
|
||||
|
||||
routes.register(
|
||||
@@ -181,7 +188,7 @@ routes.register(
|
||||
routeWithUserSession({
|
||||
path: "/dashboards",
|
||||
title: "Dashboards",
|
||||
render: pageProps => <DashboardListPage {...pageProps} currentPage="all" />,
|
||||
render: (pageProps) => <DashboardListPage {...pageProps} currentPage="all" />,
|
||||
})
|
||||
);
|
||||
routes.register(
|
||||
@@ -189,7 +196,7 @@ routes.register(
|
||||
routeWithUserSession({
|
||||
path: "/dashboards/favorites",
|
||||
title: "Favorite Dashboards",
|
||||
render: pageProps => <DashboardListPage {...pageProps} currentPage="favorites" />,
|
||||
render: (pageProps) => <DashboardListPage {...pageProps} currentPage="favorites" orderByField="starred_at" />,
|
||||
})
|
||||
);
|
||||
routes.register(
|
||||
@@ -197,6 +204,6 @@ routes.register(
|
||||
routeWithUserSession({
|
||||
path: "/dashboards/my",
|
||||
title: "My Dashboards",
|
||||
render: pageProps => <DashboardListPage {...pageProps} currentPage="my" />,
|
||||
render: (pageProps) => <DashboardListPage {...pageProps} currentPage="my" />,
|
||||
})
|
||||
);
|
||||
|
||||
@@ -31,7 +31,8 @@ function DashboardSettings({ dashboardConfiguration }) {
|
||||
<Checkbox
|
||||
checked={!!dashboard.dashboard_filters_enabled}
|
||||
onChange={({ target }) => updateDashboard({ dashboard_filters_enabled: target.checked })}
|
||||
data-test="DashboardFiltersCheckbox">
|
||||
data-test="DashboardFiltersCheckbox"
|
||||
>
|
||||
Use Dashboard Level Filters
|
||||
</Checkbox>
|
||||
</div>
|
||||
@@ -90,9 +91,9 @@ function DashboardComponent(props) {
|
||||
|
||||
const [pageContainer, setPageContainer] = useState(null);
|
||||
const [bottomPanelStyles, setBottomPanelStyles] = useState({});
|
||||
const onParametersEdit = parameters => {
|
||||
const onParametersEdit = (parameters) => {
|
||||
const paramOrder = map(parameters, "name");
|
||||
updateDashboard({ options: { globalParamOrder: paramOrder } });
|
||||
updateDashboard({ options: { ...dashboard.options, globalParamOrder: paramOrder } });
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
@@ -175,7 +176,7 @@ function DashboardPage({ dashboardSlug, dashboardId, onError }) {
|
||||
|
||||
useEffect(() => {
|
||||
Dashboard.get({ id: dashboardId, slug: dashboardSlug })
|
||||
.then(dashboardData => {
|
||||
.then((dashboardData) => {
|
||||
recordEvent("view", "dashboard", dashboardData.id);
|
||||
setDashboard(dashboardData);
|
||||
|
||||
@@ -207,7 +208,7 @@ routes.register(
|
||||
"Dashboards.LegacyViewOrEdit",
|
||||
routeWithUserSession({
|
||||
path: "/dashboard/:dashboardSlug",
|
||||
render: pageProps => <DashboardPage {...pageProps} />,
|
||||
render: (pageProps) => <DashboardPage {...pageProps} />,
|
||||
})
|
||||
);
|
||||
|
||||
@@ -215,6 +216,6 @@ routes.register(
|
||||
"Dashboards.ViewOrEdit",
|
||||
routeWithUserSession({
|
||||
path: "/dashboards/:dashboardId([^-]+)(-.*)?",
|
||||
render: pageProps => <DashboardPage {...pageProps} />,
|
||||
render: (pageProps) => <DashboardPage {...pageProps} />,
|
||||
})
|
||||
);
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
}
|
||||
|
||||
> .container {
|
||||
min-height: calc(100vh - 95px);
|
||||
min-height: calc(100% - 95px);
|
||||
}
|
||||
|
||||
.loading-message {
|
||||
|
||||
@@ -22,7 +22,7 @@ import { DashboardStatusEnum } from "../hooks/useDashboard";
|
||||
import "./DashboardHeader.less";
|
||||
|
||||
function getDashboardTags() {
|
||||
return getTags("api/dashboards/tags").then(tags => map(tags, t => t.name));
|
||||
return getTags("api/dashboards/tags").then((tags) => map(tags, (t) => t.name));
|
||||
}
|
||||
|
||||
function buttonType(value) {
|
||||
@@ -38,7 +38,7 @@ function DashboardPageTitle({ dashboardConfiguration }) {
|
||||
<h3>
|
||||
<EditInPlace
|
||||
isEditable={editingLayout}
|
||||
onDone={name => updateDashboard({ name })}
|
||||
onDone={(name) => updateDashboard({ name })}
|
||||
value={dashboard.name}
|
||||
ignoreBlanks
|
||||
/>
|
||||
@@ -53,7 +53,7 @@ function DashboardPageTitle({ dashboardConfiguration }) {
|
||||
isArchived={dashboard.is_archived}
|
||||
canEdit={canEditDashboard}
|
||||
getAvailableTags={getDashboardTags}
|
||||
onEdit={tags => updateDashboard({ tags })}
|
||||
onEdit={(tags) => updateDashboard({ tags })}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
@@ -89,14 +89,15 @@ function RefreshButton({ dashboardConfiguration }) {
|
||||
placement="bottomRight"
|
||||
overlay={
|
||||
<Menu onClick={onRefreshRateSelected} selectedKeys={[`${refreshRate}`]}>
|
||||
{refreshRateOptions.map(option => (
|
||||
{refreshRateOptions.map((option) => (
|
||||
<Menu.Item key={`${option}`} disabled={!includes(allowedIntervals, option)}>
|
||||
{durationHumanize(option)}
|
||||
</Menu.Item>
|
||||
))}
|
||||
{refreshRate && <Menu.Item key={null}>Disable auto refresh</Menu.Item>}
|
||||
</Menu>
|
||||
}>
|
||||
}
|
||||
>
|
||||
<Button className="icon-button hidden-xs" type={buttonType(refreshRate)}>
|
||||
<i className="fa fa-angle-down" aria-hidden="true" />
|
||||
<span className="sr-only">Split button!</span>
|
||||
@@ -166,7 +167,8 @@ function DashboardMoreOptionsButton({ dashboardConfiguration }) {
|
||||
<PlainButton onClick={archive}>Archive</PlainButton>
|
||||
</Menu.Item>
|
||||
</Menu>
|
||||
}>
|
||||
}
|
||||
>
|
||||
<Button className="icon-button m-l-5" data-test="DashboardMoreButton" aria-label="More actions">
|
||||
<EllipsisOutlinedIcon rotate={90} aria-hidden="true" />
|
||||
</Button>
|
||||
@@ -216,7 +218,8 @@ function DashboardControl({ dashboardConfiguration, headerExtra }) {
|
||||
type={buttonType(fullscreen)}
|
||||
className="icon-button m-l-5"
|
||||
onClick={toggleFullscreen}
|
||||
aria-label="Toggle fullscreen display">
|
||||
aria-label="Toggle fullscreen display"
|
||||
>
|
||||
<i className="zmdi zmdi-fullscreen" aria-hidden="true" />
|
||||
</Button>
|
||||
</Tooltip>
|
||||
@@ -229,7 +232,8 @@ function DashboardControl({ dashboardConfiguration, headerExtra }) {
|
||||
type={buttonType(dashboard.publicAccessEnabled)}
|
||||
onClick={showShareDashboardDialog}
|
||||
data-test="OpenShareForm"
|
||||
aria-label="Share">
|
||||
aria-label="Share"
|
||||
>
|
||||
<i className="zmdi zmdi-share" aria-hidden="true" />
|
||||
</Button>
|
||||
</Tooltip>
|
||||
@@ -252,7 +256,11 @@ function DashboardEditControl({ dashboardConfiguration, headerExtra }) {
|
||||
doneBtnClickedWhileSaving,
|
||||
dashboardStatus,
|
||||
retrySaveDashboardLayout,
|
||||
saveDashboardParameters,
|
||||
} = dashboardConfiguration;
|
||||
const handleDoneEditing = () => {
|
||||
saveDashboardParameters().then(() => setEditingLayout(false));
|
||||
};
|
||||
let status;
|
||||
if (dashboardStatus === DashboardStatusEnum.SAVED) {
|
||||
status = <span className="save-status">Saved</span>;
|
||||
@@ -277,7 +285,7 @@ function DashboardEditControl({ dashboardConfiguration, headerExtra }) {
|
||||
Retry
|
||||
</Button>
|
||||
) : (
|
||||
<Button loading={doneBtnClickedWhileSaving} type="primary" onClick={() => setEditingLayout(false)}>
|
||||
<Button loading={doneBtnClickedWhileSaving} type="primary" onClick={handleDoneEditing}>
|
||||
{!doneBtnClickedWhileSaving && <i className="fa fa-check m-r-5" aria-hidden="true" />} Done Editing
|
||||
</Button>
|
||||
)}
|
||||
|
||||
@@ -22,12 +22,12 @@ export { DashboardStatusEnum } from "./useEditModeHandler";
|
||||
|
||||
function getAffectedWidgets(widgets, updatedParameters = []) {
|
||||
return !isEmpty(updatedParameters)
|
||||
? widgets.filter(widget =>
|
||||
? widgets.filter((widget) =>
|
||||
Object.values(widget.getParameterMappings())
|
||||
.filter(({ type }) => type === "dashboard-level")
|
||||
.some(({ mapTo }) =>
|
||||
includes(
|
||||
updatedParameters.map(p => p.name),
|
||||
updatedParameters.map((p) => p.name),
|
||||
mapTo
|
||||
)
|
||||
)
|
||||
@@ -50,7 +50,7 @@ function useDashboard(dashboardData) {
|
||||
[dashboard]
|
||||
);
|
||||
const hasOnlySafeQueries = useMemo(
|
||||
() => every(dashboard.widgets, w => (w.getQuery() ? w.getQuery().is_safe : true)),
|
||||
() => every(dashboard.widgets, (w) => (w.getQuery() ? w.getQuery().is_safe : true)),
|
||||
[dashboard]
|
||||
);
|
||||
|
||||
@@ -67,19 +67,19 @@ function useDashboard(dashboardData) {
|
||||
|
||||
const updateDashboard = useCallback(
|
||||
(data, includeVersion = true) => {
|
||||
setDashboard(currentDashboard => extend({}, currentDashboard, data));
|
||||
setDashboard((currentDashboard) => extend({}, currentDashboard, data));
|
||||
data = { ...data, id: dashboard.id };
|
||||
if (includeVersion) {
|
||||
data = { ...data, version: dashboard.version };
|
||||
}
|
||||
return Dashboard.save(data)
|
||||
.then(updatedDashboard => {
|
||||
setDashboard(currentDashboard => extend({}, currentDashboard, pick(updatedDashboard, keys(data))));
|
||||
.then((updatedDashboard) => {
|
||||
setDashboard((currentDashboard) => extend({}, currentDashboard, pick(updatedDashboard, keys(data))));
|
||||
if (has(data, "name")) {
|
||||
location.setPath(url.parse(updatedDashboard.url).pathname, true);
|
||||
}
|
||||
})
|
||||
.catch(error => {
|
||||
.catch((error) => {
|
||||
const status = get(error, "response.status");
|
||||
if (status === 403) {
|
||||
notification.error("Dashboard update failed", "Permission Denied.");
|
||||
@@ -102,25 +102,25 @@ function useDashboard(dashboardData) {
|
||||
|
||||
const loadWidget = useCallback((widget, forceRefresh = false) => {
|
||||
widget.getParametersDefs(); // Force widget to read parameters values from URL
|
||||
setDashboard(currentDashboard => extend({}, currentDashboard));
|
||||
setDashboard((currentDashboard) => extend({}, currentDashboard));
|
||||
return widget
|
||||
.load(forceRefresh)
|
||||
.catch(error => {
|
||||
.catch((error) => {
|
||||
// QueryResultErrors are expected
|
||||
if (error instanceof QueryResultError) {
|
||||
return;
|
||||
}
|
||||
return Promise.reject(error);
|
||||
})
|
||||
.finally(() => setDashboard(currentDashboard => extend({}, currentDashboard)));
|
||||
.finally(() => setDashboard((currentDashboard) => extend({}, currentDashboard)));
|
||||
}, []);
|
||||
|
||||
const refreshWidget = useCallback(widget => loadWidget(widget, true), [loadWidget]);
|
||||
const refreshWidget = useCallback((widget) => loadWidget(widget, true), [loadWidget]);
|
||||
|
||||
const removeWidget = useCallback(widgetId => {
|
||||
setDashboard(currentDashboard =>
|
||||
const removeWidget = useCallback((widgetId) => {
|
||||
setDashboard((currentDashboard) =>
|
||||
extend({}, currentDashboard, {
|
||||
widgets: currentDashboard.widgets.filter(widget => widget.id !== undefined && widget.id !== widgetId),
|
||||
widgets: currentDashboard.widgets.filter((widget) => widget.id !== undefined && widget.id !== widgetId),
|
||||
})
|
||||
);
|
||||
}, []);
|
||||
@@ -132,11 +132,11 @@ function useDashboard(dashboardData) {
|
||||
(forceRefresh = false, updatedParameters = []) => {
|
||||
const affectedWidgets = getAffectedWidgets(dashboardRef.current.widgets, updatedParameters);
|
||||
const loadWidgetPromises = compact(
|
||||
affectedWidgets.map(widget => loadWidget(widget, forceRefresh).catch(error => error))
|
||||
affectedWidgets.map((widget) => loadWidget(widget, forceRefresh).catch((error) => error))
|
||||
);
|
||||
|
||||
return Promise.all(loadWidgetPromises).then(() => {
|
||||
const queryResults = compact(map(dashboardRef.current.widgets, widget => widget.getQueryResult()));
|
||||
const queryResults = compact(map(dashboardRef.current.widgets, (widget) => widget.getQueryResult()));
|
||||
const updatedFilters = collectDashboardFilters(dashboardRef.current, queryResults, location.search);
|
||||
setFilters(updatedFilters);
|
||||
});
|
||||
@@ -145,7 +145,7 @@ function useDashboard(dashboardData) {
|
||||
);
|
||||
|
||||
const refreshDashboard = useCallback(
|
||||
updatedParameters => {
|
||||
(updatedParameters) => {
|
||||
if (!refreshing) {
|
||||
setRefreshing(true);
|
||||
loadDashboard(true, updatedParameters).finally(() => setRefreshing(false));
|
||||
@@ -154,15 +154,30 @@ function useDashboard(dashboardData) {
|
||||
[refreshing, loadDashboard]
|
||||
);
|
||||
|
||||
const saveDashboardParameters = useCallback(() => {
|
||||
const currentDashboard = dashboardRef.current;
|
||||
|
||||
return updateDashboard({
|
||||
options: {
|
||||
...currentDashboard.options,
|
||||
parameters: map(globalParameters, (p) => p.toSaveableObject()),
|
||||
},
|
||||
}).catch((error) => {
|
||||
console.error("Failed to persist parameter values:", error);
|
||||
notification.error("Parameter values could not be saved. Your changes may not be persisted.");
|
||||
throw error;
|
||||
});
|
||||
}, [globalParameters, updateDashboard]);
|
||||
|
||||
const archiveDashboard = useCallback(() => {
|
||||
recordEvent("archive", "dashboard", dashboard.id);
|
||||
Dashboard.delete(dashboard).then(updatedDashboard =>
|
||||
setDashboard(currentDashboard => extend({}, currentDashboard, pick(updatedDashboard, ["is_archived"])))
|
||||
Dashboard.delete(dashboard).then((updatedDashboard) =>
|
||||
setDashboard((currentDashboard) => extend({}, currentDashboard, pick(updatedDashboard, ["is_archived"])))
|
||||
);
|
||||
}, [dashboard]); // eslint-disable-line react-hooks/exhaustive-deps
|
||||
|
||||
const showShareDashboardDialog = useCallback(() => {
|
||||
const handleDialogClose = () => setDashboard(currentDashboard => extend({}, currentDashboard));
|
||||
const handleDialogClose = () => setDashboard((currentDashboard) => extend({}, currentDashboard));
|
||||
|
||||
ShareDashboardDialog.showModal({
|
||||
dashboard,
|
||||
@@ -175,8 +190,8 @@ function useDashboard(dashboardData) {
|
||||
const showAddTextboxDialog = useCallback(() => {
|
||||
TextboxDialog.showModal({
|
||||
isNew: true,
|
||||
}).onClose(text =>
|
||||
dashboard.addWidget(text).then(() => setDashboard(currentDashboard => extend({}, currentDashboard)))
|
||||
}).onClose((text) =>
|
||||
dashboard.addWidget(text).then(() => setDashboard((currentDashboard) => extend({}, currentDashboard)))
|
||||
);
|
||||
}, [dashboard]);
|
||||
|
||||
@@ -188,13 +203,13 @@ function useDashboard(dashboardData) {
|
||||
.addWidget(visualization, {
|
||||
parameterMappings: editableMappingsToParameterMappings(parameterMappings),
|
||||
})
|
||||
.then(widget => {
|
||||
.then((widget) => {
|
||||
const widgetsToSave = [
|
||||
widget,
|
||||
...synchronizeWidgetTitles(widget.options.parameterMappings, dashboard.widgets),
|
||||
];
|
||||
return Promise.all(widgetsToSave.map(w => w.save())).then(() =>
|
||||
setDashboard(currentDashboard => extend({}, currentDashboard))
|
||||
return Promise.all(widgetsToSave.map((w) => w.save())).then(() =>
|
||||
setDashboard((currentDashboard) => extend({}, currentDashboard))
|
||||
);
|
||||
})
|
||||
);
|
||||
@@ -238,6 +253,7 @@ function useDashboard(dashboardData) {
|
||||
setRefreshRate,
|
||||
disableRefreshRate,
|
||||
...editModeHandler,
|
||||
saveDashboardParameters,
|
||||
gridDisabled,
|
||||
setGridDisabled,
|
||||
fullscreen,
|
||||
|
||||
@@ -6,6 +6,7 @@ import Link from "@/components/Link";
|
||||
import routeWithUserSession from "@/components/ApplicationArea/routeWithUserSession";
|
||||
import EmptyState, { EmptyStateHelpMessage } from "@/components/empty-state/EmptyState";
|
||||
import DynamicComponent from "@/components/DynamicComponent";
|
||||
import BeaconConsent from "@/components/BeaconConsent";
|
||||
import PlainButton from "@/components/PlainButton";
|
||||
|
||||
import { axios } from "@/services/axios";
|
||||
@@ -30,7 +31,8 @@ function DeprecatedEmbedFeatureAlert() {
|
||||
<Link
|
||||
href="https://discuss.redash.io/t/support-for-parameters-in-embedded-visualizations/3337"
|
||||
target="_blank"
|
||||
rel="noopener noreferrer">
|
||||
rel="noopener noreferrer"
|
||||
>
|
||||
Read more
|
||||
</Link>
|
||||
.
|
||||
@@ -42,7 +44,7 @@ function DeprecatedEmbedFeatureAlert() {
|
||||
|
||||
function EmailNotVerifiedAlert() {
|
||||
const verifyEmail = () => {
|
||||
axios.post("verification_email/").then(data => {
|
||||
axios.post("verification_email/").then((data) => {
|
||||
notification.success(data.message);
|
||||
});
|
||||
};
|
||||
@@ -88,6 +90,7 @@ export default function Home() {
|
||||
</DynamicComponent>
|
||||
<DynamicComponent name="HomeExtra" />
|
||||
<DashboardAndQueryFavoritesList />
|
||||
<BeaconConsent />
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
@@ -98,6 +101,6 @@ routes.register(
|
||||
routeWithUserSession({
|
||||
path: "/",
|
||||
title: "Redash",
|
||||
render: pageProps => <Home {...pageProps} />,
|
||||
render: (pageProps) => <Home {...pageProps} />,
|
||||
})
|
||||
);
|
||||
|
||||
@@ -15,7 +15,7 @@ export function FavoriteList({ title, resource, itemUrl, emptyState }) {
|
||||
useEffect(() => {
|
||||
setLoading(true);
|
||||
resource
|
||||
.favorites()
|
||||
.favorites({ order: "-starred_at" })
|
||||
.then(({ results }) => setItems(results))
|
||||
.finally(() => setLoading(false));
|
||||
}, [resource]);
|
||||
@@ -28,7 +28,7 @@ export function FavoriteList({ title, resource, itemUrl, emptyState }) {
|
||||
</div>
|
||||
{!isEmpty(items) && (
|
||||
<div role="list" className="list-group">
|
||||
{items.map(item => (
|
||||
{items.map((item) => (
|
||||
<Link key={itemUrl(item)} role="listitem" className="list-group-item" href={itemUrl(item)}>
|
||||
<span className="btn-favorite m-r-5">
|
||||
<i className="fa fa-star" aria-hidden="true" />
|
||||
@@ -61,7 +61,7 @@ export function DashboardAndQueryFavoritesList() {
|
||||
<FavoriteList
|
||||
title="Favorite Dashboards"
|
||||
resource={Dashboard}
|
||||
itemUrl={dashboard => dashboard.url}
|
||||
itemUrl={(dashboard) => dashboard.url}
|
||||
emptyState={
|
||||
<p>
|
||||
<span className="btn-favorite m-r-5">
|
||||
@@ -76,7 +76,7 @@ export function DashboardAndQueryFavoritesList() {
|
||||
<FavoriteList
|
||||
title="Favorite Queries"
|
||||
resource={Query}
|
||||
itemUrl={query => `queries/${query.id}`}
|
||||
itemUrl={(query) => `queries/${query.id}`}
|
||||
emptyState={
|
||||
<p>
|
||||
<span className="btn-favorite m-r-5">
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import React, { useEffect, useRef } from "react";
|
||||
import React, { useCallback, useEffect, useRef } from "react";
|
||||
import cx from "classnames";
|
||||
|
||||
import routeWithUserSession from "@/components/ApplicationArea/routeWithUserSession";
|
||||
@@ -20,7 +20,7 @@ import ItemsTable, { Columns } from "@/components/items-list/components/ItemsTab
|
||||
import Layout from "@/components/layouts/ContentWithSidebar";
|
||||
|
||||
import { Query } from "@/services/query";
|
||||
import { currentUser } from "@/services/auth";
|
||||
import { clientConfig, currentUser } from "@/services/auth";
|
||||
import location from "@/services/location";
|
||||
import routes from "@/services/routes";
|
||||
|
||||
@@ -95,25 +95,39 @@ function QueriesList({ controller }) {
|
||||
const controllerRef = useRef();
|
||||
controllerRef.current = controller;
|
||||
|
||||
const updateSearch = useCallback(
|
||||
(searchTemm) => {
|
||||
controller.updateSearch(searchTemm, { isServerSideFTS: !clientConfig.multiByteSearchEnabled });
|
||||
},
|
||||
[controller]
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
const unlistenLocationChanges = location.listen((unused, action) => {
|
||||
const searchTerm = location.search.q || "";
|
||||
if (action === "PUSH" && searchTerm !== controllerRef.current.searchTerm) {
|
||||
controllerRef.current.updateSearch(searchTerm);
|
||||
updateSearch(searchTerm);
|
||||
}
|
||||
});
|
||||
|
||||
return () => {
|
||||
unlistenLocationChanges();
|
||||
};
|
||||
}, []);
|
||||
}, [updateSearch]);
|
||||
|
||||
let usedListColumns = listColumns;
|
||||
if (controller.params.currentPage === "favorites") {
|
||||
usedListColumns = [
|
||||
...usedListColumns,
|
||||
Columns.dateTime.sortable({ title: "Starred At", field: "starred_at", width: "1%" }),
|
||||
];
|
||||
}
|
||||
const {
|
||||
areExtraActionsAvailable,
|
||||
listColumns: tableColumns,
|
||||
Component: ExtraActionsComponent,
|
||||
selectedItems,
|
||||
} = useItemsListExtraActions(controller, listColumns, QueriesListExtraActions);
|
||||
} = useItemsListExtraActions(controller, usedListColumns, QueriesListExtraActions);
|
||||
|
||||
return (
|
||||
<div className="page-queries-list">
|
||||
@@ -135,7 +149,7 @@ function QueriesList({ controller }) {
|
||||
placeholder="Search Queries..."
|
||||
label="Search queries"
|
||||
value={controller.searchTerm}
|
||||
onChange={controller.updateSearch}
|
||||
onChange={updateSearch}
|
||||
/>
|
||||
<Sidebar.Menu items={sidebarMenu} selected={controller.params.currentPage} />
|
||||
<Sidebar.Tags url="api/queries/tags" onChange={controller.updateSelectedTags} showUnselectAll />
|
||||
@@ -160,14 +174,15 @@ function QueriesList({ controller }) {
|
||||
orderByField={controller.orderByField}
|
||||
orderByReverse={controller.orderByReverse}
|
||||
toggleSorting={controller.toggleSorting}
|
||||
setSorting={controller.setSorting}
|
||||
/>
|
||||
<Paginator
|
||||
showPageSizeSelect
|
||||
totalCount={controller.totalItemsCount}
|
||||
pageSize={controller.itemsPerPage}
|
||||
onPageSizeChange={itemsPerPage => controller.updatePagination({ itemsPerPage })}
|
||||
onPageSizeChange={(itemsPerPage) => controller.updatePagination({ itemsPerPage })}
|
||||
page={controller.page}
|
||||
onChange={page => controller.updatePagination({ page })}
|
||||
onChange={(page) => controller.updatePagination({ page })}
|
||||
/>
|
||||
</div>
|
||||
</React.Fragment>
|
||||
@@ -196,10 +211,10 @@ const QueriesListPage = itemsList(
|
||||
}[currentPage];
|
||||
},
|
||||
getItemProcessor() {
|
||||
return item => new Query(item);
|
||||
return (item) => new Query(item);
|
||||
},
|
||||
}),
|
||||
() => new UrlStateStorage({ orderByField: "created_at", orderByReverse: true })
|
||||
({ ...props }) => new UrlStateStorage({ orderByField: props.orderByField ?? "created_at", orderByReverse: true })
|
||||
);
|
||||
|
||||
routes.register(
|
||||
@@ -207,7 +222,7 @@ routes.register(
|
||||
routeWithUserSession({
|
||||
path: "/queries",
|
||||
title: "Queries",
|
||||
render: pageProps => <QueriesListPage {...pageProps} currentPage="all" />,
|
||||
render: (pageProps) => <QueriesListPage {...pageProps} currentPage="all" />,
|
||||
})
|
||||
);
|
||||
routes.register(
|
||||
@@ -215,7 +230,7 @@ routes.register(
|
||||
routeWithUserSession({
|
||||
path: "/queries/favorites",
|
||||
title: "Favorite Queries",
|
||||
render: pageProps => <QueriesListPage {...pageProps} currentPage="favorites" />,
|
||||
render: (pageProps) => <QueriesListPage {...pageProps} currentPage="favorites" orderByField="starred_at" />,
|
||||
})
|
||||
);
|
||||
routes.register(
|
||||
@@ -223,7 +238,7 @@ routes.register(
|
||||
routeWithUserSession({
|
||||
path: "/queries/archive",
|
||||
title: "Archived Queries",
|
||||
render: pageProps => <QueriesListPage {...pageProps} currentPage="archive" />,
|
||||
render: (pageProps) => <QueriesListPage {...pageProps} currentPage="archive" />,
|
||||
})
|
||||
);
|
||||
routes.register(
|
||||
@@ -231,6 +246,6 @@ routes.register(
|
||||
routeWithUserSession({
|
||||
path: "/queries/my",
|
||||
title: "My Queries",
|
||||
render: pageProps => <QueriesListPage {...pageProps} currentPage="my" />,
|
||||
render: (pageProps) => <QueriesListPage {...pageProps} currentPage="my" />,
|
||||
})
|
||||
);
|
||||
|
||||
@@ -9,6 +9,7 @@ import QueryControlDropdown from "@/components/EditVisualizationButton/QueryCont
|
||||
import EditVisualizationButton from "@/components/EditVisualizationButton";
|
||||
import useQueryResultData from "@/lib/useQueryResultData";
|
||||
import { durationHumanize, pluralize, prettySize } from "@/lib/utils";
|
||||
import { isUndefined } from "lodash";
|
||||
|
||||
import "./QueryExecutionMetadata.less";
|
||||
|
||||
@@ -51,7 +52,8 @@ export default function QueryExecutionMetadata({
|
||||
"Result truncated to " +
|
||||
queryResultData.rows.length +
|
||||
" rows. Databricks may truncate query results that are unstably large."
|
||||
}>
|
||||
}
|
||||
>
|
||||
<WarningTwoTone twoToneColor="#FF9800" />
|
||||
</Tooltip>
|
||||
</span>
|
||||
@@ -67,10 +69,9 @@ export default function QueryExecutionMetadata({
|
||||
)}
|
||||
{isQueryExecuting && <span>Running…</span>}
|
||||
</span>
|
||||
{queryResultData.metadata.data_scanned && (
|
||||
{!isUndefined(queryResultData.metadata.data_scanned) && !isQueryExecuting && (
|
||||
<span className="m-l-5">
|
||||
Data Scanned
|
||||
<strong>{prettySize(queryResultData.metadata.data_scanned)}</strong>
|
||||
Data Scanned <strong>{prettySize(queryResultData.metadata.data_scanned)}</strong>
|
||||
</span>
|
||||
)}
|
||||
</span>
|
||||
|
||||
@@ -2,7 +2,7 @@ import PropTypes from "prop-types";
|
||||
import React from "react";
|
||||
|
||||
export function QuerySourceTypeIcon(props) {
|
||||
return <img src={`static/images/db-logos/${props.type}.png`} width="20" alt={props.alt} />;
|
||||
return <img src={`/static/images/db-logos/${props.type}.png`} width="20" alt={props.alt} />;
|
||||
}
|
||||
|
||||
QuerySourceTypeIcon.propTypes = {
|
||||
|
||||
@@ -18,7 +18,7 @@ function EmptyState({ title, message, refreshButton }) {
|
||||
<div className="query-results-empty-state">
|
||||
<div className="empty-state-content">
|
||||
<div>
|
||||
<img src="static/images/illustrations/no-query-results.svg" alt="No Query Results Illustration" />
|
||||
<img src="/static/images/illustrations/no-query-results.svg" alt="No Query Results Illustration" />
|
||||
</div>
|
||||
<h3>{title}</h3>
|
||||
<div className="m-b-20">{message}</div>
|
||||
@@ -40,7 +40,7 @@ EmptyState.defaultProps = {
|
||||
|
||||
function TabWithDeleteButton({ visualizationName, canDelete, onDelete, ...props }) {
|
||||
const handleDelete = useCallback(
|
||||
e => {
|
||||
(e) => {
|
||||
e.stopPropagation();
|
||||
Modal.confirm({
|
||||
title: "Delete Visualization",
|
||||
@@ -111,7 +111,8 @@ export default function QueryVisualizationTabs({
|
||||
className="add-visualization-button"
|
||||
data-test="NewVisualization"
|
||||
type="link"
|
||||
onClick={() => onAddVisualization()}>
|
||||
onClick={() => onAddVisualization()}
|
||||
>
|
||||
<i className="fa fa-plus" aria-hidden="true" />
|
||||
<span className="m-l-5 hidden-xs">Add Visualization</span>
|
||||
</Button>
|
||||
@@ -119,7 +120,7 @@ export default function QueryVisualizationTabs({
|
||||
}
|
||||
|
||||
const orderedVisualizations = useMemo(() => orderBy(visualizations, ["id"]), [visualizations]);
|
||||
const isFirstVisualization = useCallback(visId => visId === orderedVisualizations[0].id, [orderedVisualizations]);
|
||||
const isFirstVisualization = useCallback((visId) => visId === orderedVisualizations[0].id, [orderedVisualizations]);
|
||||
const isMobile = useMedia({ maxWidth: 768 });
|
||||
|
||||
const [filters, setFilters] = useState([]);
|
||||
@@ -132,9 +133,10 @@ export default function QueryVisualizationTabs({
|
||||
data-test="QueryPageVisualizationTabs"
|
||||
animated={false}
|
||||
tabBarGutter={0}
|
||||
onChange={activeKey => onChangeTab(+activeKey)}
|
||||
destroyInactiveTabPane>
|
||||
{orderedVisualizations.map(visualization => (
|
||||
onChange={(activeKey) => onChangeTab(+activeKey)}
|
||||
destroyInactiveTabPane
|
||||
>
|
||||
{orderedVisualizations.map((visualization) => (
|
||||
<TabPane
|
||||
key={`${visualization.id}`}
|
||||
tab={
|
||||
@@ -144,7 +146,8 @@ export default function QueryVisualizationTabs({
|
||||
visualizationName={visualization.name}
|
||||
onDelete={() => onDeleteVisualization(visualization.id)}
|
||||
/>
|
||||
}>
|
||||
}
|
||||
>
|
||||
{queryResult ? (
|
||||
<VisualizationRenderer
|
||||
visualization={visualization}
|
||||
|
||||
@@ -1,16 +1,11 @@
|
||||
import { useCallback, useMemo, useState } from "react";
|
||||
import { reduce } from "lodash";
|
||||
import localOptions from "@/lib/localOptions";
|
||||
|
||||
function calculateTokensCount(schema) {
|
||||
return reduce(schema, (totalLength, table) => totalLength + table.columns.length, 0);
|
||||
}
|
||||
|
||||
export default function useAutocompleteFlags(schema) {
|
||||
const isAvailable = useMemo(() => calculateTokensCount(schema) <= 5000, [schema]);
|
||||
const isAvailable = true;
|
||||
const [isEnabled, setIsEnabled] = useState(localOptions.get("liveAutocomplete", true));
|
||||
|
||||
const toggleAutocomplete = useCallback(state => {
|
||||
const toggleAutocomplete = useCallback((state) => {
|
||||
setIsEnabled(state);
|
||||
localOptions.set("liveAutocomplete", state);
|
||||
}, []);
|
||||
|
||||
@@ -0,0 +1,40 @@
|
||||
import React from "react";
|
||||
import Form from "antd/lib/form";
|
||||
import Checkbox from "antd/lib/checkbox";
|
||||
import Skeleton from "antd/lib/skeleton";
|
||||
import HelpTrigger from "@/components/HelpTrigger";
|
||||
import DynamicComponent from "@/components/DynamicComponent";
|
||||
import { SettingsEditorPropTypes, SettingsEditorDefaultProps } from "../prop-types";
|
||||
|
||||
export default function BeaconConsentSettings(props) {
|
||||
const { values, onChange, loading } = props;
|
||||
|
||||
return (
|
||||
<DynamicComponent name="OrganizationSettings.BeaconConsentSettings" {...props}>
|
||||
<Form.Item
|
||||
label={
|
||||
<span>
|
||||
Anonymous Usage Data Sharing
|
||||
<HelpTrigger className="m-l-5 m-r-5" type="USAGE_DATA_SHARING" />
|
||||
</span>
|
||||
}
|
||||
>
|
||||
{loading ? (
|
||||
<Skeleton title={{ width: 300 }} paragraph={false} active />
|
||||
) : (
|
||||
<Checkbox
|
||||
name="beacon_consent"
|
||||
checked={values.beacon_consent}
|
||||
onChange={(e) => onChange({ beacon_consent: e.target.checked })}
|
||||
>
|
||||
Help Redash improve by automatically sending anonymous usage data
|
||||
</Checkbox>
|
||||
)}
|
||||
</Form.Item>
|
||||
</DynamicComponent>
|
||||
);
|
||||
}
|
||||
|
||||
BeaconConsentSettings.propTypes = SettingsEditorPropTypes;
|
||||
|
||||
BeaconConsentSettings.defaultProps = SettingsEditorDefaultProps;
|
||||
@@ -4,6 +4,7 @@ import DynamicComponent from "@/components/DynamicComponent";
|
||||
import FormatSettings from "./FormatSettings";
|
||||
import PlotlySettings from "./PlotlySettings";
|
||||
import FeatureFlagsSettings from "./FeatureFlagsSettings";
|
||||
import BeaconConsentSettings from "./BeaconConsentSettings";
|
||||
|
||||
export default function GeneralSettings(props) {
|
||||
return (
|
||||
@@ -13,6 +14,7 @@ export default function GeneralSettings(props) {
|
||||
<FormatSettings {...props} />
|
||||
<PlotlySettings {...props} />
|
||||
<FeatureFlagsSettings {...props} />
|
||||
<BeaconConsentSettings {...props} />
|
||||
</DynamicComponent>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -10,9 +10,9 @@ export const urlForDashboard = ({ id, slug }) => `dashboards/${id}-${slug}`;
|
||||
|
||||
export function collectDashboardFilters(dashboard, queryResults, urlParams) {
|
||||
const filters = {};
|
||||
_.each(queryResults, queryResult => {
|
||||
_.each(queryResults, (queryResult) => {
|
||||
const queryFilters = queryResult && queryResult.getFilters ? queryResult.getFilters() : [];
|
||||
_.each(queryFilters, queryFilter => {
|
||||
_.each(queryFilters, (queryFilter) => {
|
||||
const hasQueryStringValue = _.has(urlParams, queryFilter.name);
|
||||
|
||||
if (!(hasQueryStringValue || dashboard.dashboard_filters_enabled)) {
|
||||
@@ -44,7 +44,7 @@ function prepareWidgetsForDashboard(widgets) {
|
||||
const defaultWidgetSizeY =
|
||||
Math.max(
|
||||
_.chain(widgets)
|
||||
.map(w => w.options.position.sizeY)
|
||||
.map((w) => w.options.position.sizeY)
|
||||
.max()
|
||||
.value(),
|
||||
20
|
||||
@@ -55,11 +55,11 @@ function prepareWidgetsForDashboard(widgets) {
|
||||
// 2. update position of widgets in each row - place it right below
|
||||
// biggest widget from previous row
|
||||
_.chain(widgets)
|
||||
.sortBy(widget => widget.options.position.row)
|
||||
.groupBy(widget => widget.options.position.row)
|
||||
.sortBy((widget) => widget.options.position.row)
|
||||
.groupBy((widget) => widget.options.position.row)
|
||||
.reduce((row, widgetsAtRow) => {
|
||||
let height = 1;
|
||||
_.each(widgetsAtRow, widget => {
|
||||
_.each(widgetsAtRow, (widget) => {
|
||||
height = Math.max(
|
||||
height,
|
||||
widget.options.position.autoHeight ? defaultWidgetSizeY : widget.options.position.sizeY
|
||||
@@ -74,8 +74,8 @@ function prepareWidgetsForDashboard(widgets) {
|
||||
.value();
|
||||
|
||||
// Sort widgets by updated column and row value
|
||||
widgets = _.sortBy(widgets, widget => widget.options.position.col);
|
||||
widgets = _.sortBy(widgets, widget => widget.options.position.row);
|
||||
widgets = _.sortBy(widgets, (widget) => widget.options.position.col);
|
||||
widgets = _.sortBy(widgets, (widget) => widget.options.position.row);
|
||||
|
||||
return widgets;
|
||||
}
|
||||
@@ -85,7 +85,7 @@ function calculateNewWidgetPosition(existingWidgets, newWidget) {
|
||||
|
||||
// Find first free row for each column
|
||||
const bottomLine = _.chain(existingWidgets)
|
||||
.map(w => {
|
||||
.map((w) => {
|
||||
const options = _.extend({}, w.options);
|
||||
const position = _.extend({ row: 0, sizeY: 0 }, options.position);
|
||||
return {
|
||||
@@ -97,21 +97,24 @@ function calculateNewWidgetPosition(existingWidgets, newWidget) {
|
||||
height: position.sizeY,
|
||||
};
|
||||
})
|
||||
.reduce((result, item) => {
|
||||
const from = Math.max(item.left, 0);
|
||||
const to = Math.min(item.right, result.length + 1);
|
||||
for (let i = from; i < to; i += 1) {
|
||||
result[i] = Math.max(result[i], item.bottom);
|
||||
}
|
||||
return result;
|
||||
}, _.map(new Array(dashboardGridOptions.columns), _.constant(0)))
|
||||
.reduce(
|
||||
(result, item) => {
|
||||
const from = Math.max(item.left, 0);
|
||||
const to = Math.min(item.right, result.length + 1);
|
||||
for (let i = from; i < to; i += 1) {
|
||||
result[i] = Math.max(result[i], item.bottom);
|
||||
}
|
||||
return result;
|
||||
},
|
||||
_.map(new Array(dashboardGridOptions.columns), _.constant(0))
|
||||
)
|
||||
.value();
|
||||
|
||||
// Go through columns, pick them by count necessary to hold new block,
|
||||
// and calculate bottom-most free row per group.
|
||||
// Choose group with the top-most free row (comparing to other groups)
|
||||
return _.chain(_.range(0, dashboardGridOptions.columns - width + 1))
|
||||
.map(col => ({
|
||||
.map((col) => ({
|
||||
col,
|
||||
row: _.chain(bottomLine)
|
||||
.slice(col, col + width)
|
||||
@@ -126,14 +129,14 @@ function calculateNewWidgetPosition(existingWidgets, newWidget) {
|
||||
export function Dashboard(dashboard) {
|
||||
_.extend(this, dashboard);
|
||||
Object.defineProperty(this, "url", {
|
||||
get: function() {
|
||||
get: function () {
|
||||
return urlForDashboard(this);
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
function prepareDashboardWidgets(widgets) {
|
||||
return prepareWidgetsForDashboard(_.map(widgets, widget => new Widget(widget)));
|
||||
return prepareWidgetsForDashboard(_.map(widgets, (widget) => new Widget(widget)));
|
||||
}
|
||||
|
||||
function transformSingle(dashboard) {
|
||||
@@ -154,7 +157,7 @@ function transformResponse(data) {
|
||||
return data;
|
||||
}
|
||||
|
||||
const saveOrCreateUrl = data => (data.id ? `api/dashboards/${data.id}` : "api/dashboards");
|
||||
const saveOrCreateUrl = (data) => (data.id ? `api/dashboards/${data.id}` : "api/dashboards");
|
||||
const DashboardService = {
|
||||
get: ({ id, slug }) => {
|
||||
const params = {};
|
||||
@@ -164,12 +167,12 @@ const DashboardService = {
|
||||
return axios.get(`api/dashboards/${id || slug}`, { params }).then(transformResponse);
|
||||
},
|
||||
getByToken: ({ token }) => axios.get(`api/dashboards/public/${token}`).then(transformResponse),
|
||||
save: data => axios.post(saveOrCreateUrl(data), data).then(transformResponse),
|
||||
save: (data) => axios.post(saveOrCreateUrl(data), data).then(transformResponse),
|
||||
delete: ({ id }) => axios.delete(`api/dashboards/${id}`).then(transformResponse),
|
||||
query: params => axios.get("api/dashboards", { params }).then(transformResponse),
|
||||
recent: params => axios.get("api/dashboards/recent", { params }).then(transformResponse),
|
||||
myDashboards: params => axios.get("api/dashboards/my", { params }).then(transformResponse),
|
||||
favorites: params => axios.get("api/dashboards/favorites", { params }).then(transformResponse),
|
||||
query: (params) => axios.get("api/dashboards", { params }).then(transformResponse),
|
||||
recent: (params) => axios.get("api/dashboards/recent", { params }).then(transformResponse),
|
||||
myDashboards: (params) => axios.get("api/dashboards/my", { params }).then(transformResponse),
|
||||
favorites: (params) => axios.get("api/dashboards/favorites", { params }).then(transformResponse),
|
||||
favorite: ({ id }) => axios.post(`api/dashboards/${id}/favorite`),
|
||||
unfavorite: ({ id }) => axios.delete(`api/dashboards/${id}/favorite`),
|
||||
fork: ({ id }) => axios.post(`api/dashboards/${id}/fork`, { id }).then(transformResponse),
|
||||
@@ -187,13 +190,13 @@ Dashboard.prototype.canEdit = function canEdit() {
|
||||
Dashboard.prototype.getParametersDefs = function getParametersDefs() {
|
||||
const globalParams = {};
|
||||
const queryParams = location.search;
|
||||
_.each(this.widgets, widget => {
|
||||
_.each(this.widgets, (widget) => {
|
||||
if (widget.getQuery()) {
|
||||
const mappings = widget.getParameterMappings();
|
||||
widget
|
||||
.getQuery()
|
||||
.getParametersDefs(false)
|
||||
.forEach(param => {
|
||||
.forEach((param) => {
|
||||
const mapping = mappings[param.name];
|
||||
if (mapping.type === Widget.MappingType.DashboardLevel) {
|
||||
// create global param
|
||||
@@ -210,15 +213,19 @@ Dashboard.prototype.getParametersDefs = function getParametersDefs() {
|
||||
});
|
||||
}
|
||||
});
|
||||
const mergedValues = {
|
||||
..._.mapValues(globalParams, (p) => p.value),
|
||||
...Object.fromEntries((this.options.parameters || []).map((param) => [param.name, param.value])),
|
||||
};
|
||||
const resultingGlobalParams = _.values(
|
||||
_.each(globalParams, param => {
|
||||
param.setValue(param.value); // apply global param value to all locals
|
||||
param.fromUrlParams(queryParams); // try to initialize from url (may do nothing)
|
||||
_.each(globalParams, (param) => {
|
||||
param.setValue(mergedValues[param.name]); // apply merged value
|
||||
param.fromUrlParams(queryParams); // allow param-specific parsing logic
|
||||
})
|
||||
);
|
||||
|
||||
// order dashboard params using paramOrder
|
||||
return _.sortBy(resultingGlobalParams, param =>
|
||||
return _.sortBy(resultingGlobalParams, (param) =>
|
||||
_.includes(this.options.globalParamOrder, param.name)
|
||||
? _.indexOf(this.options.globalParamOrder, param.name)
|
||||
: _.size(this.options.globalParamOrder)
|
||||
|
||||
@@ -4,19 +4,19 @@ import { fetchDataFromJob } from "@/services/query-result";
|
||||
|
||||
export const SCHEMA_NOT_SUPPORTED = 1;
|
||||
export const SCHEMA_LOAD_ERROR = 2;
|
||||
export const IMG_ROOT = "static/images/db-logos";
|
||||
export const IMG_ROOT = "/static/images/db-logos";
|
||||
|
||||
function mapSchemaColumnsToObject(columns) {
|
||||
return map(columns, column => (isObject(column) ? column : { name: column }));
|
||||
return map(columns, (column) => (isObject(column) ? column : { name: column }));
|
||||
}
|
||||
|
||||
const DataSource = {
|
||||
query: () => axios.get("api/data_sources"),
|
||||
get: ({ id }) => axios.get(`api/data_sources/${id}`),
|
||||
types: () => axios.get("api/data_sources/types"),
|
||||
create: data => axios.post(`api/data_sources`, data),
|
||||
save: data => axios.post(`api/data_sources/${data.id}`, data),
|
||||
test: data => axios.post(`api/data_sources/${data.id}/test`),
|
||||
create: (data) => axios.post(`api/data_sources`, data),
|
||||
save: (data) => axios.post(`api/data_sources/${data.id}`, data),
|
||||
test: (data) => axios.post(`api/data_sources/${data.id}/test`),
|
||||
delete: ({ id }) => axios.delete(`api/data_sources/${id}`),
|
||||
fetchSchema: (data, refresh = false) => {
|
||||
const params = {};
|
||||
@@ -27,15 +27,15 @@ const DataSource = {
|
||||
|
||||
return axios
|
||||
.get(`api/data_sources/${data.id}/schema`, { params })
|
||||
.then(data => {
|
||||
.then((data) => {
|
||||
if (has(data, "job")) {
|
||||
return fetchDataFromJob(data.job.id).catch(error =>
|
||||
return fetchDataFromJob(data.job.id).catch((error) =>
|
||||
error.code === SCHEMA_NOT_SUPPORTED ? [] : Promise.reject(new Error(data.job.error))
|
||||
);
|
||||
}
|
||||
return has(data, "schema") ? data.schema : Promise.reject();
|
||||
})
|
||||
.then(tables => map(tables, table => ({ ...table, columns: mapSchemaColumnsToObject(table.columns) })));
|
||||
.then((tables) => map(tables, (table) => ({ ...table, columns: mapSchemaColumnsToObject(table.columns) })));
|
||||
},
|
||||
};
|
||||
|
||||
|
||||
@@ -9,7 +9,7 @@ function normalizeLocation(rawLocation) {
|
||||
const result = {};
|
||||
|
||||
result.path = pathname;
|
||||
result.search = mapValues(qs.parse(search), value => (isNil(value) ? true : value));
|
||||
result.search = mapValues(qs.parse(search), (value) => (isNil(value) ? true : value));
|
||||
result.hash = trimStart(hash, "#");
|
||||
result.url = `${pathname}${search}${hash}`;
|
||||
|
||||
@@ -27,7 +27,7 @@ const location = {
|
||||
|
||||
confirmChange(handler) {
|
||||
if (isFunction(handler)) {
|
||||
return history.block(nextLocation => {
|
||||
return history.block((nextLocation) => {
|
||||
return handler(normalizeLocation(nextLocation), location);
|
||||
});
|
||||
} else {
|
||||
@@ -60,12 +60,18 @@ const location = {
|
||||
// serialize search and keep existing search parameters (!)
|
||||
if (isObject(newLocation.search)) {
|
||||
newLocation.search = omitBy(extend({}, location.search, newLocation.search), isNil);
|
||||
newLocation.search = mapValues(newLocation.search, value => (value === true ? null : value));
|
||||
newLocation.search = mapValues(newLocation.search, (value) => (value === true ? null : value));
|
||||
newLocation.search = qs.stringify(newLocation.search);
|
||||
}
|
||||
}
|
||||
if (replace) {
|
||||
history.replace(newLocation);
|
||||
if (
|
||||
newLocation.pathname !== location.path ||
|
||||
newLocation.search !== qs.stringify(location.search) ||
|
||||
newLocation.hash !== location.hash
|
||||
) {
|
||||
history.replace(newLocation);
|
||||
}
|
||||
} else {
|
||||
history.push(newLocation);
|
||||
}
|
||||
|
||||
@@ -17,7 +17,9 @@ const DYNAMIC_PREFIX = "d_";
|
||||
* @param now {function(): moment.Moment=} moment - defaults to now
|
||||
* @returns {function(withNow: boolean): [moment.Moment, moment.Moment|undefined]}
|
||||
*/
|
||||
const untilNow = (from, now = () => moment()) => (withNow = true) => [from(), withNow ? now() : undefined];
|
||||
const untilNow =
|
||||
(from, now = () => moment()) =>
|
||||
(withNow = true) => [from(), withNow ? now() : undefined];
|
||||
|
||||
const DYNAMIC_DATE_RANGES = {
|
||||
today: {
|
||||
@@ -26,14 +28,7 @@ const DYNAMIC_DATE_RANGES = {
|
||||
},
|
||||
yesterday: {
|
||||
name: "Yesterday",
|
||||
value: () => [
|
||||
moment()
|
||||
.subtract(1, "day")
|
||||
.startOf("day"),
|
||||
moment()
|
||||
.subtract(1, "day")
|
||||
.endOf("day"),
|
||||
],
|
||||
value: () => [moment().subtract(1, "day").startOf("day"), moment().subtract(1, "day").endOf("day")],
|
||||
},
|
||||
this_week: {
|
||||
name: "This week",
|
||||
@@ -49,36 +44,15 @@ const DYNAMIC_DATE_RANGES = {
|
||||
},
|
||||
last_week: {
|
||||
name: "Last week",
|
||||
value: () => [
|
||||
moment()
|
||||
.subtract(1, "week")
|
||||
.startOf("week"),
|
||||
moment()
|
||||
.subtract(1, "week")
|
||||
.endOf("week"),
|
||||
],
|
||||
value: () => [moment().subtract(1, "week").startOf("week"), moment().subtract(1, "week").endOf("week")],
|
||||
},
|
||||
last_month: {
|
||||
name: "Last month",
|
||||
value: () => [
|
||||
moment()
|
||||
.subtract(1, "month")
|
||||
.startOf("month"),
|
||||
moment()
|
||||
.subtract(1, "month")
|
||||
.endOf("month"),
|
||||
],
|
||||
value: () => [moment().subtract(1, "month").startOf("month"), moment().subtract(1, "month").endOf("month")],
|
||||
},
|
||||
last_year: {
|
||||
name: "Last year",
|
||||
value: () => [
|
||||
moment()
|
||||
.subtract(1, "year")
|
||||
.startOf("year"),
|
||||
moment()
|
||||
.subtract(1, "year")
|
||||
.endOf("year"),
|
||||
],
|
||||
value: () => [moment().subtract(1, "year").startOf("year"), moment().subtract(1, "year").endOf("year")],
|
||||
},
|
||||
last_hour: {
|
||||
name: "Last hour",
|
||||
@@ -94,63 +68,31 @@ const DYNAMIC_DATE_RANGES = {
|
||||
},
|
||||
last_7_days: {
|
||||
name: "Last 7 days",
|
||||
value: untilNow(
|
||||
() =>
|
||||
moment()
|
||||
.subtract(7, "days")
|
||||
.startOf("day"),
|
||||
() => moment().endOf("day")
|
||||
),
|
||||
value: untilNow(() => moment().subtract(7, "days").startOf("day")),
|
||||
},
|
||||
last_14_days: {
|
||||
name: "Last 14 days",
|
||||
value: untilNow(
|
||||
() =>
|
||||
moment()
|
||||
.subtract(14, "days")
|
||||
.startOf("day"),
|
||||
() => moment().endOf("day")
|
||||
),
|
||||
value: untilNow(() => moment().subtract(14, "days").startOf("day")),
|
||||
},
|
||||
last_30_days: {
|
||||
name: "Last 30 days",
|
||||
value: untilNow(
|
||||
() =>
|
||||
moment()
|
||||
.subtract(30, "days")
|
||||
.startOf("day"),
|
||||
() => moment().endOf("day")
|
||||
),
|
||||
value: untilNow(() => moment().subtract(30, "days").startOf("day")),
|
||||
},
|
||||
last_60_days: {
|
||||
name: "Last 60 days",
|
||||
value: untilNow(
|
||||
() =>
|
||||
moment()
|
||||
.subtract(60, "days")
|
||||
.startOf("day"),
|
||||
() => moment().endOf("day")
|
||||
),
|
||||
value: untilNow(() => moment().subtract(60, "days").startOf("day")),
|
||||
},
|
||||
last_90_days: {
|
||||
name: "Last 90 days",
|
||||
value: untilNow(
|
||||
() =>
|
||||
moment()
|
||||
.subtract(90, "days")
|
||||
.startOf("day"),
|
||||
() => moment().endOf("day")
|
||||
),
|
||||
value: untilNow(() => moment().subtract(90, "days").startOf("day")),
|
||||
},
|
||||
last_12_months: {
|
||||
name: "Last 12 months",
|
||||
value: untilNow(
|
||||
() =>
|
||||
moment()
|
||||
.subtract(12, "months")
|
||||
.startOf("day"),
|
||||
() => moment().endOf("day")
|
||||
),
|
||||
value: untilNow(() => moment().subtract(12, "months").startOf("day")),
|
||||
},
|
||||
last_10_years: {
|
||||
name: "Last 10 years",
|
||||
value: untilNow(() => moment().subtract(10, "years").startOf("day")),
|
||||
},
|
||||
};
|
||||
|
||||
@@ -164,7 +106,7 @@ export function isDynamicDateRangeString(value) {
|
||||
}
|
||||
|
||||
export function getDynamicDateRangeStringFromName(dynamicRangeName) {
|
||||
const key = findKey(DYNAMIC_DATE_RANGES, range => range.name === dynamicRangeName);
|
||||
const key = findKey(DYNAMIC_DATE_RANGES, (range) => range.name === dynamicRangeName);
|
||||
return key ? DYNAMIC_PREFIX + key : undefined;
|
||||
}
|
||||
|
||||
@@ -233,7 +175,7 @@ class DateRangeParameter extends Parameter {
|
||||
|
||||
getExecutionValue() {
|
||||
if (this.hasDynamicValue) {
|
||||
const format = date => date.format(DATETIME_FORMATS[this.type]);
|
||||
const format = (date) => date.format(DATETIME_FORMATS[this.type]);
|
||||
const [start, end] = this.normalizedValue.value().map(format);
|
||||
return { start, end };
|
||||
}
|
||||
|
||||
@@ -58,7 +58,7 @@ class Parameter {
|
||||
|
||||
updateLocals() {
|
||||
if (isArray(this.locals)) {
|
||||
each(this.locals, local => {
|
||||
each(this.locals, (local) => {
|
||||
local.setValue(this.value);
|
||||
});
|
||||
}
|
||||
@@ -117,7 +117,7 @@ class Parameter {
|
||||
|
||||
/** Get a saveable version of the Parameter by omitting unnecessary props */
|
||||
toSaveableObject() {
|
||||
return omit(this, ["$$value", "urlPrefix", "pendingValue", "parentQueryId"]);
|
||||
return omit(this, ["$$value", "urlPrefix", "pendingValue", "parentQueryId", "locals"]);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -9,7 +9,7 @@ const logger = debug("redash:services:QueryResult");
|
||||
const filterTypes = ["filter", "multi-filter", "multiFilter"];
|
||||
|
||||
function defer() {
|
||||
const result = { onStatusChange: status => {} };
|
||||
const result = { onStatusChange: (status) => {} };
|
||||
result.promise = new Promise((resolve, reject) => {
|
||||
result.resolve = resolve;
|
||||
result.reject = reject;
|
||||
@@ -40,13 +40,13 @@ function getColumnNameWithoutType(column) {
|
||||
}
|
||||
|
||||
function getColumnFriendlyName(column) {
|
||||
return getColumnNameWithoutType(column).replace(/(?:^|\s)\S/g, a => a.toUpperCase());
|
||||
return getColumnNameWithoutType(column).replace(/(?:^|\s)\S/g, (a) => a.toUpperCase());
|
||||
}
|
||||
|
||||
const createOrSaveUrl = data => (data.id ? `api/query_results/${data.id}` : "api/query_results");
|
||||
const createOrSaveUrl = (data) => (data.id ? `api/query_results/${data.id}` : "api/query_results");
|
||||
const QueryResultResource = {
|
||||
get: ({ id }) => axios.get(`api/query_results/${id}`),
|
||||
post: data => axios.post(createOrSaveUrl(data), data),
|
||||
post: (data) => axios.post(createOrSaveUrl(data), data),
|
||||
};
|
||||
|
||||
export const ExecutionStatus = {
|
||||
@@ -97,11 +97,11 @@ function handleErrorResponse(queryResult, error) {
|
||||
}
|
||||
|
||||
function sleep(ms) {
|
||||
return new Promise(resolve => setTimeout(resolve, ms));
|
||||
return new Promise((resolve) => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
export function fetchDataFromJob(jobId, interval = 1000) {
|
||||
return axios.get(`api/jobs/${jobId}`).then(data => {
|
||||
return axios.get(`api/jobs/${jobId}`).then((data) => {
|
||||
const status = statuses[data.job.status];
|
||||
if (status === ExecutionStatus.WAITING || status === ExecutionStatus.PROCESSING) {
|
||||
return sleep(interval).then(() => fetchDataFromJob(data.job.id));
|
||||
@@ -146,7 +146,7 @@ class QueryResult {
|
||||
// TODO: we should stop manipulating incoming data, and switch to relaying
|
||||
// on the column type set by the backend. This logic is prone to errors,
|
||||
// and better be removed. Kept for now, for backward compatability.
|
||||
each(this.query_result.data.rows, row => {
|
||||
each(this.query_result.data.rows, (row) => {
|
||||
forOwn(row, (v, k) => {
|
||||
let newType = null;
|
||||
if (isNumber(v)) {
|
||||
@@ -173,7 +173,7 @@ class QueryResult {
|
||||
});
|
||||
});
|
||||
|
||||
each(this.query_result.data.columns, column => {
|
||||
each(this.query_result.data.columns, (column) => {
|
||||
column.name = "" + column.name;
|
||||
if (columnTypes[column.name]) {
|
||||
if (column.type == null || column.type === "string") {
|
||||
@@ -265,14 +265,14 @@ class QueryResult {
|
||||
|
||||
getColumnNames() {
|
||||
if (this.columnNames === undefined && this.query_result.data) {
|
||||
this.columnNames = this.query_result.data.columns.map(v => v.name);
|
||||
this.columnNames = this.query_result.data.columns.map((v) => v.name);
|
||||
}
|
||||
|
||||
return this.columnNames;
|
||||
}
|
||||
|
||||
getColumnFriendlyNames() {
|
||||
return this.getColumnNames().map(col => getColumnFriendlyName(col));
|
||||
return this.getColumnNames().map((col) => getColumnFriendlyName(col));
|
||||
}
|
||||
|
||||
getTruncated() {
|
||||
@@ -286,7 +286,7 @@ class QueryResult {
|
||||
|
||||
const filters = [];
|
||||
|
||||
this.getColumns().forEach(col => {
|
||||
this.getColumns().forEach((col) => {
|
||||
const name = col.name;
|
||||
const type = name.split("::")[1] || name.split("__")[1];
|
||||
if (includes(filterTypes, type)) {
|
||||
@@ -302,8 +302,8 @@ class QueryResult {
|
||||
}
|
||||
}, this);
|
||||
|
||||
this.getRawData().forEach(row => {
|
||||
filters.forEach(filter => {
|
||||
this.getRawData().forEach((row) => {
|
||||
filters.forEach((filter) => {
|
||||
filter.values.push(row[filter.name]);
|
||||
if (filter.values.length === 1) {
|
||||
if (filter.multiple) {
|
||||
@@ -315,8 +315,8 @@ class QueryResult {
|
||||
});
|
||||
});
|
||||
|
||||
filters.forEach(filter => {
|
||||
filter.values = uniqBy(filter.values, v => {
|
||||
filters.forEach((filter) => {
|
||||
filter.values = uniqBy(filter.values, (v) => {
|
||||
if (moment.isMoment(v)) {
|
||||
return v.unix();
|
||||
}
|
||||
@@ -345,12 +345,12 @@ class QueryResult {
|
||||
|
||||
axios
|
||||
.get(`api/queries/${queryId}/results/${id}.json`)
|
||||
.then(response => {
|
||||
.then((response) => {
|
||||
// Success handler
|
||||
queryResult.isLoadingResult = false;
|
||||
queryResult.update(response);
|
||||
})
|
||||
.catch(error => {
|
||||
.catch((error) => {
|
||||
// Error handler
|
||||
queryResult.isLoadingResult = false;
|
||||
handleErrorResponse(queryResult, error);
|
||||
@@ -362,10 +362,10 @@ class QueryResult {
|
||||
loadLatestCachedResult(queryId, parameters) {
|
||||
axios
|
||||
.post(`api/queries/${queryId}/results`, { queryId, parameters })
|
||||
.then(response => {
|
||||
.then((response) => {
|
||||
this.update(response);
|
||||
})
|
||||
.catch(error => {
|
||||
.catch((error) => {
|
||||
handleErrorResponse(this, error);
|
||||
});
|
||||
}
|
||||
@@ -375,11 +375,11 @@ class QueryResult {
|
||||
this.deferred.onStatusChange(ExecutionStatus.LOADING_RESULT);
|
||||
|
||||
QueryResultResource.get({ id: this.job.query_result_id })
|
||||
.then(response => {
|
||||
.then((response) => {
|
||||
this.update(response);
|
||||
this.isLoadingResult = false;
|
||||
})
|
||||
.catch(error => {
|
||||
.catch((error) => {
|
||||
if (tryCount === undefined) {
|
||||
tryCount = 0;
|
||||
}
|
||||
@@ -394,9 +394,12 @@ class QueryResult {
|
||||
});
|
||||
this.isLoadingResult = false;
|
||||
} else {
|
||||
setTimeout(() => {
|
||||
this.loadResult(tryCount + 1);
|
||||
}, 1000 * Math.pow(2, tryCount));
|
||||
setTimeout(
|
||||
() => {
|
||||
this.loadResult(tryCount + 1);
|
||||
},
|
||||
1000 * Math.pow(2, tryCount)
|
||||
);
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -410,19 +413,26 @@ class QueryResult {
|
||||
: axios.get(`api/queries/${query}/jobs/${this.job.id}`);
|
||||
|
||||
request
|
||||
.then(jobResponse => {
|
||||
.then((jobResponse) => {
|
||||
this.update(jobResponse);
|
||||
|
||||
if (this.getStatus() === "processing" && this.job.query_result_id && this.job.query_result_id !== "None") {
|
||||
loadResult();
|
||||
} else if (this.getStatus() !== "failed") {
|
||||
const waitTime = tryNumber > 10 ? 3000 : 500;
|
||||
let waitTime;
|
||||
if (tryNumber <= 10) {
|
||||
waitTime = 500;
|
||||
} else if (tryNumber <= 50) {
|
||||
waitTime = 1000;
|
||||
} else {
|
||||
waitTime = 3000;
|
||||
}
|
||||
setTimeout(() => {
|
||||
this.refreshStatus(query, parameters, tryNumber + 1);
|
||||
}, waitTime);
|
||||
}
|
||||
})
|
||||
.catch(error => {
|
||||
.catch((error) => {
|
||||
logger("Connection error", error);
|
||||
// TODO: use QueryResultError, or better yet: exception/reject of promise.
|
||||
this.update({
|
||||
@@ -451,14 +461,14 @@ class QueryResult {
|
||||
|
||||
axios
|
||||
.post(`api/queries/${id}/results`, { id, parameters, apply_auto_limit: applyAutoLimit, max_age: maxAge })
|
||||
.then(response => {
|
||||
.then((response) => {
|
||||
queryResult.update(response);
|
||||
|
||||
if ("job" in response) {
|
||||
queryResult.refreshStatus(id, parameters);
|
||||
}
|
||||
})
|
||||
.catch(error => {
|
||||
.catch((error) => {
|
||||
handleErrorResponse(queryResult, error);
|
||||
});
|
||||
|
||||
@@ -481,14 +491,14 @@ class QueryResult {
|
||||
}
|
||||
|
||||
QueryResultResource.post(params)
|
||||
.then(response => {
|
||||
.then((response) => {
|
||||
queryResult.update(response);
|
||||
|
||||
if ("job" in response) {
|
||||
queryResult.refreshStatus(query, parameters);
|
||||
}
|
||||
})
|
||||
.catch(error => {
|
||||
.catch((error) => {
|
||||
handleErrorResponse(queryResult, error);
|
||||
});
|
||||
|
||||
|
||||
@@ -63,7 +63,7 @@ function runCypressCI() {
|
||||
CYPRESS_OPTIONS, // eslint-disable-line no-unused-vars
|
||||
} = process.env;
|
||||
|
||||
if (GITHUB_REPOSITORY === "getredash/redash") {
|
||||
if (GITHUB_REPOSITORY === "getredash/redash" && process.env.CYPRESS_RECORD_KEY) {
|
||||
process.env.CYPRESS_OPTIONS = "--record";
|
||||
}
|
||||
|
||||
|
||||
@@ -23,7 +23,7 @@ describe("Dashboard", () => {
|
||||
cy.getByTestId("DashboardSaveButton").click();
|
||||
});
|
||||
|
||||
cy.wait("@NewDashboard").then(xhr => {
|
||||
cy.wait("@NewDashboard").then((xhr) => {
|
||||
const id = Cypress._.get(xhr, "response.body.id");
|
||||
assert.isDefined(id, "Dashboard api call returns id");
|
||||
|
||||
@@ -40,13 +40,9 @@ describe("Dashboard", () => {
|
||||
|
||||
cy.getByTestId("DashboardMoreButton").click();
|
||||
|
||||
cy.getByTestId("DashboardMoreButtonMenu")
|
||||
.contains("Archive")
|
||||
.click();
|
||||
cy.getByTestId("DashboardMoreButtonMenu").contains("Archive").click();
|
||||
|
||||
cy.get(".ant-modal .ant-btn")
|
||||
.contains("Archive")
|
||||
.click({ force: true });
|
||||
cy.get(".ant-modal .ant-btn").contains("Archive").click({ force: true });
|
||||
cy.get(".label-tag-archived").should("exist");
|
||||
|
||||
cy.visit("/dashboards");
|
||||
@@ -60,7 +56,7 @@ describe("Dashboard", () => {
|
||||
cy.server();
|
||||
cy.route("GET", "**/api/dashboards/*").as("LoadDashboard");
|
||||
cy.createDashboard("Dashboard multiple urls").then(({ id, slug }) => {
|
||||
[`/dashboards/${id}`, `/dashboards/${id}-anything-here`, `/dashboard/${slug}`].forEach(url => {
|
||||
[`/dashboards/${id}`, `/dashboards/${id}-anything-here`, `/dashboard/${slug}`].forEach((url) => {
|
||||
cy.visit(url);
|
||||
cy.wait("@LoadDashboard");
|
||||
cy.getByTestId(`DashboardId${id}Container`).should("exist");
|
||||
@@ -72,7 +68,7 @@ describe("Dashboard", () => {
|
||||
});
|
||||
|
||||
context("viewport width is at 800px", () => {
|
||||
before(function() {
|
||||
before(function () {
|
||||
cy.login();
|
||||
cy.createDashboard("Foo Bar")
|
||||
.then(({ id }) => {
|
||||
@@ -80,49 +76,42 @@ describe("Dashboard", () => {
|
||||
this.dashboardEditUrl = `/dashboards/${id}?edit`;
|
||||
return cy.addTextbox(id, "Hello World!").then(getWidgetTestId);
|
||||
})
|
||||
.then(elTestId => {
|
||||
.then((elTestId) => {
|
||||
cy.visit(this.dashboardUrl);
|
||||
cy.getByTestId(elTestId).as("textboxEl");
|
||||
});
|
||||
});
|
||||
|
||||
beforeEach(function() {
|
||||
beforeEach(function () {
|
||||
cy.login();
|
||||
cy.visit(this.dashboardUrl);
|
||||
cy.viewport(800 + menuWidth, 800);
|
||||
});
|
||||
|
||||
it("shows widgets with full width", () => {
|
||||
cy.get("@textboxEl").should($el => {
|
||||
cy.get("@textboxEl").should(($el) => {
|
||||
expect($el.width()).to.eq(770);
|
||||
});
|
||||
|
||||
cy.viewport(801 + menuWidth, 800);
|
||||
cy.get("@textboxEl").should($el => {
|
||||
expect($el.width()).to.eq(378);
|
||||
cy.get("@textboxEl").should(($el) => {
|
||||
expect($el.width()).to.eq(182);
|
||||
});
|
||||
});
|
||||
|
||||
it("hides edit option", () => {
|
||||
cy.getByTestId("DashboardMoreButton")
|
||||
.click()
|
||||
.should("be.visible");
|
||||
cy.getByTestId("DashboardMoreButton").click().should("be.visible");
|
||||
|
||||
cy.getByTestId("DashboardMoreButtonMenu")
|
||||
.contains("Edit")
|
||||
.as("editButton")
|
||||
.should("not.be.visible");
|
||||
cy.getByTestId("DashboardMoreButtonMenu").contains("Edit").as("editButton").should("not.be.visible");
|
||||
|
||||
cy.viewport(801 + menuWidth, 800);
|
||||
cy.get("@editButton").should("be.visible");
|
||||
});
|
||||
|
||||
it("disables edit mode", function() {
|
||||
it("disables edit mode", function () {
|
||||
cy.viewport(801 + menuWidth, 800);
|
||||
cy.visit(this.dashboardEditUrl);
|
||||
cy.contains("button", "Done Editing")
|
||||
.as("saveButton")
|
||||
.should("exist");
|
||||
cy.contains("button", "Done Editing").as("saveButton").should("exist");
|
||||
|
||||
cy.viewport(800 + menuWidth, 800);
|
||||
cy.contains("button", "Done Editing").should("not.exist");
|
||||
@@ -130,14 +119,14 @@ describe("Dashboard", () => {
|
||||
});
|
||||
|
||||
context("viewport width is at 767px", () => {
|
||||
before(function() {
|
||||
before(function () {
|
||||
cy.login();
|
||||
cy.createDashboard("Foo Bar").then(({ id }) => {
|
||||
this.dashboardUrl = `/dashboards/${id}`;
|
||||
});
|
||||
});
|
||||
|
||||
beforeEach(function() {
|
||||
beforeEach(function () {
|
||||
cy.visit(this.dashboardUrl);
|
||||
cy.viewport(767, 800);
|
||||
});
|
||||
|
||||
@@ -23,7 +23,7 @@ describe("Dashboard Filters", () => {
|
||||
name: "Query Filters",
|
||||
query: `SELECT stage1 AS "stage1::filter", stage2, value FROM (${SQL}) q`,
|
||||
};
|
||||
cy.createDashboard("Dashboard Filters").then(dashboard => {
|
||||
cy.createDashboard("Dashboard Filters").then((dashboard) => {
|
||||
createQueryAndAddWidget(dashboard.id, queryData)
|
||||
.as("widget1TestId")
|
||||
.then(() => createQueryAndAddWidget(dashboard.id, queryData, { position: { col: 4 } }))
|
||||
@@ -32,26 +32,23 @@ describe("Dashboard Filters", () => {
|
||||
});
|
||||
});
|
||||
|
||||
it("filters rows in a Table Visualization", function() {
|
||||
it("filters rows in a Table Visualization", function () {
|
||||
editDashboard();
|
||||
cy.getByTestId("DashboardFilters").should("not.exist");
|
||||
cy.getByTestId("DashboardFiltersCheckbox").click();
|
||||
|
||||
cy.getByTestId("DashboardFilters").within(() => {
|
||||
cy.getByTestId("FilterName-stage1::filter")
|
||||
.find(".ant-select-selection-item")
|
||||
.should("have.text", "a");
|
||||
cy.getByTestId("FilterName-stage1::filter").find(".ant-select-selection-item").should("have.text", "a");
|
||||
});
|
||||
|
||||
cy.getByTestId(this.widget1TestId).within(() => {
|
||||
expectTableToHaveLength(4);
|
||||
expectFirstColumnToHaveMembers(["a", "a", "a", "a"]);
|
||||
|
||||
cy.getByTestId("FilterName-stage1::filter")
|
||||
.find(".ant-select")
|
||||
.click();
|
||||
cy.getByTestId("FilterName-stage1::filter").find(".ant-select").click();
|
||||
});
|
||||
|
||||
cy.wait(1500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
cy.contains(".ant-select-item-option-content:visible", "b").click();
|
||||
|
||||
cy.getByTestId(this.widget1TestId).within(() => {
|
||||
@@ -69,14 +66,13 @@ describe("Dashboard Filters", () => {
|
||||
// assert that changing a global filter affects all widgets
|
||||
|
||||
cy.getByTestId("DashboardFilters").within(() => {
|
||||
cy.getByTestId("FilterName-stage1::filter")
|
||||
.find(".ant-select")
|
||||
.click();
|
||||
cy.getByTestId("FilterName-stage1::filter").find(".ant-select").click();
|
||||
});
|
||||
|
||||
cy.wait(1500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
cy.contains(".ant-select-item-option-content:visible", "c").click();
|
||||
|
||||
[this.widget1TestId, this.widget2TestId].forEach(widgetTestId =>
|
||||
[this.widget1TestId, this.widget2TestId].forEach((widgetTestId) =>
|
||||
cy.getByTestId(widgetTestId).within(() => {
|
||||
expectTableToHaveLength(4);
|
||||
expectFirstColumnToHaveMembers(["c", "c", "c", "c"]);
|
||||
|
||||
@@ -5,7 +5,7 @@ import { getWidgetTestId, editDashboard, resizeBy } from "../../support/dashboar
|
||||
const menuWidth = 80;
|
||||
|
||||
describe("Grid compliant widgets", () => {
|
||||
beforeEach(function() {
|
||||
beforeEach(function () {
|
||||
cy.login();
|
||||
cy.viewport(1215 + menuWidth, 800);
|
||||
cy.createDashboard("Foo Bar")
|
||||
@@ -13,7 +13,7 @@ describe("Grid compliant widgets", () => {
|
||||
this.dashboardUrl = `/dashboards/${id}`;
|
||||
return cy.addTextbox(id, "Hello World!").then(getWidgetTestId);
|
||||
})
|
||||
.then(elTestId => {
|
||||
.then((elTestId) => {
|
||||
cy.visit(this.dashboardUrl);
|
||||
cy.getByTestId(elTestId).as("textboxEl");
|
||||
});
|
||||
@@ -27,7 +27,7 @@ describe("Grid compliant widgets", () => {
|
||||
|
||||
it("stays put when dragged under snap threshold", () => {
|
||||
cy.get("@textboxEl")
|
||||
.dragBy(90)
|
||||
.dragBy(30)
|
||||
.invoke("offset")
|
||||
.should("have.property", "left", 15 + menuWidth); // no change, 15 -> 15
|
||||
});
|
||||
@@ -36,14 +36,14 @@ describe("Grid compliant widgets", () => {
|
||||
cy.get("@textboxEl")
|
||||
.dragBy(110)
|
||||
.invoke("offset")
|
||||
.should("have.property", "left", 215 + menuWidth); // moved by 200, 15 -> 215
|
||||
.should("have.property", "left", 115 + menuWidth); // moved by 100, 15 -> 115
|
||||
});
|
||||
|
||||
it("moves two columns when dragged over snap threshold", () => {
|
||||
cy.get("@textboxEl")
|
||||
.dragBy(330)
|
||||
.dragBy(200)
|
||||
.invoke("offset")
|
||||
.should("have.property", "left", 415 + menuWidth); // moved by 400, 15 -> 415
|
||||
.should("have.property", "left", 215 + menuWidth); // moved by 200, 15 -> 215
|
||||
});
|
||||
});
|
||||
|
||||
@@ -52,7 +52,7 @@ describe("Grid compliant widgets", () => {
|
||||
cy.route("POST", "**/api/widgets/*").as("WidgetSave");
|
||||
|
||||
editDashboard();
|
||||
cy.get("@textboxEl").dragBy(330);
|
||||
cy.get("@textboxEl").dragBy(100);
|
||||
cy.wait("@WidgetSave");
|
||||
});
|
||||
});
|
||||
@@ -64,24 +64,24 @@ describe("Grid compliant widgets", () => {
|
||||
});
|
||||
|
||||
it("stays put when dragged under snap threshold", () => {
|
||||
resizeBy(cy.get("@textboxEl"), 90)
|
||||
resizeBy(cy.get("@textboxEl"), 30)
|
||||
.then(() => cy.get("@textboxEl"))
|
||||
.invoke("width")
|
||||
.should("eq", 585); // no change, 585 -> 585
|
||||
.should("eq", 285); // no change, 285 -> 285
|
||||
});
|
||||
|
||||
it("moves one column when dragged over snap threshold", () => {
|
||||
resizeBy(cy.get("@textboxEl"), 110)
|
||||
.then(() => cy.get("@textboxEl"))
|
||||
.invoke("width")
|
||||
.should("eq", 785); // resized by 200, 585 -> 785
|
||||
.should("eq", 385); // resized by 200, 185 -> 385
|
||||
});
|
||||
|
||||
it("moves two columns when dragged over snap threshold", () => {
|
||||
resizeBy(cy.get("@textboxEl"), 400)
|
||||
.then(() => cy.get("@textboxEl"))
|
||||
.invoke("width")
|
||||
.should("eq", 985); // resized by 400, 585 -> 985
|
||||
.should("eq", 685); // resized by 400, 285 -> 685
|
||||
});
|
||||
});
|
||||
|
||||
@@ -101,16 +101,16 @@ describe("Grid compliant widgets", () => {
|
||||
resizeBy(cy.get("@textboxEl"), 0, 30)
|
||||
.then(() => cy.get("@textboxEl"))
|
||||
.invoke("height")
|
||||
.should("eq", 185); // resized by 50, , 135 -> 185
|
||||
.should("eq", 185);
|
||||
});
|
||||
|
||||
it("shrinks to minimum", () => {
|
||||
cy.get("@textboxEl")
|
||||
.then($el => resizeBy(cy.get("@textboxEl"), -$el.width(), -$el.height())) // resize to 0,0
|
||||
.then(($el) => resizeBy(cy.get("@textboxEl"), -$el.width(), -$el.height())) // resize to 0,0
|
||||
.then(() => cy.get("@textboxEl"))
|
||||
.should($el => {
|
||||
.should(($el) => {
|
||||
expect($el.width()).to.eq(185); // min textbox width
|
||||
expect($el.height()).to.eq(35); // min textbox height
|
||||
expect($el.height()).to.eq(85); // min textbox height
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
import { getWidgetTestId, editDashboard } from "../../support/dashboard";
|
||||
|
||||
describe("Textbox", () => {
|
||||
beforeEach(function() {
|
||||
beforeEach(function () {
|
||||
cy.login();
|
||||
cy.createDashboard("Foo Bar").then(({ id }) => {
|
||||
this.dashboardId = id;
|
||||
@@ -12,12 +12,10 @@ describe("Textbox", () => {
|
||||
});
|
||||
|
||||
const confirmDeletionInModal = () => {
|
||||
cy.get(".ant-modal .ant-btn")
|
||||
.contains("Delete")
|
||||
.click({ force: true });
|
||||
cy.get(".ant-modal .ant-btn").contains("Delete").click({ force: true });
|
||||
};
|
||||
|
||||
it("adds textbox", function() {
|
||||
it("adds textbox", function () {
|
||||
cy.visit(this.dashboardUrl);
|
||||
editDashboard();
|
||||
cy.getByTestId("AddTextboxButton").click();
|
||||
@@ -29,10 +27,10 @@ describe("Textbox", () => {
|
||||
cy.get(".widget-text").should("exist");
|
||||
});
|
||||
|
||||
it("removes textbox by X button", function() {
|
||||
it("removes textbox by X button", function () {
|
||||
cy.addTextbox(this.dashboardId, "Hello World!")
|
||||
.then(getWidgetTestId)
|
||||
.then(elTestId => {
|
||||
.then((elTestId) => {
|
||||
cy.visit(this.dashboardUrl);
|
||||
editDashboard();
|
||||
|
||||
@@ -45,32 +43,30 @@ describe("Textbox", () => {
|
||||
});
|
||||
});
|
||||
|
||||
it("removes textbox by menu", function() {
|
||||
it("removes textbox by menu", function () {
|
||||
cy.addTextbox(this.dashboardId, "Hello World!")
|
||||
.then(getWidgetTestId)
|
||||
.then(elTestId => {
|
||||
.then((elTestId) => {
|
||||
cy.visit(this.dashboardUrl);
|
||||
cy.getByTestId(elTestId).within(() => {
|
||||
cy.getByTestId("WidgetDropdownButton").click();
|
||||
});
|
||||
cy.getByTestId("WidgetDropdownButtonMenu")
|
||||
.contains("Remove from Dashboard")
|
||||
.click();
|
||||
cy.getByTestId("WidgetDropdownButtonMenu").contains("Remove from Dashboard").click();
|
||||
|
||||
confirmDeletionInModal();
|
||||
cy.getByTestId(elTestId).should("not.exist");
|
||||
});
|
||||
});
|
||||
|
||||
it("allows opening menu after removal", function() {
|
||||
it("allows opening menu after removal", function () {
|
||||
let elTestId1;
|
||||
cy.addTextbox(this.dashboardId, "txb 1")
|
||||
.then(getWidgetTestId)
|
||||
.then(elTestId => {
|
||||
.then((elTestId) => {
|
||||
elTestId1 = elTestId;
|
||||
return cy.addTextbox(this.dashboardId, "txb 2").then(getWidgetTestId);
|
||||
})
|
||||
.then(elTestId2 => {
|
||||
.then((elTestId2) => {
|
||||
cy.visit(this.dashboardUrl);
|
||||
editDashboard();
|
||||
|
||||
@@ -97,10 +93,10 @@ describe("Textbox", () => {
|
||||
});
|
||||
});
|
||||
|
||||
it("edits textbox", function() {
|
||||
it("edits textbox", function () {
|
||||
cy.addTextbox(this.dashboardId, "Hello World!")
|
||||
.then(getWidgetTestId)
|
||||
.then(elTestId => {
|
||||
.then((elTestId) => {
|
||||
cy.visit(this.dashboardUrl);
|
||||
cy.getByTestId(elTestId)
|
||||
.as("textboxEl")
|
||||
@@ -108,17 +104,13 @@ describe("Textbox", () => {
|
||||
cy.getByTestId("WidgetDropdownButton").click();
|
||||
});
|
||||
|
||||
cy.getByTestId("WidgetDropdownButtonMenu")
|
||||
.contains("Edit")
|
||||
.click();
|
||||
cy.getByTestId("WidgetDropdownButtonMenu").contains("Edit").click();
|
||||
|
||||
const newContent = "[edited]";
|
||||
cy.getByTestId("TextboxDialog")
|
||||
.should("exist")
|
||||
.within(() => {
|
||||
cy.get("textarea")
|
||||
.clear()
|
||||
.type(newContent);
|
||||
cy.get("textarea").clear().type(newContent);
|
||||
cy.contains("button", "Save").click();
|
||||
});
|
||||
|
||||
@@ -126,7 +118,7 @@ describe("Textbox", () => {
|
||||
});
|
||||
});
|
||||
|
||||
it("renders textbox according to position configuration", function() {
|
||||
it("renders textbox according to position configuration", function () {
|
||||
const id = this.dashboardId;
|
||||
const txb1Pos = { col: 0, row: 0, sizeX: 3, sizeY: 2 };
|
||||
const txb2Pos = { col: 1, row: 1, sizeX: 3, sizeY: 4 };
|
||||
@@ -135,15 +127,15 @@ describe("Textbox", () => {
|
||||
cy.addTextbox(id, "x", { position: txb1Pos })
|
||||
.then(() => cy.addTextbox(id, "x", { position: txb2Pos }))
|
||||
.then(getWidgetTestId)
|
||||
.then(elTestId => {
|
||||
.then((elTestId) => {
|
||||
cy.visit(this.dashboardUrl);
|
||||
return cy.getByTestId(elTestId);
|
||||
})
|
||||
.should($el => {
|
||||
.should(($el) => {
|
||||
const { top, left } = $el.offset();
|
||||
expect(top).to.be.oneOf([162, 162.015625]);
|
||||
expect(left).to.eq(282);
|
||||
expect($el.width()).to.eq(545);
|
||||
expect(left).to.eq(188);
|
||||
expect($el.width()).to.eq(265);
|
||||
expect($el.height()).to.eq(185);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -5,8 +5,9 @@ describe("Embedded Queries", () => {
|
||||
});
|
||||
|
||||
it("is unavailable when public urls feature is disabled", () => {
|
||||
cy.createQuery({ query: "select name from users order by name" }).then(query => {
|
||||
cy.createQuery({ query: "select name from users order by name" }).then((query) => {
|
||||
cy.visit(`/queries/${query.id}/source`);
|
||||
cy.wait(1500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
cy.getByTestId("ExecuteButton").click();
|
||||
cy.getByTestId("QueryPageVisualizationTabs", { timeout: 10000 }).should("exist");
|
||||
cy.clickThrough(`
|
||||
@@ -15,7 +16,7 @@ describe("Embedded Queries", () => {
|
||||
`);
|
||||
cy.getByTestId("EmbedIframe")
|
||||
.invoke("text")
|
||||
.then(embedUrl => {
|
||||
.then((embedUrl) => {
|
||||
// disable the feature
|
||||
cy.updateOrgSettings({ disable_public_urls: true });
|
||||
|
||||
@@ -23,9 +24,7 @@ describe("Embedded Queries", () => {
|
||||
cy.visit(`/queries/${query.id}/source`);
|
||||
cy.getByTestId("QueryPageVisualizationTabs", { timeout: 10000 }).should("exist");
|
||||
cy.getByTestId("QueryPageHeaderMoreButton").click();
|
||||
cy.get(".ant-dropdown-menu-item")
|
||||
.should("exist")
|
||||
.should("not.contain", "Show API Key");
|
||||
cy.get(".ant-dropdown-menu-item").should("exist").should("not.contain", "Show API Key");
|
||||
cy.getByTestId("QueryControlDropdownButton").click();
|
||||
cy.get(".ant-dropdown-menu-item").should("exist");
|
||||
cy.getByTestId("ShowEmbedDialogButton").should("not.exist");
|
||||
@@ -42,8 +41,9 @@ describe("Embedded Queries", () => {
|
||||
});
|
||||
|
||||
it("can be shared without parameters", () => {
|
||||
cy.createQuery({ query: "select name from users order by name" }).then(query => {
|
||||
cy.createQuery({ query: "select name from users order by name" }).then((query) => {
|
||||
cy.visit(`/queries/${query.id}/source`);
|
||||
cy.wait(1500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
cy.getByTestId("ExecuteButton").click();
|
||||
cy.getByTestId("QueryPageVisualizationTabs", { timeout: 10000 }).should("exist");
|
||||
cy.clickThrough(`
|
||||
@@ -52,7 +52,7 @@ describe("Embedded Queries", () => {
|
||||
`);
|
||||
cy.getByTestId("EmbedIframe")
|
||||
.invoke("text")
|
||||
.then(embedUrl => {
|
||||
.then((embedUrl) => {
|
||||
cy.logout();
|
||||
cy.visit(embedUrl);
|
||||
cy.getByTestId("VisualizationEmbed", { timeout: 10000 }).should("exist");
|
||||
@@ -90,7 +90,7 @@ describe("Embedded Queries", () => {
|
||||
|
||||
cy.getByTestId("EmbedIframe")
|
||||
.invoke("text")
|
||||
.then(embedUrl => {
|
||||
.then((embedUrl) => {
|
||||
cy.logout();
|
||||
cy.visit(embedUrl);
|
||||
cy.getByTestId("VisualizationEmbed", { timeout: 10000 }).should("exist");
|
||||
|
||||
@@ -44,6 +44,7 @@ describe("Box Plot", () => {
|
||||
.then(({ id }) => cy.createVisualization(id, "BOXPLOT", "Boxplot (Deprecated)", {}))
|
||||
.then(({ id: visualizationId, query_id: queryId }) => {
|
||||
cy.visit(`queries/${queryId}/source#${visualizationId}`);
|
||||
cy.wait(1500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
cy.getByTestId("ExecuteButton").click();
|
||||
});
|
||||
});
|
||||
@@ -61,9 +62,7 @@ describe("Box Plot", () => {
|
||||
// Wait for proper initialization of visualization
|
||||
cy.wait(500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
|
||||
cy.getByTestId("VisualizationPreview")
|
||||
.find("svg")
|
||||
.should("exist");
|
||||
cy.getByTestId("VisualizationPreview").find("svg").should("exist");
|
||||
|
||||
cy.percySnapshot("Visualizations - Box Plot", { widths: [viewportWidth] });
|
||||
});
|
||||
|
||||
@@ -31,6 +31,7 @@ describe("Chart", () => {
|
||||
|
||||
it("creates Bar charts", function () {
|
||||
cy.visit(`queries/${this.queryId}/source`);
|
||||
cy.wait(1500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
cy.getByTestId("ExecuteButton").click();
|
||||
|
||||
const getBarChartAssertionFunction =
|
||||
@@ -109,6 +110,7 @@ describe("Chart", () => {
|
||||
});
|
||||
it("colors Bar charts", function () {
|
||||
cy.visit(`queries/${this.queryId}/source`);
|
||||
cy.wait(1500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
cy.getByTestId("ExecuteButton").click();
|
||||
cy.getByTestId("NewVisualization").click();
|
||||
cy.getByTestId("Chart.ColumnMapping.x").selectAntdOption("Chart.ColumnMapping.x.stage");
|
||||
@@ -123,6 +125,7 @@ describe("Chart", () => {
|
||||
});
|
||||
it("colors Pie charts", function () {
|
||||
cy.visit(`queries/${this.queryId}/source`);
|
||||
cy.wait(1500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
cy.getByTestId("ExecuteButton").click();
|
||||
cy.getByTestId("NewVisualization").click();
|
||||
cy.getByTestId("Chart.GlobalSeriesType").click();
|
||||
|
||||
@@ -34,6 +34,7 @@ describe("Choropleth", () => {
|
||||
cy.login();
|
||||
cy.createQuery({ query: SQL }).then(({ id }) => {
|
||||
cy.visit(`queries/${id}/source`);
|
||||
cy.wait(1500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
cy.getByTestId("ExecuteButton").click();
|
||||
});
|
||||
cy.getByTestId("NewVisualization").click();
|
||||
@@ -76,9 +77,7 @@ describe("Choropleth", () => {
|
||||
|
||||
// Wait for proper initialization of visualization
|
||||
cy.wait(500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
cy.getByTestId("VisualizationPreview")
|
||||
.find(".map-visualization-container.leaflet-container")
|
||||
.should("exist");
|
||||
cy.getByTestId("VisualizationPreview").find(".map-visualization-container.leaflet-container").should("exist");
|
||||
|
||||
cy.percySnapshot("Visualizations - Choropleth", { widths: [viewportWidth] });
|
||||
});
|
||||
|
||||
@@ -24,6 +24,7 @@ describe("Cohort", () => {
|
||||
cy.login();
|
||||
cy.createQuery({ query: SQL }).then(({ id }) => {
|
||||
cy.visit(`queries/${id}/source`);
|
||||
cy.wait(1500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
cy.getByTestId("ExecuteButton").click();
|
||||
});
|
||||
cy.getByTestId("NewVisualization").click();
|
||||
@@ -51,9 +52,7 @@ describe("Cohort", () => {
|
||||
|
||||
// Wait for proper initialization of visualization
|
||||
cy.wait(500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
cy.getByTestId("VisualizationPreview")
|
||||
.find("table")
|
||||
.should("exist");
|
||||
cy.getByTestId("VisualizationPreview").find("table").should("exist");
|
||||
cy.percySnapshot("Visualizations - Cohort (simple)", { widths: [viewportWidth] });
|
||||
|
||||
cy.clickThrough(`
|
||||
@@ -64,9 +63,7 @@ describe("Cohort", () => {
|
||||
|
||||
// Wait for proper initialization of visualization
|
||||
cy.wait(500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
cy.getByTestId("VisualizationPreview")
|
||||
.find("table")
|
||||
.should("exist");
|
||||
cy.getByTestId("VisualizationPreview").find("table").should("exist");
|
||||
cy.percySnapshot("Visualizations - Cohort (diagonal)", { widths: [viewportWidth] });
|
||||
});
|
||||
});
|
||||
|
||||
@@ -12,6 +12,7 @@ describe("Counter", () => {
|
||||
cy.login();
|
||||
cy.createQuery({ query: SQL }).then(({ id }) => {
|
||||
cy.visit(`queries/${id}/source`);
|
||||
cy.wait(1500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
cy.getByTestId("ExecuteButton").click();
|
||||
});
|
||||
cy.getByTestId("NewVisualization").click();
|
||||
@@ -24,9 +25,7 @@ describe("Counter", () => {
|
||||
Counter.General.ValueColumn.a
|
||||
`);
|
||||
|
||||
cy.getByTestId("VisualizationPreview")
|
||||
.find(".counter-visualization-container")
|
||||
.should("exist");
|
||||
cy.getByTestId("VisualizationPreview").find(".counter-visualization-container").should("exist");
|
||||
|
||||
// wait a bit before taking snapshot
|
||||
cy.wait(500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
@@ -43,9 +42,7 @@ describe("Counter", () => {
|
||||
"Counter.General.Label": "Custom Label",
|
||||
});
|
||||
|
||||
cy.getByTestId("VisualizationPreview")
|
||||
.find(".counter-visualization-container")
|
||||
.should("exist");
|
||||
cy.getByTestId("VisualizationPreview").find(".counter-visualization-container").should("exist");
|
||||
|
||||
// wait a bit before taking snapshot
|
||||
cy.wait(500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
@@ -65,9 +62,7 @@ describe("Counter", () => {
|
||||
"Counter.General.TargetValueRowNumber": "2",
|
||||
});
|
||||
|
||||
cy.getByTestId("VisualizationPreview")
|
||||
.find(".counter-visualization-container")
|
||||
.should("exist");
|
||||
cy.getByTestId("VisualizationPreview").find(".counter-visualization-container").should("exist");
|
||||
|
||||
// wait a bit before taking snapshot
|
||||
cy.wait(500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
@@ -83,9 +78,7 @@ describe("Counter", () => {
|
||||
Counter.General.TargetValueColumn.b
|
||||
`);
|
||||
|
||||
cy.getByTestId("VisualizationPreview")
|
||||
.find(".counter-visualization-container")
|
||||
.should("exist");
|
||||
cy.getByTestId("VisualizationPreview").find(".counter-visualization-container").should("exist");
|
||||
|
||||
// wait a bit before taking snapshot
|
||||
cy.wait(500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
@@ -106,9 +99,7 @@ describe("Counter", () => {
|
||||
"Counter.General.TargetValueRowNumber": "2",
|
||||
});
|
||||
|
||||
cy.getByTestId("VisualizationPreview")
|
||||
.find(".counter-visualization-container")
|
||||
.should("exist");
|
||||
cy.getByTestId("VisualizationPreview").find(".counter-visualization-container").should("exist");
|
||||
|
||||
// wait a bit before taking snapshot
|
||||
cy.wait(500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
@@ -123,9 +114,7 @@ describe("Counter", () => {
|
||||
Counter.General.CountRows
|
||||
`);
|
||||
|
||||
cy.getByTestId("VisualizationPreview")
|
||||
.find(".counter-visualization-container")
|
||||
.should("exist");
|
||||
cy.getByTestId("VisualizationPreview").find(".counter-visualization-container").should("exist");
|
||||
|
||||
// wait a bit before taking snapshot
|
||||
cy.wait(500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
@@ -151,9 +140,7 @@ describe("Counter", () => {
|
||||
"Counter.Formatting.StringSuffix": "%",
|
||||
});
|
||||
|
||||
cy.getByTestId("VisualizationPreview")
|
||||
.find(".counter-visualization-container")
|
||||
.should("exist");
|
||||
cy.getByTestId("VisualizationPreview").find(".counter-visualization-container").should("exist");
|
||||
|
||||
// wait a bit before taking snapshot
|
||||
cy.wait(500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
@@ -180,9 +167,7 @@ describe("Counter", () => {
|
||||
"Counter.Formatting.StringSuffix": "%",
|
||||
});
|
||||
|
||||
cy.getByTestId("VisualizationPreview")
|
||||
.find(".counter-visualization-container")
|
||||
.should("exist");
|
||||
cy.getByTestId("VisualizationPreview").find(".counter-visualization-container").should("exist");
|
||||
|
||||
// wait a bit before taking snapshot
|
||||
cy.wait(500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
|
||||
@@ -5,34 +5,25 @@ describe("Edit visualization dialog", () => {
|
||||
cy.login();
|
||||
cy.createQuery().then(({ id }) => {
|
||||
cy.visit(`queries/${id}/source`);
|
||||
cy.wait(1500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
cy.getByTestId("ExecuteButton").click();
|
||||
});
|
||||
});
|
||||
|
||||
it("opens New Visualization dialog", () => {
|
||||
cy.getByTestId("NewVisualization")
|
||||
.should("exist")
|
||||
.click();
|
||||
cy.getByTestId("NewVisualization").should("exist").click();
|
||||
cy.getByTestId("EditVisualizationDialog").should("exist");
|
||||
// Default visualization should be selected
|
||||
cy.getByTestId("VisualizationType")
|
||||
.should("exist")
|
||||
.should("contain", "Chart");
|
||||
cy.getByTestId("VisualizationName")
|
||||
.should("exist")
|
||||
.should("have.value", "Chart");
|
||||
cy.getByTestId("VisualizationType").should("exist").should("contain", "Chart");
|
||||
cy.getByTestId("VisualizationName").should("exist").should("have.value", "Chart");
|
||||
});
|
||||
|
||||
it("opens Edit Visualization dialog", () => {
|
||||
cy.getByTestId("EditVisualization").click();
|
||||
cy.getByTestId("EditVisualizationDialog").should("exist");
|
||||
// Default `Table` visualization should be selected
|
||||
cy.getByTestId("VisualizationType")
|
||||
.should("exist")
|
||||
.should("contain", "Table");
|
||||
cy.getByTestId("VisualizationName")
|
||||
.should("exist")
|
||||
.should("have.value", "Table");
|
||||
cy.getByTestId("VisualizationType").should("exist").should("contain", "Table");
|
||||
cy.getByTestId("VisualizationName").should("exist").should("have.value", "Table");
|
||||
});
|
||||
|
||||
it("creates visualization with custom name", () => {
|
||||
@@ -44,15 +35,9 @@ describe("Edit visualization dialog", () => {
|
||||
VisualizationType.TABLE
|
||||
`);
|
||||
|
||||
cy.getByTestId("VisualizationName")
|
||||
.clear()
|
||||
.type(visualizationName);
|
||||
cy.getByTestId("VisualizationName").clear().type(visualizationName);
|
||||
|
||||
cy.getByTestId("EditVisualizationDialog")
|
||||
.contains("button", "Save")
|
||||
.click();
|
||||
cy.getByTestId("QueryPageVisualizationTabs")
|
||||
.contains("span", visualizationName)
|
||||
.should("exist");
|
||||
cy.getByTestId("EditVisualizationDialog").contains("button", "Save").click();
|
||||
cy.getByTestId("QueryPageVisualizationTabs").contains("span", visualizationName).should("exist");
|
||||
});
|
||||
});
|
||||
|
||||
@@ -25,6 +25,7 @@ describe("Funnel", () => {
|
||||
cy.login();
|
||||
cy.createQuery({ query: SQL }).then(({ id }) => {
|
||||
cy.visit(`queries/${id}/source`);
|
||||
cy.wait(1500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
cy.getByTestId("ExecuteButton").click();
|
||||
});
|
||||
});
|
||||
@@ -59,9 +60,7 @@ describe("Funnel", () => {
|
||||
|
||||
// Wait for proper initialization of visualization
|
||||
cy.wait(500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
cy.getByTestId("VisualizationPreview")
|
||||
.find("table")
|
||||
.should("exist");
|
||||
cy.getByTestId("VisualizationPreview").find("table").should("exist");
|
||||
cy.percySnapshot("Visualizations - Funnel (basic)", { widths: [viewportWidth] });
|
||||
|
||||
cy.clickThrough(`
|
||||
@@ -81,9 +80,7 @@ describe("Funnel", () => {
|
||||
|
||||
// Wait for proper initialization of visualization
|
||||
cy.wait(500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
cy.getByTestId("VisualizationPreview")
|
||||
.find("table")
|
||||
.should("exist");
|
||||
cy.getByTestId("VisualizationPreview").find("table").should("exist");
|
||||
cy.percySnapshot("Visualizations - Funnel (extra options)", { widths: [viewportWidth] });
|
||||
});
|
||||
});
|
||||
|
||||
@@ -24,6 +24,7 @@ describe("Map (Markers)", () => {
|
||||
.then(({ id }) => cy.createVisualization(id, "MAP", "Map (Markers)", { mapTileUrl }))
|
||||
.then(({ id: visualizationId, query_id: queryId }) => {
|
||||
cy.visit(`queries/${queryId}/source#${visualizationId}`);
|
||||
cy.wait(1500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
cy.getByTestId("ExecuteButton").click();
|
||||
});
|
||||
});
|
||||
@@ -51,9 +52,7 @@ describe("Map (Markers)", () => {
|
||||
cy.fillInputs({ "ColorPicker.CustomColor": "blue{enter}" });
|
||||
cy.getByTestId("ColorPicker.CustomColor").should("not.be.visible");
|
||||
|
||||
cy.getByTestId("VisualizationPreview")
|
||||
.find(".leaflet-control-zoom-in")
|
||||
.click();
|
||||
cy.getByTestId("VisualizationPreview").find(".leaflet-control-zoom-in").click();
|
||||
|
||||
// Wait for proper initialization of visualization
|
||||
cy.wait(1000); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
@@ -85,9 +84,7 @@ describe("Map (Markers)", () => {
|
||||
cy.fillInputs({ "ColorPicker.CustomColor": "maroon{enter}" });
|
||||
cy.getByTestId("ColorPicker.CustomColor").should("not.be.visible");
|
||||
|
||||
cy.getByTestId("VisualizationPreview")
|
||||
.find(".leaflet-control-zoom-in")
|
||||
.click();
|
||||
cy.getByTestId("VisualizationPreview").find(".leaflet-control-zoom-in").click();
|
||||
|
||||
// Wait for proper initialization of visualization
|
||||
cy.wait(1000); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
|
||||
@@ -19,9 +19,7 @@ const SQL = `
|
||||
function createPivotThroughUI(visualizationName, options = {}) {
|
||||
cy.getByTestId("NewVisualization").click();
|
||||
cy.getByTestId("VisualizationType").selectAntdOption("VisualizationType.PIVOT");
|
||||
cy.getByTestId("VisualizationName")
|
||||
.clear()
|
||||
.type(visualizationName);
|
||||
cy.getByTestId("VisualizationName").clear().type(visualizationName);
|
||||
if (options.hideControls) {
|
||||
cy.getByTestId("PivotEditor.HideControls").click();
|
||||
cy.getByTestId("VisualizationPreview")
|
||||
@@ -29,36 +27,30 @@ function createPivotThroughUI(visualizationName, options = {}) {
|
||||
.find(".pvtAxisContainer, .pvtRenderer, .pvtVals")
|
||||
.should("be.not.visible");
|
||||
}
|
||||
cy.getByTestId("VisualizationPreview")
|
||||
.find("table")
|
||||
.should("exist");
|
||||
cy.getByTestId("EditVisualizationDialog")
|
||||
.contains("button", "Save")
|
||||
.click();
|
||||
cy.getByTestId("VisualizationPreview").find("table").should("exist");
|
||||
cy.getByTestId("EditVisualizationDialog").contains("button", "Save").click();
|
||||
}
|
||||
|
||||
describe("Pivot", () => {
|
||||
beforeEach(() => {
|
||||
cy.login();
|
||||
cy.createQuery({ name: "Pivot Visualization", query: SQL })
|
||||
.its("id")
|
||||
.as("queryId");
|
||||
cy.createQuery({ name: "Pivot Visualization", query: SQL }).its("id").as("queryId");
|
||||
});
|
||||
|
||||
it("creates Pivot with controls", function() {
|
||||
it("creates Pivot with controls", function () {
|
||||
cy.visit(`queries/${this.queryId}/source`);
|
||||
cy.wait(1500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
cy.getByTestId("ExecuteButton").click();
|
||||
|
||||
const visualizationName = "Pivot";
|
||||
createPivotThroughUI(visualizationName);
|
||||
|
||||
cy.getByTestId("QueryPageVisualizationTabs")
|
||||
.contains("span", visualizationName)
|
||||
.should("exist");
|
||||
cy.getByTestId("QueryPageVisualizationTabs").contains("span", visualizationName).should("exist");
|
||||
});
|
||||
|
||||
it("creates Pivot without controls", function() {
|
||||
it("creates Pivot without controls", function () {
|
||||
cy.visit(`queries/${this.queryId}/source`);
|
||||
cy.wait(1500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
cy.getByTestId("ExecuteButton").click();
|
||||
|
||||
const visualizationName = "Pivot";
|
||||
@@ -76,7 +68,7 @@ describe("Pivot", () => {
|
||||
.should("be.not.visible");
|
||||
});
|
||||
|
||||
it("updates the visualization when results change", function() {
|
||||
it("updates the visualization when results change", function () {
|
||||
const options = {
|
||||
aggregatorName: "Count",
|
||||
data: [], // force it to have a data object, although it shouldn't
|
||||
@@ -86,8 +78,9 @@ describe("Pivot", () => {
|
||||
vals: ["value"],
|
||||
};
|
||||
|
||||
cy.createVisualization(this.queryId, "PIVOT", "Pivot", options).then(visualization => {
|
||||
cy.createVisualization(this.queryId, "PIVOT", "Pivot", options).then((visualization) => {
|
||||
cy.visit(`queries/${this.queryId}/source#${visualization.id}`);
|
||||
cy.wait(1500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
cy.getByTestId("ExecuteButton").click();
|
||||
|
||||
// assert number of rows is 11
|
||||
@@ -104,16 +97,14 @@ describe("Pivot", () => {
|
||||
cy.wait(200);
|
||||
|
||||
cy.getByTestId("SaveButton").click();
|
||||
cy.getByTestId("ExecuteButton")
|
||||
.should("be.enabled")
|
||||
.click();
|
||||
cy.getByTestId("ExecuteButton").should("be.enabled").click();
|
||||
|
||||
// assert number of rows is 12
|
||||
cy.getByTestId("PivotTableVisualization").contains(".pvtGrandTotal", "12");
|
||||
});
|
||||
});
|
||||
|
||||
it("takes a snapshot with different configured Pivots", function() {
|
||||
it("takes a snapshot with different configured Pivots", function () {
|
||||
const options = {
|
||||
aggregatorName: "Sum",
|
||||
controls: { enabled: true },
|
||||
@@ -142,19 +133,20 @@ describe("Pivot", () => {
|
||||
];
|
||||
|
||||
cy.createDashboard("Pivot Visualization")
|
||||
.then(dashboard => {
|
||||
.then((dashboard) => {
|
||||
this.dashboardUrl = `/dashboards/${dashboard.id}`;
|
||||
return cy.all(
|
||||
pivotTables.map(pivot => () =>
|
||||
cy
|
||||
.createVisualization(this.queryId, "PIVOT", pivot.name, pivot.options)
|
||||
.then(visualization => cy.addWidget(dashboard.id, visualization.id, { position: pivot.position }))
|
||||
pivotTables.map(
|
||||
(pivot) => () =>
|
||||
cy
|
||||
.createVisualization(this.queryId, "PIVOT", pivot.name, pivot.options)
|
||||
.then((visualization) => cy.addWidget(dashboard.id, visualization.id, { position: pivot.position }))
|
||||
)
|
||||
);
|
||||
})
|
||||
.then(widgets => {
|
||||
.then((widgets) => {
|
||||
cy.visit(this.dashboardUrl);
|
||||
widgets.forEach(widget => {
|
||||
widgets.forEach((widget) => {
|
||||
cy.getByTestId(getWidgetTestId(widget)).within(() =>
|
||||
cy.getByTestId("PivotTableVisualization").should("exist")
|
||||
);
|
||||
|
||||
@@ -25,6 +25,7 @@ describe("Sankey and Sunburst", () => {
|
||||
beforeEach(() => {
|
||||
cy.createQuery({ query: SQL }).then(({ id }) => {
|
||||
cy.visit(`queries/${id}/source`);
|
||||
cy.wait(1500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
cy.getByTestId("ExecuteButton").click();
|
||||
cy.getByTestId("NewVisualization").click();
|
||||
cy.getByTestId("VisualizationType").selectAntdOption("VisualizationType.SUNBURST_SEQUENCE");
|
||||
@@ -34,37 +35,21 @@ describe("Sankey and Sunburst", () => {
|
||||
it("creates Sunburst", () => {
|
||||
const visualizationName = "Sunburst";
|
||||
|
||||
cy.getByTestId("VisualizationName")
|
||||
.clear()
|
||||
.type(visualizationName);
|
||||
cy.getByTestId("VisualizationPreview")
|
||||
.find("svg")
|
||||
.should("exist");
|
||||
cy.getByTestId("VisualizationName").clear().type(visualizationName);
|
||||
cy.getByTestId("VisualizationPreview").find("svg").should("exist");
|
||||
|
||||
cy.getByTestId("EditVisualizationDialog")
|
||||
.contains("button", "Save")
|
||||
.click();
|
||||
cy.getByTestId("QueryPageVisualizationTabs")
|
||||
.contains("span", visualizationName)
|
||||
.should("exist");
|
||||
cy.getByTestId("EditVisualizationDialog").contains("button", "Save").click();
|
||||
cy.getByTestId("QueryPageVisualizationTabs").contains("span", visualizationName).should("exist");
|
||||
});
|
||||
|
||||
it("creates Sankey", () => {
|
||||
const visualizationName = "Sankey";
|
||||
|
||||
cy.getByTestId("VisualizationName")
|
||||
.clear()
|
||||
.type(visualizationName);
|
||||
cy.getByTestId("VisualizationPreview")
|
||||
.find("svg")
|
||||
.should("exist");
|
||||
cy.getByTestId("VisualizationName").clear().type(visualizationName);
|
||||
cy.getByTestId("VisualizationPreview").find("svg").should("exist");
|
||||
|
||||
cy.getByTestId("EditVisualizationDialog")
|
||||
.contains("button", "Save")
|
||||
.click();
|
||||
cy.getByTestId("QueryPageVisualizationTabs")
|
||||
.contains("span", visualizationName)
|
||||
.should("exist");
|
||||
cy.getByTestId("EditVisualizationDialog").contains("button", "Save").click();
|
||||
cy.getByTestId("QueryPageVisualizationTabs").contains("span", visualizationName).should("exist");
|
||||
});
|
||||
});
|
||||
|
||||
@@ -92,21 +77,22 @@ describe("Sankey and Sunburst", () => {
|
||||
},
|
||||
];
|
||||
|
||||
it("takes a snapshot with Sunburst (1 - 5 stages)", function() {
|
||||
cy.createDashboard("Sunburst Visualization").then(dashboard => {
|
||||
it("takes a snapshot with Sunburst (1 - 5 stages)", function () {
|
||||
cy.createDashboard("Sunburst Visualization").then((dashboard) => {
|
||||
this.dashboardUrl = `/dashboards/${dashboard.id}`;
|
||||
return cy
|
||||
.all(
|
||||
STAGES_WIDGETS.map(sunburst => () =>
|
||||
cy
|
||||
.createQuery({ name: `Sunburst with ${sunburst.name}`, query: sunburst.query })
|
||||
.then(queryData => cy.createVisualization(queryData.id, "SUNBURST_SEQUENCE", "Sunburst", {}))
|
||||
.then(visualization => cy.addWidget(dashboard.id, visualization.id, { position: sunburst.position }))
|
||||
STAGES_WIDGETS.map(
|
||||
(sunburst) => () =>
|
||||
cy
|
||||
.createQuery({ name: `Sunburst with ${sunburst.name}`, query: sunburst.query })
|
||||
.then((queryData) => cy.createVisualization(queryData.id, "SUNBURST_SEQUENCE", "Sunburst", {}))
|
||||
.then((visualization) => cy.addWidget(dashboard.id, visualization.id, { position: sunburst.position }))
|
||||
)
|
||||
)
|
||||
.then(widgets => {
|
||||
.then((widgets) => {
|
||||
cy.visit(this.dashboardUrl);
|
||||
widgets.forEach(widget => {
|
||||
widgets.forEach((widget) => {
|
||||
cy.getByTestId(getWidgetTestId(widget)).within(() => cy.get("svg").should("exist"));
|
||||
});
|
||||
|
||||
@@ -117,21 +103,22 @@ describe("Sankey and Sunburst", () => {
|
||||
});
|
||||
});
|
||||
|
||||
it("takes a snapshot with Sankey (1 - 5 stages)", function() {
|
||||
cy.createDashboard("Sankey Visualization").then(dashboard => {
|
||||
it("takes a snapshot with Sankey (1 - 5 stages)", function () {
|
||||
cy.createDashboard("Sankey Visualization").then((dashboard) => {
|
||||
this.dashboardUrl = `/dashboards/${dashboard.id}`;
|
||||
return cy
|
||||
.all(
|
||||
STAGES_WIDGETS.map(sankey => () =>
|
||||
cy
|
||||
.createQuery({ name: `Sankey with ${sankey.name}`, query: sankey.query })
|
||||
.then(queryData => cy.createVisualization(queryData.id, "SANKEY", "Sankey", {}))
|
||||
.then(visualization => cy.addWidget(dashboard.id, visualization.id, { position: sankey.position }))
|
||||
STAGES_WIDGETS.map(
|
||||
(sankey) => () =>
|
||||
cy
|
||||
.createQuery({ name: `Sankey with ${sankey.name}`, query: sankey.query })
|
||||
.then((queryData) => cy.createVisualization(queryData.id, "SANKEY", "Sankey", {}))
|
||||
.then((visualization) => cy.addWidget(dashboard.id, visualization.id, { position: sankey.position }))
|
||||
)
|
||||
)
|
||||
.then(widgets => {
|
||||
.then((widgets) => {
|
||||
cy.visit(this.dashboardUrl);
|
||||
widgets.forEach(widget => {
|
||||
widgets.forEach((widget) => {
|
||||
cy.getByTestId(getWidgetTestId(widget)).within(() => cy.get("svg").should("exist"));
|
||||
});
|
||||
|
||||
|
||||
@@ -64,6 +64,7 @@ describe("Word Cloud", () => {
|
||||
cy.login();
|
||||
cy.createQuery({ query: SQL }).then(({ id }) => {
|
||||
cy.visit(`queries/${id}/source`);
|
||||
cy.wait(1500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
cy.getByTestId("ExecuteButton").click();
|
||||
});
|
||||
cy.document().then(injectFont);
|
||||
@@ -80,9 +81,7 @@ describe("Word Cloud", () => {
|
||||
// Wait for proper initialization of visualization
|
||||
cy.wait(500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
|
||||
cy.getByTestId("VisualizationPreview")
|
||||
.find("svg text")
|
||||
.should("have.length", 11);
|
||||
cy.getByTestId("VisualizationPreview").find("svg text").should("have.length", 11);
|
||||
|
||||
cy.percySnapshot("Visualizations - Word Cloud (Automatic word frequencies)", { widths: [viewportWidth] });
|
||||
});
|
||||
@@ -99,9 +98,7 @@ describe("Word Cloud", () => {
|
||||
// Wait for proper initialization of visualization
|
||||
cy.wait(500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
|
||||
cy.getByTestId("VisualizationPreview")
|
||||
.find("svg text")
|
||||
.should("have.length", 5);
|
||||
cy.getByTestId("VisualizationPreview").find("svg text").should("have.length", 5);
|
||||
|
||||
cy.percySnapshot("Visualizations - Word Cloud (Frequencies from another column)", { widths: [viewportWidth] });
|
||||
});
|
||||
@@ -125,9 +122,7 @@ describe("Word Cloud", () => {
|
||||
// Wait for proper initialization of visualization
|
||||
cy.wait(500); // eslint-disable-line cypress/no-unnecessary-waiting
|
||||
|
||||
cy.getByTestId("VisualizationPreview")
|
||||
.find("svg text")
|
||||
.should("have.length", 2);
|
||||
cy.getByTestId("VisualizationPreview").find("svg text").should("have.length", 2);
|
||||
|
||||
cy.percySnapshot("Visualizations - Word Cloud (With filters)", { widths: [viewportWidth] });
|
||||
});
|
||||
|
||||
@@ -3,36 +3,26 @@
|
||||
* @param should Passed to should expression after plot points are captured
|
||||
*/
|
||||
export function assertPlotPreview(should = "exist") {
|
||||
cy.getByTestId("VisualizationPreview")
|
||||
.find("g.plot")
|
||||
.should("exist")
|
||||
.find("g.points")
|
||||
.should(should);
|
||||
cy.getByTestId("VisualizationPreview").find("g.overplot").should("exist").find("g.points").should(should);
|
||||
}
|
||||
|
||||
export function createChartThroughUI(chartName, chartSpecificAssertionFn = () => {}) {
|
||||
cy.getByTestId("NewVisualization").click();
|
||||
cy.getByTestId("VisualizationType").selectAntdOption("VisualizationType.CHART");
|
||||
cy.getByTestId("VisualizationName")
|
||||
.clear()
|
||||
.type(chartName);
|
||||
cy.getByTestId("VisualizationName").clear().type(chartName);
|
||||
|
||||
chartSpecificAssertionFn();
|
||||
|
||||
cy.server();
|
||||
cy.route("POST", "**/api/visualizations").as("SaveVisualization");
|
||||
|
||||
cy.getByTestId("EditVisualizationDialog")
|
||||
.contains("button", "Save")
|
||||
.click();
|
||||
cy.getByTestId("EditVisualizationDialog").contains("button", "Save").click();
|
||||
|
||||
cy.getByTestId("QueryPageVisualizationTabs")
|
||||
.contains("span", chartName)
|
||||
.should("exist");
|
||||
cy.getByTestId("QueryPageVisualizationTabs").contains("span", chartName).should("exist");
|
||||
|
||||
cy.wait("@SaveVisualization").should("have.property", "status", 200);
|
||||
|
||||
return cy.get("@SaveVisualization").then(xhr => {
|
||||
return cy.get("@SaveVisualization").then((xhr) => {
|
||||
const { id, name, options } = xhr.response.body;
|
||||
return cy.wrap({ id, name, options });
|
||||
});
|
||||
@@ -42,19 +32,13 @@ export function assertTabbedEditor(chartSpecificTabbedEditorAssertionFn = () =>
|
||||
cy.getByTestId("Chart.GlobalSeriesType").should("exist");
|
||||
|
||||
cy.getByTestId("VisualizationEditor.Tabs.Series").click();
|
||||
cy.getByTestId("VisualizationEditor")
|
||||
.find("table")
|
||||
.should("exist");
|
||||
cy.getByTestId("VisualizationEditor").find("table").should("exist");
|
||||
|
||||
cy.getByTestId("VisualizationEditor.Tabs.Colors").click();
|
||||
cy.getByTestId("VisualizationEditor")
|
||||
.find("table")
|
||||
.should("exist");
|
||||
cy.getByTestId("VisualizationEditor").find("table").should("exist");
|
||||
|
||||
cy.getByTestId("VisualizationEditor.Tabs.DataLabels").click();
|
||||
cy.getByTestId("VisualizationEditor")
|
||||
.getByTestId("Chart.DataLabels.ShowDataLabels")
|
||||
.should("exist");
|
||||
cy.getByTestId("VisualizationEditor").getByTestId("Chart.DataLabels.ShowDataLabels").should("exist");
|
||||
|
||||
chartSpecificTabbedEditorAssertionFn();
|
||||
|
||||
@@ -63,39 +47,29 @@ export function assertTabbedEditor(chartSpecificTabbedEditorAssertionFn = () =>
|
||||
|
||||
export function assertAxesAndAddLabels(xaxisLabel, yaxisLabel) {
|
||||
cy.getByTestId("VisualizationEditor.Tabs.XAxis").click();
|
||||
cy.getByTestId("Chart.XAxis.Type")
|
||||
.contains(".ant-select-selection-item", "Auto Detect")
|
||||
.should("exist");
|
||||
cy.getByTestId("Chart.XAxis.Type").contains(".ant-select-selection-item", "Auto Detect").should("exist");
|
||||
|
||||
cy.getByTestId("Chart.XAxis.Name")
|
||||
.clear()
|
||||
.type(xaxisLabel);
|
||||
cy.getByTestId("Chart.XAxis.Name").clear().type(xaxisLabel);
|
||||
|
||||
cy.getByTestId("VisualizationEditor.Tabs.YAxis").click();
|
||||
cy.getByTestId("Chart.LeftYAxis.Type")
|
||||
.contains(".ant-select-selection-item", "Linear")
|
||||
.should("exist");
|
||||
cy.getByTestId("Chart.LeftYAxis.Type").contains(".ant-select-selection-item", "Linear").should("exist");
|
||||
|
||||
cy.getByTestId("Chart.LeftYAxis.Name")
|
||||
.clear()
|
||||
.type(yaxisLabel);
|
||||
cy.getByTestId("Chart.LeftYAxis.Name").clear().type(yaxisLabel);
|
||||
|
||||
cy.getByTestId("Chart.LeftYAxis.TickFormat")
|
||||
.clear()
|
||||
.type("+");
|
||||
cy.getByTestId("Chart.LeftYAxis.TickFormat").clear().type("+");
|
||||
|
||||
cy.getByTestId("VisualizationEditor.Tabs.General").click();
|
||||
}
|
||||
|
||||
export function createDashboardWithCharts(title, chartGetters, widgetsAssertionFn = () => {}) {
|
||||
cy.createDashboard(title).then(dashboard => {
|
||||
cy.createDashboard(title).then((dashboard) => {
|
||||
const dashboardUrl = `/dashboards/${dashboard.id}`;
|
||||
const widgetGetters = chartGetters.map(chartGetter => `${chartGetter}Widget`);
|
||||
const widgetGetters = chartGetters.map((chartGetter) => `${chartGetter}Widget`);
|
||||
|
||||
chartGetters.forEach((chartGetter, i) => {
|
||||
const position = { autoHeight: false, sizeY: 8, sizeX: 3, col: (i % 2) * 3 };
|
||||
cy.get(`@${chartGetter}`)
|
||||
.then(chart => cy.addWidget(dashboard.id, chart.id, { position }))
|
||||
.then((chart) => cy.addWidget(dashboard.id, chart.id, { position }))
|
||||
.as(widgetGetters[i]);
|
||||
});
|
||||
|
||||
|
||||
@@ -1,24 +0,0 @@
|
||||
services:
|
||||
.redash:
|
||||
build:
|
||||
context: .
|
||||
args:
|
||||
FRONTEND_BUILD_MODE: ${FRONTEND_BUILD_MODE:-2}
|
||||
INSTALL_GROUPS: ${INSTALL_GROUPS:-main,all_ds,dev}
|
||||
volumes:
|
||||
- $PWD:${SERVER_MOUNT:-/ignore}
|
||||
command: manage version
|
||||
environment:
|
||||
REDASH_LOG_LEVEL: INFO
|
||||
REDASH_REDIS_URL: redis://redis:6379/0
|
||||
REDASH_DATABASE_URL: postgresql://postgres@postgres/postgres
|
||||
REDASH_RATELIMIT_ENABLED: false
|
||||
REDASH_MAIL_DEFAULT_SENDER: redash@example.com
|
||||
REDASH_MAIL_SERVER: email
|
||||
REDASH_MAIL_PORT: 1025
|
||||
REDASH_ENFORCE_CSRF: true
|
||||
REDASH_COOKIE_SECRET: ${REDASH_COOKIE_SECRET}
|
||||
REDASH_SECRET_KEY: ${REDASH_SECRET_KEY}
|
||||
REDASH_PRODUCTION: ${REDASH_PRODUCTION:-true}
|
||||
env_file:
|
||||
- .env
|
||||
@@ -10,6 +10,7 @@ x-redash-service: &redash-service
|
||||
env_file:
|
||||
- .env
|
||||
x-redash-environment: &redash-environment
|
||||
REDASH_HOST: http://localhost:5001
|
||||
REDASH_LOG_LEVEL: "INFO"
|
||||
REDASH_REDIS_URL: "redis://redis:6379/0"
|
||||
REDASH_DATABASE_URL: "postgresql://postgres@postgres/postgres"
|
||||
@@ -52,7 +53,7 @@ services:
|
||||
image: redis:7-alpine
|
||||
restart: unless-stopped
|
||||
postgres:
|
||||
image: pgautoupgrade/pgautoupgrade:latest
|
||||
image: postgres:18-alpine
|
||||
ports:
|
||||
- "15432:5432"
|
||||
# The following turns the DB into less durable, but gains significant performance improvements for the tests run (x3
|
||||
|
||||
26
migrations/versions/1655999df5e3_default_alert_selector.py
Normal file
26
migrations/versions/1655999df5e3_default_alert_selector.py
Normal file
@@ -0,0 +1,26 @@
|
||||
"""set default alert selector
|
||||
|
||||
Revision ID: 1655999df5e3
|
||||
Revises: 9e8c841d1a30
|
||||
Create Date: 2025-07-09 14:44:00
|
||||
|
||||
"""
|
||||
|
||||
from alembic import op
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '1655999df5e3'
|
||||
down_revision = '9e8c841d1a30'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
op.execute("""
|
||||
UPDATE alerts
|
||||
SET options = jsonb_set(options, '{selector}', '"first"')
|
||||
WHERE options->>'selector' IS NULL;
|
||||
""")
|
||||
|
||||
def downgrade():
|
||||
pass
|
||||
64
migrations/versions/9e8c841d1a30_fix_hash.py
Normal file
64
migrations/versions/9e8c841d1a30_fix_hash.py
Normal file
@@ -0,0 +1,64 @@
|
||||
"""fix_hash
|
||||
|
||||
Revision ID: 9e8c841d1a30
|
||||
Revises: 7205816877ec
|
||||
Create Date: 2024-10-05 18:55:35.730573
|
||||
|
||||
"""
|
||||
import logging
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.sql import table
|
||||
from sqlalchemy import select
|
||||
|
||||
from redash.query_runner import BaseQueryRunner, get_query_runner
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '9e8c841d1a30'
|
||||
down_revision = '7205816877ec'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def update_query_hash(record):
|
||||
should_apply_auto_limit = record['options'].get("apply_auto_limit", False) if record['options'] else False
|
||||
query_runner = get_query_runner(record['type'], {}) if record['type'] else BaseQueryRunner({})
|
||||
query_text = record['query']
|
||||
|
||||
parameters_dict = {p["name"]: p.get("value") for p in record['options'].get('parameters', [])} if record.options else {}
|
||||
if any(parameters_dict):
|
||||
print(f"Query {record['query_id']} has parameters. Hash might be incorrect.")
|
||||
|
||||
return query_runner.gen_query_hash(query_text, should_apply_auto_limit)
|
||||
|
||||
|
||||
def upgrade():
|
||||
conn = op.get_bind()
|
||||
|
||||
metadata = sa.MetaData(bind=conn)
|
||||
queries = sa.Table("queries", metadata, autoload=True)
|
||||
data_sources = sa.Table("data_sources", metadata, autoload=True)
|
||||
|
||||
joined_table = queries.outerjoin(data_sources, queries.c.data_source_id == data_sources.c.id)
|
||||
|
||||
query = select([
|
||||
queries.c.id.label("query_id"),
|
||||
queries.c.query,
|
||||
queries.c.query_hash,
|
||||
queries.c.options,
|
||||
data_sources.c.id.label("data_source_id"),
|
||||
data_sources.c.type
|
||||
]).select_from(joined_table)
|
||||
|
||||
for record in conn.execute(query):
|
||||
new_hash = update_query_hash(record)
|
||||
print(f"Updating hash for query {record['query_id']} from {record['query_hash']} to {new_hash}")
|
||||
conn.execute(
|
||||
queries.update()
|
||||
.where(queries.c.id == record['query_id'])
|
||||
.values(query_hash=new_hash))
|
||||
|
||||
|
||||
def downgrade():
|
||||
pass
|
||||
@@ -0,0 +1,34 @@
|
||||
"""12-column dashboard layout
|
||||
|
||||
Revision ID: db0aca1ebd32
|
||||
Revises: 1655999df5e3
|
||||
Create Date: 2025-03-31 13:45:43.160893
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'db0aca1ebd32'
|
||||
down_revision = '1655999df5e3'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
op.execute("""
|
||||
UPDATE widgets
|
||||
SET options = jsonb_set(options, '{position,col}', to_json((options->'position'->>'col')::int * 2)::jsonb);
|
||||
UPDATE widgets
|
||||
SET options = jsonb_set(options, '{position,sizeX}', to_json((options->'position'->>'sizeX')::int * 2)::jsonb);
|
||||
""")
|
||||
|
||||
|
||||
def downgrade():
|
||||
op.execute("""
|
||||
UPDATE widgets
|
||||
SET options = jsonb_set(options, '{position,col}', to_json((options->'position'->>'col')::int / 2)::jsonb);
|
||||
UPDATE widgets
|
||||
SET options = jsonb_set(options, '{position,sizeX}', to_json((options->'position'->>'sizeX')::int / 2)::jsonb);
|
||||
""")
|
||||
27
package.json
27
package.json
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "redash-client",
|
||||
"version": "24.09.0-dev",
|
||||
"version": "25.12.0-dev",
|
||||
"description": "The frontend part of Redash.",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
@@ -46,8 +46,8 @@
|
||||
"dependencies": {
|
||||
"@ant-design/icons": "^4.2.1",
|
||||
"@redash/viz": "file:viz-lib",
|
||||
"ace-builds": "^1.4.12",
|
||||
"antd": "^4.4.3",
|
||||
"ace-builds": "^1.43.3",
|
||||
"antd": "4.4.3",
|
||||
"axios": "0.27.2",
|
||||
"axios-auth-refresh": "3.3.6",
|
||||
"bootstrap": "^3.4.1",
|
||||
@@ -55,7 +55,7 @@
|
||||
"d3": "^3.5.17",
|
||||
"debug": "^3.2.7",
|
||||
"dompurify": "^2.0.17",
|
||||
"elliptic": "^6.5.7",
|
||||
"elliptic": "^6.6.0",
|
||||
"font-awesome": "^4.7.0",
|
||||
"history": "^4.10.1",
|
||||
"hoist-non-react-statics": "^3.3.0",
|
||||
@@ -64,11 +64,11 @@
|
||||
"mousetrap": "^1.6.1",
|
||||
"mustache": "^2.3.0",
|
||||
"numeral": "^2.0.6",
|
||||
"path-to-regexp": "^3.1.0",
|
||||
"path-to-regexp": "^3.3.0",
|
||||
"prop-types": "^15.6.1",
|
||||
"query-string": "^6.9.0",
|
||||
"react": "16.14.0",
|
||||
"react-ace": "^9.1.1",
|
||||
"react-ace": "^14.0.1",
|
||||
"react-dom": "^16.14.0",
|
||||
"react-grid-layout": "^0.18.2",
|
||||
"react-resizable": "^1.10.1",
|
||||
@@ -100,6 +100,7 @@
|
||||
"@types/sql-formatter": "^2.3.0",
|
||||
"@typescript-eslint/eslint-plugin": "^2.10.0",
|
||||
"@typescript-eslint/parser": "^2.10.0",
|
||||
"assert": "^2.1.0",
|
||||
"atob": "^2.1.2",
|
||||
"babel-eslint": "^10.0.3",
|
||||
"babel-jest": "^24.1.0",
|
||||
@@ -138,20 +139,24 @@
|
||||
"mini-css-extract-plugin": "^1.6.2",
|
||||
"mockdate": "^2.0.2",
|
||||
"npm-run-all": "^4.1.5",
|
||||
"prettier": "^1.19.1",
|
||||
"prettier": "3.3.2",
|
||||
"process": "^0.11.10",
|
||||
"raw-loader": "^0.5.1",
|
||||
"react-refresh": "^0.14.0",
|
||||
"react-test-renderer": "^16.14.0",
|
||||
"request-cookies": "^1.1.0",
|
||||
"source-map-loader": "^1.1.3",
|
||||
"stream-browserify": "^3.0.0",
|
||||
"style-loader": "^2.0.0",
|
||||
"typescript": "^4.1.2",
|
||||
"typescript": "4.1.2",
|
||||
"url": "^0.11.4",
|
||||
"url-loader": "^4.1.1",
|
||||
"webpack": "^4.46.0",
|
||||
"webpack-build-notifier": "^2.3.0",
|
||||
"webpack": "^5.101.3",
|
||||
"webpack-build-notifier": "^3.0.1",
|
||||
"webpack-bundle-analyzer": "^4.9.0",
|
||||
"webpack-cli": "^4.10.0",
|
||||
"webpack-dev-server": "^4.15.1",
|
||||
"webpack-manifest-plugin": "^2.0.4"
|
||||
"webpack-manifest-plugin": "^5.0.1"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"fsevents": "^2.3.2"
|
||||
|
||||
3365
poetry.lock
generated
3365
poetry.lock
generated
File diff suppressed because it is too large
Load Diff
@@ -1,5 +1,17 @@
|
||||
[project]
|
||||
name = "redash"
|
||||
version = "25.12.0-dev"
|
||||
requires-python = ">=3.8"
|
||||
description = "Make Your Company Data Driven. Connect to any data source, easily visualize, dashboard and share your data."
|
||||
authors = [
|
||||
{ name = "Arik Fraimovich", email = "<arik@redash.io>" }
|
||||
]
|
||||
# to be added to/removed from the mailing list, please reach out to Arik via the above email or Discord
|
||||
maintainers = [
|
||||
{ name = "Redash maintainers and contributors", email = "<maintainers@redash.io>" }
|
||||
]
|
||||
readme = "README.md"
|
||||
dependencies = []
|
||||
|
||||
[tool.black]
|
||||
target-version = ['py38']
|
||||
@@ -10,17 +22,6 @@ force-exclude = '''
|
||||
)/
|
||||
'''
|
||||
|
||||
[tool.poetry]
|
||||
name = "redash"
|
||||
version = "24.09.0-dev"
|
||||
description = "Make Your Company Data Driven. Connect to any data source, easily visualize, dashboard and share your data."
|
||||
authors = ["Arik Fraimovich <arik@redash.io>"]
|
||||
# to be added to/removed from the mailing list, please reach out to Arik via the above email or Discord
|
||||
maintainers = [
|
||||
"Redash maintainers and contributors <maintainers@redash.io>",
|
||||
]
|
||||
readme = "README.md"
|
||||
|
||||
[tool.poetry.dependencies]
|
||||
python = ">=3.8,<3.11"
|
||||
advocate = "1.0.0"
|
||||
@@ -29,7 +30,7 @@ authlib = "0.15.5"
|
||||
backoff = "2.2.1"
|
||||
blinker = "1.6.2"
|
||||
click = "8.1.3"
|
||||
cryptography = "42.0.8"
|
||||
cryptography = "43.0.1"
|
||||
disposable-email-domains = ">=0.0.52"
|
||||
flask = "2.3.2"
|
||||
flask-limiter = "3.3.1"
|
||||
@@ -46,7 +47,7 @@ greenlet = "2.0.2"
|
||||
gunicorn = "22.0.0"
|
||||
httplib2 = "0.19.0"
|
||||
itsdangerous = "2.1.2"
|
||||
jinja2 = "3.1.4"
|
||||
jinja2 = "3.1.5"
|
||||
jsonschema = "3.1.1"
|
||||
markupsafe = "2.1.1"
|
||||
maxminddb-geolite2 = "2018.703"
|
||||
@@ -65,7 +66,7 @@ pyyaml = "6.0.1"
|
||||
redis = "4.6.0"
|
||||
regex = "2023.8.8"
|
||||
requests = "2.32.3"
|
||||
restrictedpython = "6.2"
|
||||
restrictedpython = "7.3"
|
||||
rq = "1.16.1"
|
||||
rq-scheduler = "0.13.1"
|
||||
semver = "2.8.1"
|
||||
@@ -86,13 +87,17 @@ wtforms = "2.2.1"
|
||||
xlsxwriter = "1.2.2"
|
||||
tzlocal = "4.3.1"
|
||||
pyodbc = "5.1.0"
|
||||
debugpy = "^1.8.9"
|
||||
paramiko = "3.4.1"
|
||||
oracledb = "2.5.1"
|
||||
ibm-db = { version = "^3.2.7", markers = "platform_machine == 'x86_64' or platform_machine == 'AMD64'" }
|
||||
|
||||
[tool.poetry.group.all_ds]
|
||||
optional = true
|
||||
|
||||
[tool.poetry.group.all_ds.dependencies]
|
||||
atsd-client = "3.0.5"
|
||||
azure-kusto-data = "0.0.35"
|
||||
azure-kusto-data = "5.0.1"
|
||||
boto3 = "1.28.8"
|
||||
botocore = "1.31.8"
|
||||
cassandra-driver = "3.21.0"
|
||||
@@ -100,6 +105,7 @@ certifi = ">=2019.9.11"
|
||||
cmem-cmempy = "21.2.3"
|
||||
databend-py = "0.4.6"
|
||||
databend-sqlalchemy = "0.2.4"
|
||||
duckdb = "1.3.2"
|
||||
google-api-python-client = "1.7.11"
|
||||
gspread = "5.11.2"
|
||||
impyla = "0.16.0"
|
||||
@@ -107,30 +113,30 @@ influxdb = "5.2.3"
|
||||
influxdb-client = "1.38.0"
|
||||
memsql = "3.2.0"
|
||||
mysqlclient = "2.1.1"
|
||||
numpy = "1.24.4"
|
||||
nzalchemy = "^11.0.2"
|
||||
nzpy = ">=1.15"
|
||||
oauth2client = "4.1.3"
|
||||
openpyxl = "3.0.7"
|
||||
oracledb = "2.1.2"
|
||||
pandas = "1.3.4"
|
||||
phoenixdb = "0.7"
|
||||
pinotdb = ">=0.4.5"
|
||||
protobuf = "3.20.2"
|
||||
pyathena = ">=1.5.0,<=1.11.5"
|
||||
pyathena = "2.25.2"
|
||||
pydgraph = "2.0.2"
|
||||
pydruid = "0.5.7"
|
||||
pyexasol = "0.12.0"
|
||||
pyhive = "0.6.1"
|
||||
pyignite = "0.6.1"
|
||||
pymongo = { version = "4.6.3", extras = ["srv", "tls"] }
|
||||
pymssql = "2.2.8"
|
||||
pymssql = "^2.3.1"
|
||||
pyodbc = "5.1.0"
|
||||
python-arango = "6.1.0"
|
||||
python-rapidjson = "1.20"
|
||||
requests-aws-sign = "0.1.5"
|
||||
sasl = ">=0.1.3"
|
||||
simple-salesforce = "0.74.3"
|
||||
snowflake-connector-python = "3.12.0"
|
||||
snowflake-connector-python = "3.12.3"
|
||||
td-client = "1.0.0"
|
||||
thrift = ">=0.8.0"
|
||||
thrift-sasl = ">=0.1.0"
|
||||
@@ -156,7 +162,6 @@ jwcrypto = "1.5.6"
|
||||
mock = "5.0.2"
|
||||
pre-commit = "3.3.3"
|
||||
ptpython = "3.0.23"
|
||||
ptvsd = "4.3.2"
|
||||
pytest-cov = "4.1.0"
|
||||
watchdog = "3.0.0"
|
||||
ruff = "0.0.289"
|
||||
|
||||
@@ -14,13 +14,14 @@ from redash.app import create_app # noqa
|
||||
from redash.destinations import import_destinations
|
||||
from redash.query_runner import import_query_runners
|
||||
|
||||
__version__ = "24.09.0-dev"
|
||||
__version__ = "25.12.0-dev"
|
||||
|
||||
|
||||
if os.environ.get("REMOTE_DEBUG"):
|
||||
import ptvsd
|
||||
import debugpy
|
||||
|
||||
ptvsd.enable_attach(address=("0.0.0.0", 5678))
|
||||
debugpy.listen(("0.0.0.0", 5678))
|
||||
debugpy.wait_for_client()
|
||||
|
||||
|
||||
def setup_logging():
|
||||
|
||||
@@ -36,10 +36,14 @@ def create_app():
|
||||
from .metrics import request as request_metrics
|
||||
from .models import db, users
|
||||
from .utils import sentry
|
||||
from .version_check import reset_new_version_status
|
||||
|
||||
sentry.init()
|
||||
app = Redash()
|
||||
|
||||
# Check and update the cached version for use by the client
|
||||
reset_new_version_status()
|
||||
|
||||
security.init_app(app)
|
||||
request_metrics.init_app(app)
|
||||
db.init_app(app)
|
||||
|
||||
@@ -4,7 +4,7 @@ import requests
|
||||
from authlib.integrations.flask_client import OAuth
|
||||
from flask import Blueprint, flash, redirect, request, session, url_for
|
||||
|
||||
from redash import models
|
||||
from redash import models, settings
|
||||
from redash.authentication import (
|
||||
create_and_login_user,
|
||||
get_next_path,
|
||||
@@ -29,6 +29,41 @@ def verify_profile(org, profile):
|
||||
return False
|
||||
|
||||
|
||||
def get_user_profile(access_token, logger):
|
||||
headers = {"Authorization": f"OAuth {access_token}"}
|
||||
response = requests.get("https://www.googleapis.com/oauth2/v1/userinfo", headers=headers)
|
||||
|
||||
if response.status_code == 401:
|
||||
logger.warning("Failed getting user profile (response code 401).")
|
||||
return None
|
||||
|
||||
return response.json()
|
||||
|
||||
|
||||
def build_redirect_uri():
|
||||
scheme = settings.GOOGLE_OAUTH_SCHEME_OVERRIDE or None
|
||||
return url_for(".callback", _external=True, _scheme=scheme)
|
||||
|
||||
|
||||
def build_next_path(org_slug=None):
|
||||
next_path = request.args.get("next")
|
||||
if not next_path:
|
||||
if org_slug is None:
|
||||
org_slug = session.get("org_slug")
|
||||
|
||||
scheme = None
|
||||
if settings.GOOGLE_OAUTH_SCHEME_OVERRIDE:
|
||||
scheme = settings.GOOGLE_OAUTH_SCHEME_OVERRIDE
|
||||
|
||||
next_path = url_for(
|
||||
"redash.index",
|
||||
org_slug=org_slug,
|
||||
_external=True,
|
||||
_scheme=scheme,
|
||||
)
|
||||
return next_path
|
||||
|
||||
|
||||
def create_google_oauth_blueprint(app):
|
||||
oauth = OAuth(app)
|
||||
|
||||
@@ -36,23 +71,12 @@ def create_google_oauth_blueprint(app):
|
||||
blueprint = Blueprint("google_oauth", __name__)
|
||||
|
||||
CONF_URL = "https://accounts.google.com/.well-known/openid-configuration"
|
||||
oauth = OAuth(app)
|
||||
oauth.register(
|
||||
name="google",
|
||||
server_metadata_url=CONF_URL,
|
||||
client_kwargs={"scope": "openid email profile"},
|
||||
)
|
||||
|
||||
def get_user_profile(access_token):
|
||||
headers = {"Authorization": "OAuth {}".format(access_token)}
|
||||
response = requests.get("https://www.googleapis.com/oauth2/v1/userinfo", headers=headers)
|
||||
|
||||
if response.status_code == 401:
|
||||
logger.warning("Failed getting user profile (response code 401).")
|
||||
return None
|
||||
|
||||
return response.json()
|
||||
|
||||
@blueprint.route("/<org_slug>/oauth/google", endpoint="authorize_org")
|
||||
def org_login(org_slug):
|
||||
session["org_slug"] = current_org.slug
|
||||
@@ -60,9 +84,9 @@ def create_google_oauth_blueprint(app):
|
||||
|
||||
@blueprint.route("/oauth/google", endpoint="authorize")
|
||||
def login():
|
||||
redirect_uri = url_for(".callback", _external=True)
|
||||
redirect_uri = build_redirect_uri()
|
||||
|
||||
next_path = request.args.get("next", url_for("redash.index", org_slug=session.get("org_slug")))
|
||||
next_path = build_next_path()
|
||||
logger.debug("Callback url: %s", redirect_uri)
|
||||
logger.debug("Next is: %s", next_path)
|
||||
|
||||
@@ -86,7 +110,7 @@ def create_google_oauth_blueprint(app):
|
||||
flash("Validation error. Please retry.")
|
||||
return redirect(url_for("redash.login"))
|
||||
|
||||
profile = get_user_profile(access_token)
|
||||
profile = get_user_profile(access_token, logger)
|
||||
if profile is None:
|
||||
flash("Validation error. Please retry.")
|
||||
return redirect(url_for("redash.login"))
|
||||
@@ -110,7 +134,9 @@ def create_google_oauth_blueprint(app):
|
||||
if user is None:
|
||||
return logout_and_redirect_to_index()
|
||||
|
||||
unsafe_next_path = session.get("next_url") or url_for("redash.index", org_slug=org.slug)
|
||||
unsafe_next_path = session.get("next_url")
|
||||
if not unsafe_next_path:
|
||||
unsafe_next_path = build_next_path(org.slug)
|
||||
next_path = get_next_path(unsafe_next_path)
|
||||
|
||||
return redirect(next_path)
|
||||
|
||||
@@ -5,6 +5,22 @@ from sqlalchemy.orm.exc import NoResultFound
|
||||
manager = AppGroup(help="Queries management commands.")
|
||||
|
||||
|
||||
@manager.command(name="rehash")
|
||||
def rehash():
|
||||
from redash import models
|
||||
|
||||
for q in models.Query.query.all():
|
||||
old_hash = q.query_hash
|
||||
q.update_query_hash()
|
||||
new_hash = q.query_hash
|
||||
|
||||
if old_hash != new_hash:
|
||||
print(f"Query {q.id} has changed hash from {old_hash} to {new_hash}")
|
||||
models.db.session.add(q)
|
||||
|
||||
models.db.session.commit()
|
||||
|
||||
|
||||
@manager.command(name="add_tag")
|
||||
@argument("query_id")
|
||||
@argument("tag")
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
import html
|
||||
import json
|
||||
import logging
|
||||
from copy import deepcopy
|
||||
|
||||
@@ -37,6 +39,129 @@ class Webex(BaseDestination):
|
||||
|
||||
@staticmethod
|
||||
def formatted_attachments_template(subject, description, query_link, alert_link):
|
||||
# Attempt to parse the description to find a 2D array
|
||||
try:
|
||||
# Extract the part of the description that looks like a JSON array
|
||||
start_index = description.find("[")
|
||||
end_index = description.rfind("]") + 1
|
||||
json_array_str = description[start_index:end_index]
|
||||
|
||||
# Decode HTML entities
|
||||
json_array_str = html.unescape(json_array_str)
|
||||
|
||||
# Replace single quotes with double quotes for valid JSON
|
||||
json_array_str = json_array_str.replace("'", '"')
|
||||
|
||||
# Load the JSON array
|
||||
data_array = json.loads(json_array_str)
|
||||
|
||||
# Check if it's a 2D array
|
||||
if isinstance(data_array, list) and all(isinstance(i, list) for i in data_array):
|
||||
# Create a table for the Adaptive Card
|
||||
table_rows = []
|
||||
for row in data_array:
|
||||
table_rows.append(
|
||||
{
|
||||
"type": "ColumnSet",
|
||||
"columns": [
|
||||
{"type": "Column", "items": [{"type": "TextBlock", "text": str(item), "wrap": True}]}
|
||||
for item in row
|
||||
],
|
||||
}
|
||||
)
|
||||
|
||||
# Create the body of the card with the table
|
||||
body = (
|
||||
[
|
||||
{
|
||||
"type": "TextBlock",
|
||||
"text": f"{subject}",
|
||||
"weight": "bolder",
|
||||
"size": "medium",
|
||||
"wrap": True,
|
||||
},
|
||||
{
|
||||
"type": "TextBlock",
|
||||
"text": f"{description[:start_index]}",
|
||||
"isSubtle": True,
|
||||
"wrap": True,
|
||||
},
|
||||
]
|
||||
+ table_rows
|
||||
+ [
|
||||
{
|
||||
"type": "TextBlock",
|
||||
"text": f"Click [here]({query_link}) to check your query!",
|
||||
"wrap": True,
|
||||
"isSubtle": True,
|
||||
},
|
||||
{
|
||||
"type": "TextBlock",
|
||||
"text": f"Click [here]({alert_link}) to check your alert!",
|
||||
"wrap": True,
|
||||
"isSubtle": True,
|
||||
},
|
||||
]
|
||||
)
|
||||
else:
|
||||
# Fallback to the original description if no valid 2D array is found
|
||||
body = [
|
||||
{
|
||||
"type": "TextBlock",
|
||||
"text": f"{subject}",
|
||||
"weight": "bolder",
|
||||
"size": "medium",
|
||||
"wrap": True,
|
||||
},
|
||||
{
|
||||
"type": "TextBlock",
|
||||
"text": f"{description}",
|
||||
"isSubtle": True,
|
||||
"wrap": True,
|
||||
},
|
||||
{
|
||||
"type": "TextBlock",
|
||||
"text": f"Click [here]({query_link}) to check your query!",
|
||||
"wrap": True,
|
||||
"isSubtle": True,
|
||||
},
|
||||
{
|
||||
"type": "TextBlock",
|
||||
"text": f"Click [here]({alert_link}) to check your alert!",
|
||||
"wrap": True,
|
||||
"isSubtle": True,
|
||||
},
|
||||
]
|
||||
except json.JSONDecodeError:
|
||||
# If parsing fails, fallback to the original description
|
||||
body = [
|
||||
{
|
||||
"type": "TextBlock",
|
||||
"text": f"{subject}",
|
||||
"weight": "bolder",
|
||||
"size": "medium",
|
||||
"wrap": True,
|
||||
},
|
||||
{
|
||||
"type": "TextBlock",
|
||||
"text": f"{description}",
|
||||
"isSubtle": True,
|
||||
"wrap": True,
|
||||
},
|
||||
{
|
||||
"type": "TextBlock",
|
||||
"text": f"Click [here]({query_link}) to check your query!",
|
||||
"wrap": True,
|
||||
"isSubtle": True,
|
||||
},
|
||||
{
|
||||
"type": "TextBlock",
|
||||
"text": f"Click [here]({alert_link}) to check your alert!",
|
||||
"wrap": True,
|
||||
"isSubtle": True,
|
||||
},
|
||||
]
|
||||
|
||||
return [
|
||||
{
|
||||
"contentType": "application/vnd.microsoft.card.adaptive",
|
||||
@@ -44,44 +169,7 @@ class Webex(BaseDestination):
|
||||
"$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
|
||||
"type": "AdaptiveCard",
|
||||
"version": "1.0",
|
||||
"body": [
|
||||
{
|
||||
"type": "ColumnSet",
|
||||
"columns": [
|
||||
{
|
||||
"type": "Column",
|
||||
"width": 4,
|
||||
"items": [
|
||||
{
|
||||
"type": "TextBlock",
|
||||
"text": {subject},
|
||||
"weight": "bolder",
|
||||
"size": "medium",
|
||||
"wrap": True,
|
||||
},
|
||||
{
|
||||
"type": "TextBlock",
|
||||
"text": {description},
|
||||
"isSubtle": True,
|
||||
"wrap": True,
|
||||
},
|
||||
{
|
||||
"type": "TextBlock",
|
||||
"text": f"Click [here]({query_link}) to check your query!",
|
||||
"wrap": True,
|
||||
"isSubtle": True,
|
||||
},
|
||||
{
|
||||
"type": "TextBlock",
|
||||
"text": f"Click [here]({alert_link}) to check your alert!",
|
||||
"wrap": True,
|
||||
"isSubtle": True,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
}
|
||||
],
|
||||
"body": body,
|
||||
},
|
||||
}
|
||||
]
|
||||
@@ -116,6 +204,10 @@ class Webex(BaseDestination):
|
||||
|
||||
# destinations is guaranteed to be a comma-separated string
|
||||
for destination_id in destinations.split(","):
|
||||
destination_id = destination_id.strip() # Remove any leading or trailing whitespace
|
||||
if not destination_id: # Check if the destination_id is empty or blank
|
||||
continue # Skip to the next iteration if it's empty or blank
|
||||
|
||||
payload = deepcopy(template_payload)
|
||||
payload[payload_tag] = destination_id
|
||||
self.post_message(payload, headers)
|
||||
|
||||
@@ -42,7 +42,7 @@ class Webhook(BaseDestination):
|
||||
auth = HTTPBasicAuth(options.get("username"), options.get("password")) if options.get("username") else None
|
||||
resp = requests.post(
|
||||
options.get("url"),
|
||||
data=json_dumps(data),
|
||||
data=json_dumps(data).encode("utf-8"),
|
||||
auth=auth,
|
||||
headers=headers,
|
||||
timeout=5.0,
|
||||
|
||||
@@ -15,6 +15,7 @@ from redash.authentication.account import (
|
||||
)
|
||||
from redash.handlers import routes
|
||||
from redash.handlers.base import json_response, org_scoped_rule
|
||||
from redash.version_check import get_latest_version
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -28,6 +29,7 @@ def get_google_auth_url(next_path):
|
||||
|
||||
|
||||
def render_token_login_page(template, org_slug, token, invite):
|
||||
error_message = None
|
||||
try:
|
||||
user_id = validate_token(token)
|
||||
org = current_org._get_current_object()
|
||||
@@ -39,19 +41,19 @@ def render_token_login_page(template, org_slug, token, invite):
|
||||
user_id,
|
||||
org_slug,
|
||||
)
|
||||
error_message = "Your invite link is invalid. Bad user id in token. Please ask for a new one."
|
||||
except SignatureExpired:
|
||||
logger.exception("Token signature has expired. Token: %s, org=%s", token, org_slug)
|
||||
error_message = "Your invite link has expired. Please ask for a new one."
|
||||
except BadSignature:
|
||||
logger.exception("Bad signature for the token: %s, org=%s", token, org_slug)
|
||||
error_message = "Your invite link is invalid. Bad signature. Please double-check the token."
|
||||
|
||||
if error_message:
|
||||
return (
|
||||
render_template(
|
||||
"error.html",
|
||||
error_message="Invalid invite link. Please ask for a new one.",
|
||||
),
|
||||
400,
|
||||
)
|
||||
except (SignatureExpired, BadSignature):
|
||||
logger.exception("Failed to verify invite token: %s, org=%s", token, org_slug)
|
||||
return (
|
||||
render_template(
|
||||
"error.html",
|
||||
error_message="Your invite link has expired. Please ask for a new one.",
|
||||
error_message=error_message,
|
||||
),
|
||||
400,
|
||||
)
|
||||
@@ -253,19 +255,30 @@ def number_format_config():
|
||||
}
|
||||
|
||||
|
||||
def null_value_config():
|
||||
return {
|
||||
"nullValue": current_org.get_setting("null_value"),
|
||||
}
|
||||
|
||||
|
||||
def client_config():
|
||||
if not current_user.is_api_user() and current_user.is_authenticated:
|
||||
client_config_inner = {
|
||||
client_config = {
|
||||
"newVersionAvailable": bool(get_latest_version()),
|
||||
"version": __version__,
|
||||
}
|
||||
else:
|
||||
client_config_inner = {}
|
||||
client_config = {}
|
||||
|
||||
if current_user.has_permission("admin") and current_org.get_setting("beacon_consent") is None:
|
||||
client_config["showBeaconConsentMessage"] = True
|
||||
|
||||
defaults = {
|
||||
"allowScriptsInUserInput": settings.ALLOW_SCRIPTS_IN_USER_INPUT,
|
||||
"showPermissionsControl": current_org.get_setting("feature_show_permissions_control"),
|
||||
"hidePlotlyModeBar": current_org.get_setting("hide_plotly_mode_bar"),
|
||||
"disablePublicUrls": current_org.get_setting("disable_public_urls"),
|
||||
"multiByteSearchEnabled": current_org.get_setting("multi_byte_search_enabled"),
|
||||
"allowCustomJSVisualizations": settings.FEATURE_ALLOW_CUSTOM_JS_VISUALIZATIONS,
|
||||
"autoPublishNamedQueries": settings.FEATURE_AUTO_PUBLISH_NAMED_QUERIES,
|
||||
"extendedAlertOptions": settings.FEATURE_EXTENDED_ALERT_OPTIONS,
|
||||
@@ -279,12 +292,13 @@ def client_config():
|
||||
"tableCellMaxJSONSize": settings.TABLE_CELL_MAX_JSON_SIZE,
|
||||
}
|
||||
|
||||
client_config_inner.update(defaults)
|
||||
client_config_inner.update({"basePath": base_href()})
|
||||
client_config_inner.update(date_time_format_config())
|
||||
client_config_inner.update(number_format_config())
|
||||
client_config.update(defaults)
|
||||
client_config.update({"basePath": base_href()})
|
||||
client_config.update(date_time_format_config())
|
||||
client_config.update(number_format_config())
|
||||
client_config.update(null_value_config())
|
||||
|
||||
return client_config_inner
|
||||
return client_config
|
||||
|
||||
|
||||
def messages():
|
||||
|
||||
@@ -26,6 +26,8 @@ order_map = {
|
||||
"-name": "-lowercase_name",
|
||||
"created_at": "created_at",
|
||||
"-created_at": "-created_at",
|
||||
"starred_at": "favorites-created_at",
|
||||
"-starred_at": "-favorites-created_at",
|
||||
}
|
||||
|
||||
order_results = partial(_order_results, default_order="-created_at", allowed_orders=order_map)
|
||||
|
||||
@@ -44,6 +44,8 @@ order_map = {
|
||||
"-executed_at": "-query_results-retrieved_at",
|
||||
"created_by": "users-name",
|
||||
"-created_by": "-users-name",
|
||||
"starred_at": "favorites-created_at",
|
||||
"-starred_at": "-favorites-created_at",
|
||||
}
|
||||
|
||||
order_results = partial(_order_results, default_order="-created_at", allowed_orders=order_map)
|
||||
@@ -239,6 +241,8 @@ class QueryListResource(BaseQueryListResource):
|
||||
query = models.Query.create(**query_def)
|
||||
models.db.session.add(query)
|
||||
models.db.session.commit()
|
||||
query.update_latest_result_by_query_hash()
|
||||
models.db.session.commit()
|
||||
|
||||
self.record_event({"action": "create", "object_id": query.id, "object_type": "query"})
|
||||
|
||||
@@ -362,6 +366,8 @@ class QueryResource(BaseResource):
|
||||
try:
|
||||
self.update_model(query, query_def)
|
||||
models.db.session.commit()
|
||||
query.update_latest_result_by_query_hash()
|
||||
models.db.session.commit()
|
||||
except StaleDataError:
|
||||
abort(409)
|
||||
|
||||
|
||||
@@ -1,12 +1,13 @@
|
||||
from flask import g, redirect, render_template, request, url_for
|
||||
from flask_login import login_user
|
||||
from wtforms import Form, PasswordField, StringField, validators
|
||||
from wtforms import BooleanField, Form, PasswordField, StringField, validators
|
||||
from wtforms.fields.html5 import EmailField
|
||||
|
||||
from redash import settings
|
||||
from redash.authentication.org_resolving import current_org
|
||||
from redash.handlers.base import routes
|
||||
from redash.models import Group, Organization, User, db
|
||||
from redash.tasks.general import subscribe
|
||||
|
||||
|
||||
class SetupForm(Form):
|
||||
@@ -14,6 +15,8 @@ class SetupForm(Form):
|
||||
email = EmailField("Email Address", validators=[validators.Email()])
|
||||
password = PasswordField("Password", validators=[validators.Length(6)])
|
||||
org_name = StringField("Organization Name", validators=[validators.InputRequired()])
|
||||
security_notifications = BooleanField()
|
||||
newsletter = BooleanField()
|
||||
|
||||
|
||||
def create_org(org_name, user_name, email, password):
|
||||
@@ -54,6 +57,8 @@ def setup():
|
||||
return redirect("/")
|
||||
|
||||
form = SetupForm(request.form)
|
||||
form.newsletter.data = True
|
||||
form.security_notifications.data = True
|
||||
|
||||
if request.method == "POST" and form.validate():
|
||||
default_org, user = create_org(form.org_name.data, form.name.data, form.email.data, form.password.data)
|
||||
@@ -61,6 +66,10 @@ def setup():
|
||||
g.org = default_org
|
||||
login_user(user)
|
||||
|
||||
# signup to newsletter if needed
|
||||
if form.newsletter.data or form.security_notifications:
|
||||
subscribe.delay(form.data)
|
||||
|
||||
return redirect(url_for("redash.index", org_slug=None))
|
||||
|
||||
return render_template("setup.html", form=form)
|
||||
|
||||
@@ -5,7 +5,7 @@ from flask import g, has_request_context
|
||||
from sqlalchemy.engine import Engine
|
||||
from sqlalchemy.event import listens_for
|
||||
from sqlalchemy.orm.util import _ORMJoin
|
||||
from sqlalchemy.sql.selectable import Alias
|
||||
from sqlalchemy.sql.selectable import Alias, Join
|
||||
|
||||
from redash import statsd_client
|
||||
|
||||
@@ -18,7 +18,7 @@ def _table_name_from_select_element(elt):
|
||||
if isinstance(t, Alias):
|
||||
t = t.original.froms[0]
|
||||
|
||||
while isinstance(t, _ORMJoin):
|
||||
while isinstance(t, _ORMJoin) or isinstance(t, Join):
|
||||
t = t.left
|
||||
|
||||
return t.name
|
||||
|
||||
@@ -2,6 +2,7 @@ import calendar
|
||||
import datetime
|
||||
import logging
|
||||
import numbers
|
||||
import re
|
||||
import time
|
||||
|
||||
import pytz
|
||||
@@ -228,7 +229,7 @@ class DataSource(BelongsToOrgMixin, db.Model):
|
||||
|
||||
def _sort_schema(self, schema):
|
||||
return [
|
||||
{"name": i["name"], "columns": sorted(i["columns"], key=lambda x: x["name"] if isinstance(x, dict) else x)}
|
||||
{**i, "columns": sorted(i["columns"], key=lambda x: x["name"] if isinstance(x, dict) else x)}
|
||||
for i in sorted(schema, key=lambda x: x["name"])
|
||||
]
|
||||
|
||||
@@ -387,6 +388,10 @@ class QueryResult(db.Model, BelongsToOrgMixin):
|
||||
|
||||
|
||||
def should_schedule_next(previous_iteration, now, interval, time=None, day_of_week=None, failures=0):
|
||||
# if previous_iteration is None, it means the query has never been run before
|
||||
# so we should schedule it immediately
|
||||
if previous_iteration is None:
|
||||
return True
|
||||
# if time exists then interval > 23 hours (82800s)
|
||||
# if day_of_week exists then interval > 6 days (518400s)
|
||||
if time is None:
|
||||
@@ -560,7 +565,7 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
|
||||
db.session.query(tag_column, usage_count)
|
||||
.group_by(tag_column)
|
||||
.filter(Query.id.in_(queries.options(load_only("id"))))
|
||||
.order_by(usage_count.desc())
|
||||
.order_by(tag_column)
|
||||
)
|
||||
return query
|
||||
|
||||
@@ -602,6 +607,11 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
|
||||
if query.schedule.get("disabled"):
|
||||
continue
|
||||
|
||||
# Skip queries that have None for all schedule values. It's unclear whether this
|
||||
# something that can happen in practice, but we have a test case for it.
|
||||
if all(value is None for value in query.schedule.values()):
|
||||
continue
|
||||
|
||||
if query.schedule["until"]:
|
||||
schedule_until = pytz.utc.localize(datetime.datetime.strptime(query.schedule["until"], "%Y-%m-%d"))
|
||||
|
||||
@@ -613,7 +623,7 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
|
||||
)
|
||||
|
||||
if should_schedule_next(
|
||||
retrieved_at or now,
|
||||
retrieved_at,
|
||||
now,
|
||||
query.schedule["interval"],
|
||||
query.schedule["time"],
|
||||
@@ -635,6 +645,43 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
|
||||
|
||||
return list(outdated_queries.values())
|
||||
|
||||
@classmethod
|
||||
def _do_multi_byte_search(cls, all_queries, term, limit=None):
|
||||
# term examples:
|
||||
# - word
|
||||
# - name:word
|
||||
# - query:word
|
||||
# - "multiple words"
|
||||
# - name:"multiple words"
|
||||
# - word1 word2 word3
|
||||
# - word1 "multiple word" query:"select foo"
|
||||
tokens = re.findall(r'(?:([^:\s]+):)?(?:"([^"]+)"|(\S+))', term)
|
||||
conditions = []
|
||||
for token in tokens:
|
||||
key = None
|
||||
if token[0]:
|
||||
key = token[0]
|
||||
|
||||
if token[1]:
|
||||
value = token[1]
|
||||
else:
|
||||
value = token[2]
|
||||
|
||||
pattern = f"%{value}%"
|
||||
|
||||
if key == "id" and value.isdigit():
|
||||
conditions.append(cls.id.equal(int(value)))
|
||||
elif key == "name":
|
||||
conditions.append(cls.name.ilike(pattern))
|
||||
elif key == "query":
|
||||
conditions.append(cls.query_text.ilike(pattern))
|
||||
elif key == "description":
|
||||
conditions.append(cls.description.ilike(pattern))
|
||||
else:
|
||||
conditions.append(or_(cls.name.ilike(pattern), cls.description.ilike(pattern)))
|
||||
|
||||
return all_queries.filter(and_(*conditions)).order_by(Query.id).limit(limit)
|
||||
|
||||
@classmethod
|
||||
def search(
|
||||
cls,
|
||||
@@ -655,12 +702,7 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
|
||||
|
||||
if multi_byte_search:
|
||||
# Since tsvector doesn't work well with CJK languages, use `ilike` too
|
||||
pattern = "%{}%".format(term)
|
||||
return (
|
||||
all_queries.filter(or_(cls.name.ilike(pattern), cls.description.ilike(pattern)))
|
||||
.order_by(Query.id)
|
||||
.limit(limit)
|
||||
)
|
||||
return cls._do_multi_byte_search(all_queries, term, limit)
|
||||
|
||||
# sort the result using the weight as defined in the search vector column
|
||||
return all_queries.search(term, sort=True).limit(limit)
|
||||
@@ -669,13 +711,7 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
|
||||
def search_by_user(cls, term, user, limit=None, multi_byte_search=False):
|
||||
if multi_byte_search:
|
||||
# Since tsvector doesn't work well with CJK languages, use `ilike` too
|
||||
pattern = "%{}%".format(term)
|
||||
return (
|
||||
cls.by_user(user)
|
||||
.filter(or_(cls.name.ilike(pattern), cls.description.ilike(pattern)))
|
||||
.order_by(Query.id)
|
||||
.limit(limit)
|
||||
)
|
||||
return cls._do_multi_byte_search(cls.by_user(user), term, limit)
|
||||
|
||||
return cls.by_user(user).search(term, sort=True).limit(limit)
|
||||
|
||||
@@ -717,6 +753,23 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
|
||||
|
||||
return db.session.execute(query, {"ids": tuple(query_ids)}).fetchall()
|
||||
|
||||
def update_latest_result_by_query_hash(self):
|
||||
query_hash = self.query_hash
|
||||
data_source_id = self.data_source_id
|
||||
query_result = (
|
||||
QueryResult.query.options(load_only("id"))
|
||||
.filter(
|
||||
QueryResult.query_hash == query_hash,
|
||||
QueryResult.data_source_id == data_source_id,
|
||||
)
|
||||
.order_by(QueryResult.retrieved_at.desc())
|
||||
.first()
|
||||
)
|
||||
if query_result:
|
||||
latest_query_data_id = query_result.id
|
||||
self.latest_query_data_id = latest_query_data_id
|
||||
db.session.add(self)
|
||||
|
||||
@classmethod
|
||||
def update_latest_result(cls, query_result):
|
||||
# TODO: Investigate how big an impact this select-before-update makes.
|
||||
@@ -899,6 +952,7 @@ def next_state(op, value, threshold):
|
||||
# boolean value is Python specific and most likely will be confusing to
|
||||
# users.
|
||||
value = str(value).lower()
|
||||
value_is_number = False
|
||||
else:
|
||||
try:
|
||||
value = float(value)
|
||||
@@ -959,9 +1013,10 @@ class Alert(TimestampMixin, BelongsToOrgMixin, db.Model):
|
||||
return super(Alert, cls).get_by_id_and_org(object_id, org, Query)
|
||||
|
||||
def evaluate(self):
|
||||
data = self.query_rel.latest_query_data.data
|
||||
data = self.query_rel.latest_query_data.data if self.query_rel.latest_query_data else None
|
||||
new_state = self.UNKNOWN_STATE
|
||||
|
||||
if data["rows"] and self.options["column"] in data["rows"][0]:
|
||||
if data and data["rows"] and self.options["column"] in data["rows"][0]:
|
||||
op = OPERATORS.get(self.options["op"], lambda v, t: False)
|
||||
|
||||
if "selector" not in self.options:
|
||||
@@ -988,9 +1043,8 @@ class Alert(TimestampMixin, BelongsToOrgMixin, db.Model):
|
||||
|
||||
threshold = self.options["value"]
|
||||
|
||||
new_state = next_state(op, value, threshold)
|
||||
else:
|
||||
new_state = self.UNKNOWN_STATE
|
||||
if value is not None:
|
||||
new_state = next_state(op, value, threshold)
|
||||
|
||||
return new_state
|
||||
|
||||
@@ -1127,7 +1181,7 @@ class Dashboard(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model
|
||||
db.session.query(tag_column, usage_count)
|
||||
.group_by(tag_column)
|
||||
.filter(Dashboard.id.in_(dashboards.options(load_only("id"))))
|
||||
.order_by(usage_count.desc())
|
||||
.order_by(tag_column)
|
||||
)
|
||||
return query
|
||||
|
||||
@@ -1135,15 +1189,19 @@ class Dashboard(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model
|
||||
def favorites(cls, user, base_query=None):
|
||||
if base_query is None:
|
||||
base_query = cls.all(user.org, user.group_ids, user.id)
|
||||
return base_query.join(
|
||||
(
|
||||
Favorite,
|
||||
and_(
|
||||
Favorite.object_type == "Dashboard",
|
||||
Favorite.object_id == Dashboard.id,
|
||||
),
|
||||
return (
|
||||
base_query.distinct(cls.lowercase_name, Dashboard.created_at, Dashboard.slug, Favorite.created_at)
|
||||
.join(
|
||||
(
|
||||
Favorite,
|
||||
and_(
|
||||
Favorite.object_type == "Dashboard",
|
||||
Favorite.object_id == Dashboard.id,
|
||||
),
|
||||
)
|
||||
)
|
||||
).filter(Favorite.user_id == user.id)
|
||||
.filter(Favorite.user_id == user.id)
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def by_user(cls, user):
|
||||
|
||||
@@ -59,7 +59,7 @@ def get_status():
|
||||
|
||||
|
||||
def rq_job_ids():
|
||||
queues = Queue.all(connection=redis_connection)
|
||||
queues = Queue.all(connection=rq_redis_connection)
|
||||
|
||||
started_jobs = [StartedJobRegistry(queue=q).get_job_ids() for q in queues]
|
||||
queued_jobs = [q.job_ids for q in queues]
|
||||
|
||||
@@ -288,7 +288,10 @@ class BaseSQLQueryRunner(BaseQueryRunner):
|
||||
return True
|
||||
|
||||
def query_is_select_no_limit(self, query):
|
||||
parsed_query = sqlparse.parse(query)[0]
|
||||
parsed_query_list = sqlparse.parse(query)
|
||||
if len(parsed_query_list) == 0:
|
||||
return False
|
||||
parsed_query = parsed_query_list[0]
|
||||
last_keyword_idx = find_last_keyword_idx(parsed_query)
|
||||
# Either invalid query or query that is not select
|
||||
if last_keyword_idx == -1 or parsed_query.tokens[0].value.upper() != "SELECT":
|
||||
|
||||
@@ -90,15 +90,26 @@ class Athena(BaseQueryRunner):
|
||||
"title": "Athena cost per Tb scanned (USD)",
|
||||
"default": 5,
|
||||
},
|
||||
"result_reuse_enable": {
|
||||
"type": "boolean",
|
||||
"title": "Reuse Athena query results",
|
||||
},
|
||||
"result_reuse_minutes": {
|
||||
"type": "number",
|
||||
"title": "Minutes to reuse Athena query results",
|
||||
"default": 60,
|
||||
},
|
||||
},
|
||||
"required": ["region", "s3_staging_dir"],
|
||||
"extra_options": ["glue", "catalog_ids", "cost_per_tb"],
|
||||
"extra_options": ["glue", "catalog_ids", "cost_per_tb", "result_reuse_enable", "result_reuse_minutes"],
|
||||
"order": [
|
||||
"region",
|
||||
"s3_staging_dir",
|
||||
"schema",
|
||||
"work_group",
|
||||
"cost_per_tb",
|
||||
"result_reuse_enable",
|
||||
"result_reuse_minutes",
|
||||
],
|
||||
"secret": ["aws_secret_key"],
|
||||
}
|
||||
@@ -247,6 +258,8 @@ class Athena(BaseQueryRunner):
|
||||
kms_key=self.configuration.get("kms_key", None),
|
||||
work_group=self.configuration.get("work_group", "primary"),
|
||||
formatter=SimpleFormatter(),
|
||||
result_reuse_enable=self.configuration.get("result_reuse_enable", False),
|
||||
result_reuse_minutes=self.configuration.get("result_reuse_minutes", 60),
|
||||
**self._get_iam_credentials(user=user),
|
||||
).cursor()
|
||||
|
||||
|
||||
@@ -11,12 +11,12 @@ from redash.query_runner import (
|
||||
from redash.utils import json_loads
|
||||
|
||||
try:
|
||||
from azure.kusto.data.exceptions import KustoServiceError
|
||||
from azure.kusto.data.request import (
|
||||
from azure.kusto.data import (
|
||||
ClientRequestProperties,
|
||||
KustoClient,
|
||||
KustoConnectionStringBuilder,
|
||||
)
|
||||
from azure.kusto.data.exceptions import KustoServiceError
|
||||
|
||||
enabled = True
|
||||
except ImportError:
|
||||
@@ -37,6 +37,34 @@ TYPES_MAP = {
|
||||
}
|
||||
|
||||
|
||||
def _get_data_scanned(kusto_response):
|
||||
try:
|
||||
metadata_table = next(
|
||||
(table for table in kusto_response.tables if table.table_name == "QueryCompletionInformation"),
|
||||
None,
|
||||
)
|
||||
|
||||
if metadata_table:
|
||||
resource_usage_json = next(
|
||||
(row["Payload"] for row in metadata_table.rows if row["EventTypeName"] == "QueryResourceConsumption"),
|
||||
"{}",
|
||||
)
|
||||
resource_usage = json_loads(resource_usage_json).get("resource_usage", {})
|
||||
|
||||
data_scanned = (
|
||||
resource_usage["cache"]["shards"]["cold"]["hitbytes"]
|
||||
+ resource_usage["cache"]["shards"]["cold"]["missbytes"]
|
||||
+ resource_usage["cache"]["shards"]["hot"]["hitbytes"]
|
||||
+ resource_usage["cache"]["shards"]["hot"]["missbytes"]
|
||||
+ resource_usage["cache"]["shards"]["bypassbytes"]
|
||||
)
|
||||
|
||||
except Exception:
|
||||
data_scanned = 0
|
||||
|
||||
return int(data_scanned)
|
||||
|
||||
|
||||
class AzureKusto(BaseQueryRunner):
|
||||
should_annotate_query = False
|
||||
noop_query = "let noop = datatable (Noop:string)[1]; noop"
|
||||
@@ -44,8 +72,6 @@ class AzureKusto(BaseQueryRunner):
|
||||
def __init__(self, configuration):
|
||||
super(AzureKusto, self).__init__(configuration)
|
||||
self.syntax = "custom"
|
||||
self.client_request_properties = ClientRequestProperties()
|
||||
self.client_request_properties.application = "redash"
|
||||
|
||||
@classmethod
|
||||
def configuration_schema(cls):
|
||||
@@ -60,12 +86,14 @@ class AzureKusto(BaseQueryRunner):
|
||||
},
|
||||
"azure_ad_tenant_id": {"type": "string", "title": "Azure AD Tenant Id"},
|
||||
"database": {"type": "string"},
|
||||
"msi": {"type": "boolean", "title": "Use Managed Service Identity"},
|
||||
"user_msi": {
|
||||
"type": "string",
|
||||
"title": "User-assigned managed identity client ID",
|
||||
},
|
||||
},
|
||||
"required": [
|
||||
"cluster",
|
||||
"azure_ad_client_id",
|
||||
"azure_ad_client_secret",
|
||||
"azure_ad_tenant_id",
|
||||
"database",
|
||||
],
|
||||
"order": [
|
||||
@@ -91,18 +119,48 @@ class AzureKusto(BaseQueryRunner):
|
||||
return "Azure Data Explorer (Kusto)"
|
||||
|
||||
def run_query(self, query, user):
|
||||
kcsb = KustoConnectionStringBuilder.with_aad_application_key_authentication(
|
||||
connection_string=self.configuration["cluster"],
|
||||
aad_app_id=self.configuration["azure_ad_client_id"],
|
||||
app_key=self.configuration["azure_ad_client_secret"],
|
||||
authority_id=self.configuration["azure_ad_tenant_id"],
|
||||
)
|
||||
cluster = self.configuration["cluster"]
|
||||
msi = self.configuration.get("msi", False)
|
||||
# Managed Service Identity(MSI)
|
||||
if msi:
|
||||
# If user-assigned managed identity is used, the client ID must be provided
|
||||
if self.configuration.get("user_msi"):
|
||||
kcsb = KustoConnectionStringBuilder.with_aad_managed_service_identity_authentication(
|
||||
cluster,
|
||||
client_id=self.configuration["user_msi"],
|
||||
)
|
||||
else:
|
||||
kcsb = KustoConnectionStringBuilder.with_aad_managed_service_identity_authentication(cluster)
|
||||
# Service Principal auth
|
||||
else:
|
||||
aad_app_id = self.configuration.get("azure_ad_client_id")
|
||||
app_key = self.configuration.get("azure_ad_client_secret")
|
||||
authority_id = self.configuration.get("azure_ad_tenant_id")
|
||||
|
||||
if not (aad_app_id and app_key and authority_id):
|
||||
raise ValueError(
|
||||
"Azure AD Client ID, Client Secret, and Tenant ID are required for Service Principal authentication."
|
||||
)
|
||||
|
||||
kcsb = KustoConnectionStringBuilder.with_aad_application_key_authentication(
|
||||
connection_string=cluster,
|
||||
aad_app_id=aad_app_id,
|
||||
app_key=app_key,
|
||||
authority_id=authority_id,
|
||||
)
|
||||
|
||||
client = KustoClient(kcsb)
|
||||
|
||||
request_properties = ClientRequestProperties()
|
||||
request_properties.application = "redash"
|
||||
|
||||
if user:
|
||||
request_properties.user = user.email
|
||||
request_properties.set_option("request_description", user.email)
|
||||
|
||||
db = self.configuration["database"]
|
||||
try:
|
||||
response = client.execute(db, query, self.client_request_properties)
|
||||
response = client.execute(db, query, request_properties)
|
||||
|
||||
result_cols = response.primary_results[0].columns
|
||||
result_rows = response.primary_results[0].rows
|
||||
@@ -123,14 +181,15 @@ class AzureKusto(BaseQueryRunner):
|
||||
rows.append(row.to_dict())
|
||||
|
||||
error = None
|
||||
data = {"columns": columns, "rows": rows}
|
||||
data = {
|
||||
"columns": columns,
|
||||
"rows": rows,
|
||||
"metadata": {"data_scanned": _get_data_scanned(response)},
|
||||
}
|
||||
|
||||
except KustoServiceError as err:
|
||||
data = None
|
||||
try:
|
||||
error = err.args[1][0]["error"]["@message"]
|
||||
except (IndexError, KeyError):
|
||||
error = err.args[1]
|
||||
error = str(err)
|
||||
|
||||
return data, error
|
||||
|
||||
@@ -143,7 +202,10 @@ class AzureKusto(BaseQueryRunner):
|
||||
self._handle_run_query_error(error)
|
||||
|
||||
schema_as_json = json_loads(results["rows"][0]["DatabaseSchema"])
|
||||
tables_list = schema_as_json["Databases"][self.configuration["database"]]["Tables"].values()
|
||||
tables_list = [
|
||||
*(schema_as_json["Databases"][self.configuration["database"]]["Tables"].values()),
|
||||
*(schema_as_json["Databases"][self.configuration["database"]]["MaterializedViews"].values()),
|
||||
]
|
||||
|
||||
schema = {}
|
||||
|
||||
@@ -154,7 +216,9 @@ class AzureKusto(BaseQueryRunner):
|
||||
schema[table_name] = {"name": table_name, "columns": []}
|
||||
|
||||
for column in table["OrderedColumns"]:
|
||||
schema[table_name]["columns"].append(column["Name"])
|
||||
schema[table_name]["columns"].append(
|
||||
{"name": column["Name"], "type": TYPES_MAP.get(column["CslType"], None)}
|
||||
)
|
||||
|
||||
return list(schema.values())
|
||||
|
||||
|
||||
@@ -7,11 +7,12 @@ from base64 import b64decode
|
||||
from redash import settings
|
||||
from redash.query_runner import (
|
||||
TYPE_BOOLEAN,
|
||||
TYPE_DATE,
|
||||
TYPE_DATETIME,
|
||||
TYPE_FLOAT,
|
||||
TYPE_INTEGER,
|
||||
TYPE_STRING,
|
||||
BaseQueryRunner,
|
||||
BaseSQLQueryRunner,
|
||||
InterruptException,
|
||||
JobTimeoutException,
|
||||
register,
|
||||
@@ -37,6 +38,8 @@ types_map = {
|
||||
"BOOLEAN": TYPE_BOOLEAN,
|
||||
"STRING": TYPE_STRING,
|
||||
"TIMESTAMP": TYPE_DATETIME,
|
||||
"DATETIME": TYPE_DATETIME,
|
||||
"DATE": TYPE_DATE,
|
||||
}
|
||||
|
||||
|
||||
@@ -83,7 +86,7 @@ def _get_query_results(jobs, project_id, location, job_id, start_index):
|
||||
).execute()
|
||||
logging.debug("query_reply %s", query_reply)
|
||||
if not query_reply["jobComplete"]:
|
||||
time.sleep(10)
|
||||
time.sleep(1)
|
||||
return _get_query_results(jobs, project_id, location, job_id, start_index)
|
||||
|
||||
return query_reply
|
||||
@@ -95,7 +98,7 @@ def _get_total_bytes_processed_for_resp(bq_response):
|
||||
return int(bq_response.get("totalBytesProcessed", "0"))
|
||||
|
||||
|
||||
class BigQuery(BaseQueryRunner):
|
||||
class BigQuery(BaseSQLQueryRunner):
|
||||
noop_query = "SELECT 1"
|
||||
|
||||
def __init__(self, configuration):
|
||||
@@ -153,6 +156,11 @@ class BigQuery(BaseQueryRunner):
|
||||
"secret": ["jsonKeyFile"],
|
||||
}
|
||||
|
||||
def annotate_query(self, query, metadata):
|
||||
# Remove "Job ID" before annotating the query to avoid cache misses
|
||||
metadata = {k: v for k, v in metadata.items() if k != "Job ID"}
|
||||
return super().annotate_query(query, metadata)
|
||||
|
||||
def _get_bigquery_service(self):
|
||||
socket.setdefaulttimeout(settings.BIGQUERY_HTTP_TIMEOUT)
|
||||
|
||||
@@ -212,11 +220,12 @@ class BigQuery(BaseQueryRunner):
|
||||
job_data = self._get_job_data(query)
|
||||
insert_response = jobs.insert(projectId=project_id, body=job_data).execute()
|
||||
self.current_job_id = insert_response["jobReference"]["jobId"]
|
||||
self.current_job_location = insert_response["jobReference"]["location"]
|
||||
current_row = 0
|
||||
query_reply = _get_query_results(
|
||||
jobs,
|
||||
project_id=project_id,
|
||||
location=self._get_location(),
|
||||
location=self.current_job_location,
|
||||
job_id=self.current_job_id,
|
||||
start_index=current_row,
|
||||
)
|
||||
@@ -233,13 +242,11 @@ class BigQuery(BaseQueryRunner):
|
||||
|
||||
query_result_request = {
|
||||
"projectId": project_id,
|
||||
"jobId": query_reply["jobReference"]["jobId"],
|
||||
"jobId": self.current_job_id,
|
||||
"startIndex": current_row,
|
||||
"location": self.current_job_location,
|
||||
}
|
||||
|
||||
if self._get_location():
|
||||
query_result_request["location"] = self._get_location()
|
||||
|
||||
query_reply = jobs.getQueryResults(**query_result_request).execute()
|
||||
|
||||
columns = [
|
||||
@@ -301,28 +308,70 @@ class BigQuery(BaseQueryRunner):
|
||||
datasets = self._get_project_datasets(project_id)
|
||||
|
||||
query_base = """
|
||||
SELECT table_schema, table_name, field_path
|
||||
SELECT table_schema, table_name, field_path, data_type, description
|
||||
FROM `{dataset_id}`.INFORMATION_SCHEMA.COLUMN_FIELD_PATHS
|
||||
WHERE table_schema NOT IN ('information_schema')
|
||||
"""
|
||||
|
||||
table_query_base = """
|
||||
SELECT table_schema, table_name, JSON_VALUE(option_value) as table_description
|
||||
FROM `{dataset_id}`.INFORMATION_SCHEMA.TABLE_OPTIONS
|
||||
WHERE table_schema NOT IN ('information_schema')
|
||||
AND option_name = 'description'
|
||||
"""
|
||||
|
||||
location_dataset_ids = {}
|
||||
schema = {}
|
||||
queries = []
|
||||
for dataset in datasets:
|
||||
dataset_id = dataset["datasetReference"]["datasetId"]
|
||||
query = query_base.format(dataset_id=dataset_id)
|
||||
queries.append(query)
|
||||
location = dataset["location"]
|
||||
if self._get_location() and location != self._get_location():
|
||||
logger.debug("dataset location is different: %s", location)
|
||||
continue
|
||||
|
||||
query = "\nUNION ALL\n".join(queries)
|
||||
results, error = self.run_query(query, None)
|
||||
if error is not None:
|
||||
self._handle_run_query_error(error)
|
||||
if location not in location_dataset_ids:
|
||||
location_dataset_ids[location] = []
|
||||
location_dataset_ids[location].append(dataset_id)
|
||||
|
||||
for row in results["rows"]:
|
||||
table_name = "{0}.{1}".format(row["table_schema"], row["table_name"])
|
||||
if table_name not in schema:
|
||||
schema[table_name] = {"name": table_name, "columns": []}
|
||||
schema[table_name]["columns"].append(row["field_path"])
|
||||
for location, datasets in location_dataset_ids.items():
|
||||
queries = []
|
||||
for dataset_id in datasets:
|
||||
query = query_base.format(dataset_id=dataset_id)
|
||||
queries.append(query)
|
||||
|
||||
query = "\nUNION ALL\n".join(queries)
|
||||
results, error = self.run_query(query, None)
|
||||
if error is not None:
|
||||
self._handle_run_query_error(error)
|
||||
|
||||
for row in results["rows"]:
|
||||
table_name = "{0}.{1}".format(row["table_schema"], row["table_name"])
|
||||
if table_name not in schema:
|
||||
schema[table_name] = {"name": table_name, "columns": []}
|
||||
schema[table_name]["columns"].append(
|
||||
{
|
||||
"name": row["field_path"],
|
||||
"type": row["data_type"],
|
||||
"description": row["description"],
|
||||
}
|
||||
)
|
||||
|
||||
table_queries = []
|
||||
for dataset_id in datasets:
|
||||
table_query = table_query_base.format(dataset_id=dataset_id)
|
||||
table_queries.append(table_query)
|
||||
|
||||
table_query = "\nUNION ALL\n".join(table_queries)
|
||||
results, error = self.run_query(table_query, None)
|
||||
if error is not None:
|
||||
self._handle_run_query_error(error)
|
||||
|
||||
for row in results["rows"]:
|
||||
table_name = "{0}.{1}".format(row["table_schema"], row["table_name"])
|
||||
if table_name not in schema:
|
||||
schema[table_name] = {"name": table_name, "columns": []}
|
||||
if "table_description" in row:
|
||||
schema[table_name]["description"] = row["table_description"]
|
||||
|
||||
return list(schema.values())
|
||||
|
||||
@@ -356,7 +405,7 @@ class BigQuery(BaseQueryRunner):
|
||||
self._get_bigquery_service().jobs().cancel(
|
||||
projectId=self._get_project_id(),
|
||||
jobId=self.current_job_id,
|
||||
location=self._get_location(),
|
||||
location=self.current_job_location,
|
||||
).execute()
|
||||
|
||||
raise
|
||||
|
||||
@@ -77,7 +77,11 @@ class ClickHouse(BaseSQLQueryRunner):
|
||||
self._url = self._url._replace(netloc="{}:{}".format(self._url.hostname, port))
|
||||
|
||||
def _get_tables(self, schema):
|
||||
query = "SELECT database, table, name FROM system.columns WHERE database NOT IN ('system')"
|
||||
query = """
|
||||
SELECT database, table, name, type as data_type
|
||||
FROM system.columns
|
||||
WHERE database NOT IN ('system', 'information_schema', 'INFORMATION_SCHEMA')
|
||||
"""
|
||||
|
||||
results, error = self.run_query(query, None)
|
||||
|
||||
@@ -90,7 +94,7 @@ class ClickHouse(BaseSQLQueryRunner):
|
||||
if table_name not in schema:
|
||||
schema[table_name] = {"name": table_name, "columns": []}
|
||||
|
||||
schema[table_name]["columns"].append(row["name"])
|
||||
schema[table_name]["columns"].append({"name": row["name"], "type": row["data_type"]})
|
||||
|
||||
return list(schema.values())
|
||||
|
||||
|
||||
174
redash/query_runner/duckdb.py
Normal file
174
redash/query_runner/duckdb.py
Normal file
@@ -0,0 +1,174 @@
|
||||
import logging
|
||||
|
||||
from redash.query_runner import (
|
||||
TYPE_BOOLEAN,
|
||||
TYPE_DATE,
|
||||
TYPE_DATETIME,
|
||||
TYPE_FLOAT,
|
||||
TYPE_INTEGER,
|
||||
TYPE_STRING,
|
||||
BaseSQLQueryRunner,
|
||||
InterruptException,
|
||||
register,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
try:
|
||||
import duckdb
|
||||
|
||||
enabled = True
|
||||
except ImportError:
|
||||
enabled = False
|
||||
|
||||
# Map DuckDB types to Redash column types
|
||||
TYPES_MAP = {
|
||||
"BOOLEAN": TYPE_BOOLEAN,
|
||||
"TINYINT": TYPE_INTEGER,
|
||||
"SMALLINT": TYPE_INTEGER,
|
||||
"INTEGER": TYPE_INTEGER,
|
||||
"BIGINT": TYPE_INTEGER,
|
||||
"HUGEINT": TYPE_INTEGER,
|
||||
"REAL": TYPE_FLOAT,
|
||||
"DOUBLE": TYPE_FLOAT,
|
||||
"DECIMAL": TYPE_FLOAT,
|
||||
"VARCHAR": TYPE_STRING,
|
||||
"BLOB": TYPE_STRING,
|
||||
"DATE": TYPE_DATE,
|
||||
"TIMESTAMP": TYPE_DATETIME,
|
||||
"TIMESTAMP WITH TIME ZONE": TYPE_DATETIME,
|
||||
"TIME": TYPE_DATETIME,
|
||||
"INTERVAL": TYPE_STRING,
|
||||
"UUID": TYPE_STRING,
|
||||
"JSON": TYPE_STRING,
|
||||
"STRUCT": TYPE_STRING,
|
||||
"MAP": TYPE_STRING,
|
||||
"UNION": TYPE_STRING,
|
||||
}
|
||||
|
||||
|
||||
class DuckDB(BaseSQLQueryRunner):
|
||||
noop_query = "SELECT 1"
|
||||
|
||||
def __init__(self, configuration):
|
||||
super().__init__(configuration)
|
||||
self.dbpath = configuration.get("dbpath", ":memory:")
|
||||
exts = configuration.get("extensions", "")
|
||||
self.extensions = [e.strip() for e in exts.split(",") if e.strip()]
|
||||
self._connect()
|
||||
|
||||
@classmethod
|
||||
def configuration_schema(cls):
|
||||
return {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"dbpath": {
|
||||
"type": "string",
|
||||
"title": "Database Path",
|
||||
"default": ":memory:",
|
||||
},
|
||||
"extensions": {"type": "string", "title": "Extensions (comma separated)"},
|
||||
},
|
||||
"order": ["dbpath", "extensions"],
|
||||
"required": ["dbpath"],
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def enabled(cls) -> bool:
|
||||
return enabled
|
||||
|
||||
def _connect(self) -> None:
|
||||
self.con = duckdb.connect(self.dbpath)
|
||||
for ext in self.extensions:
|
||||
try:
|
||||
if "." in ext:
|
||||
prefix, name = ext.split(".", 1)
|
||||
if prefix == "community":
|
||||
self.con.execute(f"INSTALL {name} FROM community")
|
||||
self.con.execute(f"LOAD {name}")
|
||||
else:
|
||||
raise Exception("Unknown extension prefix.")
|
||||
else:
|
||||
self.con.execute(f"INSTALL {ext}")
|
||||
self.con.execute(f"LOAD {ext}")
|
||||
except Exception as e:
|
||||
logger.warning("Failed to load extension %s: %s", ext, e)
|
||||
|
||||
def run_query(self, query, user) -> tuple:
|
||||
try:
|
||||
cursor = self.con.cursor()
|
||||
cursor.execute(query)
|
||||
columns = self.fetch_columns(
|
||||
[(d[0], TYPES_MAP.get(d[1].upper(), TYPE_STRING)) for d in cursor.description]
|
||||
)
|
||||
rows = [dict(zip((col["name"] for col in columns), row)) for row in cursor.fetchall()]
|
||||
data = {"columns": columns, "rows": rows}
|
||||
return data, None
|
||||
except duckdb.InterruptException:
|
||||
raise InterruptException("Query cancelled by user.")
|
||||
except Exception as e:
|
||||
logger.exception("Error running query: %s", e)
|
||||
return None, str(e)
|
||||
|
||||
def get_schema(self, get_stats=False) -> list:
|
||||
tables_query = """
|
||||
SELECT table_schema, table_name FROM information_schema.tables
|
||||
WHERE table_schema NOT IN ('information_schema', 'pg_catalog');
|
||||
"""
|
||||
tables_results, error = self.run_query(tables_query, None)
|
||||
if error:
|
||||
raise Exception(f"Failed to get tables: {error}")
|
||||
|
||||
schema = {}
|
||||
for table_row in tables_results["rows"]:
|
||||
full_table_name = f"{table_row['table_schema']}.{table_row['table_name']}"
|
||||
schema[full_table_name] = {"name": full_table_name, "columns": []}
|
||||
|
||||
describe_query = f'DESCRIBE "{table_row["table_schema"]}"."{table_row["table_name"]}";'
|
||||
columns_results, error = self.run_query(describe_query, None)
|
||||
if error:
|
||||
logger.warning("Failed to describe table %s: %s", full_table_name, error)
|
||||
continue
|
||||
|
||||
for col_row in columns_results["rows"]:
|
||||
col = {"name": col_row["column_name"], "type": col_row["column_type"]}
|
||||
schema[full_table_name]["columns"].append(col)
|
||||
|
||||
if col_row["column_type"].startswith("STRUCT("):
|
||||
schema[full_table_name]["columns"].extend(
|
||||
self._expand_struct_fields(col["name"], col_row["column_type"])
|
||||
)
|
||||
|
||||
return list(schema.values())
|
||||
|
||||
def _expand_struct_fields(self, base_name: str, struct_type: str) -> list:
    """Recursively expand STRUCT(...) definitions into pseudo-columns."""
    # Drop the surrounding "STRUCT(" ... ")" wrapper.
    body = struct_type[len("STRUCT(") : -1].strip()

    # Split on top-level commas only. A running parenthesis depth keeps
    # commas inside nested types (e.g. nested STRUCTs) from splitting.
    parts = []
    buf = []
    depth = 0
    for ch in body:
        if ch == "(":
            depth += 1
        elif ch == ")":
            depth -= 1
        if ch == "," and depth == 0:
            parts.append("".join(buf).strip())
            buf = []
        else:
            buf.append(ch)
    if buf:
        parts.append("".join(buf).strip())

    fields = []
    for part in parts:
        # Each entry looks like "<fieldname> <TYPE>"; split once so the
        # type may itself contain spaces.
        fname, ftype = part.split(" ", 1)
        dotted = f"{base_name}.{fname}"
        fields.append({"name": dotted, "type": ftype})
        # Nested STRUCTs recurse, prefixing the dotted path so far.
        if ftype.startswith("STRUCT("):
            fields.extend(self._expand_struct_fields(dotted, ftype))
    return fields
|
||||
|
||||
|
||||
# Make the DuckDB runner available to Redash's query-runner registry.
register(DuckDB)
|
||||
@@ -91,8 +91,8 @@ class BaseElasticSearch(BaseQueryRunner):
|
||||
|
||||
logger.setLevel(logging.DEBUG)
|
||||
|
||||
self.server_url = self.configuration["server"]
|
||||
if self.server_url[-1] == "/":
|
||||
self.server_url = self.configuration.get("server", "")
|
||||
if self.server_url and self.server_url[-1] == "/":
|
||||
self.server_url = self.server_url[:-1]
|
||||
|
||||
basic_auth_user = self.configuration.get("basic_auth_user", None)
|
||||
|
||||
@@ -34,9 +34,13 @@ class ResultSet:
|
||||
|
||||
def parse_issue(issue, field_mapping): # noqa: C901
|
||||
result = OrderedDict()
|
||||
result["key"] = issue["key"]
|
||||
|
||||
for k, v in issue["fields"].items(): #
|
||||
# Handle API v3 response format: key field may be missing, use id as fallback
|
||||
result["key"] = issue.get("key", issue.get("id", "unknown"))
|
||||
|
||||
# Handle API v3 response format: fields may be missing
|
||||
fields = issue.get("fields", {})
|
||||
for k, v in fields.items(): #
|
||||
output_name = field_mapping.get_output_field_name(k)
|
||||
member_names = field_mapping.get_dict_members(k)
|
||||
|
||||
@@ -98,7 +102,9 @@ def parse_issues(data, field_mapping):
|
||||
|
||||
def parse_count(data):
    """Build a single-row ResultSet holding the issue count.

    Jira API v3 responses may omit the 'total' field; in that case fall
    back to counting the returned issues (0 when 'issues' is also absent).
    The unconditional ``data["total"]`` access from the pre-v3 version is
    removed — it raised KeyError on v3 responses and produced a duplicate
    row when combined with the fallback.
    """
    results = ResultSet()
    # API v3 may not return 'total' field, fallback to counting issues
    count = data.get("total", len(data.get("issues", [])))
    results.add_row({"count": count})
    return results
|
||||
|
||||
|
||||
@@ -160,18 +166,26 @@ class JiraJQL(BaseHTTPQueryRunner):
|
||||
self.syntax = "json"
|
||||
|
||||
def run_query(self, query, user):
|
||||
jql_url = "{}/rest/api/2/search".format(self.configuration["url"])
|
||||
# Updated to API v3 endpoint, fix double slash issue
|
||||
jql_url = "{}/rest/api/3/search/jql".format(self.configuration["url"].rstrip("/"))
|
||||
|
||||
query = json_loads(query)
|
||||
query_type = query.pop("queryType", "select")
|
||||
field_mapping = FieldMapping(query.pop("fieldMapping", {}))
|
||||
|
||||
# API v3 requires mandatory jql parameter with restrictions
|
||||
if "jql" not in query or not query["jql"]:
|
||||
query["jql"] = "created >= -30d order by created DESC"
|
||||
|
||||
if query_type == "count":
|
||||
query["maxResults"] = 1
|
||||
query["fields"] = ""
|
||||
else:
|
||||
query["maxResults"] = query.get("maxResults", 1000)
|
||||
|
||||
if "fields" not in query:
|
||||
query["fields"] = "*all"
|
||||
|
||||
response, error = self.get_response(jql_url, params=query)
|
||||
if error is not None:
|
||||
return None, error
|
||||
@@ -182,17 +196,15 @@ class JiraJQL(BaseHTTPQueryRunner):
|
||||
results = parse_count(data)
|
||||
else:
|
||||
results = parse_issues(data, field_mapping)
|
||||
index = data["startAt"] + data["maxResults"]
|
||||
|
||||
while data["total"] > index:
|
||||
query["startAt"] = index
|
||||
# API v3 uses token-based pagination instead of startAt/total
|
||||
while not data.get("isLast", True) and "nextPageToken" in data:
|
||||
query["nextPageToken"] = data["nextPageToken"]
|
||||
response, error = self.get_response(jql_url, params=query)
|
||||
if error is not None:
|
||||
return None, error
|
||||
|
||||
data = response.json()
|
||||
index = data["startAt"] + data["maxResults"]
|
||||
|
||||
addl_results = parse_issues(data, field_mapping)
|
||||
results.merge(addl_results)
|
||||
|
||||
|
||||
@@ -188,7 +188,7 @@ class MongoDB(BaseQueryRunner):
|
||||
|
||||
self.syntax = "json"
|
||||
|
||||
self.db_name = self.configuration["dbName"]
|
||||
self.db_name = self.configuration.get("dbName", "")
|
||||
|
||||
self.is_replica_set = (
|
||||
True if "replicaSetName" in self.configuration and self.configuration["replicaSetName"] else False
|
||||
@@ -215,10 +215,10 @@ class MongoDB(BaseQueryRunner):
|
||||
if readPreference:
|
||||
kwargs["readPreference"] = readPreference
|
||||
|
||||
if "username" in self.configuration:
|
||||
if self.configuration.get("username"):
|
||||
kwargs["username"] = self.configuration["username"]
|
||||
|
||||
if "password" in self.configuration:
|
||||
if self.configuration.get("password"):
|
||||
kwargs["password"] = self.configuration["password"]
|
||||
|
||||
db_connection = pymongo.MongoClient(self.configuration["connectionString"], **kwargs)
|
||||
|
||||
@@ -150,9 +150,11 @@ class Mysql(BaseSQLQueryRunner):
|
||||
query = """
|
||||
SELECT col.table_schema as table_schema,
|
||||
col.table_name as table_name,
|
||||
col.column_name as column_name
|
||||
col.column_name as column_name,
|
||||
col.data_type as data_type,
|
||||
col.column_comment as column_comment
|
||||
FROM `information_schema`.`columns` col
|
||||
WHERE col.table_schema NOT IN ('information_schema', 'performance_schema', 'mysql', 'sys');
|
||||
WHERE LOWER(col.table_schema) NOT IN ('information_schema', 'performance_schema', 'mysql', 'sys');
|
||||
"""
|
||||
|
||||
results, error = self.run_query(query, None)
|
||||
@@ -169,7 +171,38 @@ class Mysql(BaseSQLQueryRunner):
|
||||
if table_name not in schema:
|
||||
schema[table_name] = {"name": table_name, "columns": []}
|
||||
|
||||
schema[table_name]["columns"].append(row["column_name"])
|
||||
schema[table_name]["columns"].append(
|
||||
{
|
||||
"name": row["column_name"],
|
||||
"type": row["data_type"],
|
||||
"description": row["column_comment"],
|
||||
}
|
||||
)
|
||||
|
||||
table_query = """
|
||||
SELECT col.table_schema as table_schema,
|
||||
col.table_name as table_name,
|
||||
col.table_comment as table_comment
|
||||
FROM `information_schema`.`tables` col
|
||||
WHERE LOWER(col.table_schema) NOT IN ('information_schema', 'performance_schema', 'mysql', 'sys'); \
|
||||
"""
|
||||
|
||||
results, error = self.run_query(table_query, None)
|
||||
|
||||
if error is not None:
|
||||
self._handle_run_query_error(error)
|
||||
|
||||
for row in results["rows"]:
|
||||
if row["table_schema"] != self.configuration["db"]:
|
||||
table_name = "{}.{}".format(row["table_schema"], row["table_name"])
|
||||
else:
|
||||
table_name = row["table_name"]
|
||||
|
||||
if table_name not in schema:
|
||||
schema[table_name] = {"name": table_name, "columns": []}
|
||||
|
||||
if "table_comment" in row and row["table_comment"]:
|
||||
schema[table_name]["description"] = row["table_comment"]
|
||||
|
||||
return list(schema.values())
|
||||
|
||||
|
||||
@@ -138,6 +138,15 @@ def _get_ssl_config(configuration):
|
||||
return ssl_config
|
||||
|
||||
|
||||
def _parse_dsn(configuration):
    """Parse the extra "dsn" configuration string into connection kwargs.

    Raises ValueError when the DSN tries to set any of the standard
    connection parameters, since those are configured through their own
    dedicated fields.
    """
    reserved = {"user", "password", "host", "port", "dbname"}
    extra = psycopg2.extensions.parse_dsn(configuration.get("dsn", ""))
    clashes = reserved.intersection(extra.keys())
    if clashes:
        raise ValueError("Extra parameters may not contain {}".format(clashes))
    return extra
|
||||
|
||||
|
||||
class PostgreSQL(BaseSQLQueryRunner):
|
||||
noop_query = "SELECT 1"
|
||||
|
||||
@@ -151,6 +160,7 @@ class PostgreSQL(BaseSQLQueryRunner):
|
||||
"host": {"type": "string", "default": "127.0.0.1"},
|
||||
"port": {"type": "number", "default": 5432},
|
||||
"dbname": {"type": "string", "title": "Database Name"},
|
||||
"dsn": {"type": "string", "default": "application_name=redash", "title": "Parameters"},
|
||||
"sslmode": {
|
||||
"type": "string",
|
||||
"title": "SSL Mode",
|
||||
@@ -205,24 +215,15 @@ class PostgreSQL(BaseSQLQueryRunner):
|
||||
|
||||
def _get_tables(self, schema):
|
||||
"""
|
||||
relkind constants per https://www.postgresql.org/docs/10/static/catalog-pg-class.html
|
||||
r = regular table
|
||||
v = view
|
||||
relkind constants from https://www.postgresql.org/docs/current/catalog-pg-class.html
|
||||
m = materialized view
|
||||
f = foreign table
|
||||
p = partitioned table (new in 10)
|
||||
---
|
||||
i = index
|
||||
S = sequence
|
||||
t = TOAST table
|
||||
c = composite type
|
||||
"""
|
||||
|
||||
query = """
|
||||
SELECT s.nspname as table_schema,
|
||||
c.relname as table_name,
|
||||
a.attname as column_name,
|
||||
null as data_type
|
||||
SELECT s.nspname AS table_schema,
|
||||
c.relname AS table_name,
|
||||
a.attname AS column_name,
|
||||
NULL AS data_type
|
||||
FROM pg_class c
|
||||
JOIN pg_namespace s
|
||||
ON c.relnamespace = s.oid
|
||||
@@ -231,8 +232,8 @@ class PostgreSQL(BaseSQLQueryRunner):
|
||||
ON a.attrelid = c.oid
|
||||
AND a.attnum > 0
|
||||
AND NOT a.attisdropped
|
||||
WHERE c.relkind IN ('m', 'f', 'p')
|
||||
AND has_table_privilege(s.nspname || '.' || c.relname, 'select')
|
||||
WHERE c.relkind = 'm'
|
||||
AND has_table_privilege(quote_ident(s.nspname) || '.' || quote_ident(c.relname), 'select')
|
||||
AND has_schema_privilege(s.nspname, 'usage')
|
||||
|
||||
UNION
|
||||
@@ -243,6 +244,8 @@ class PostgreSQL(BaseSQLQueryRunner):
|
||||
data_type
|
||||
FROM information_schema.columns
|
||||
WHERE table_schema NOT IN ('pg_catalog', 'information_schema')
|
||||
AND has_table_privilege(quote_ident(table_schema) || '.' || quote_ident(table_name), 'select')
|
||||
AND has_schema_privilege(table_schema, 'usage')
|
||||
"""
|
||||
|
||||
self._get_definitions(schema, query)
|
||||
@@ -251,6 +254,7 @@ class PostgreSQL(BaseSQLQueryRunner):
|
||||
|
||||
def _get_connection(self):
|
||||
self.ssl_config = _get_ssl_config(self.configuration)
|
||||
self.dsn = _parse_dsn(self.configuration)
|
||||
connection = psycopg2.connect(
|
||||
user=self.configuration.get("user"),
|
||||
password=self.configuration.get("password"),
|
||||
@@ -259,6 +263,7 @@ class PostgreSQL(BaseSQLQueryRunner):
|
||||
dbname=self.configuration.get("dbname"),
|
||||
async_=True,
|
||||
**self.ssl_config,
|
||||
**self.dsn,
|
||||
)
|
||||
|
||||
return connection
|
||||
|
||||
@@ -55,12 +55,13 @@ class Script(BaseQueryRunner):
|
||||
def __init__(self, configuration):
    """Validate the configured scripts path at construction time.

    Uses ``configuration.get("path", "")`` so a missing "path" key does
    not raise KeyError (the merged diff residue still indexed
    ``self.configuration["path"]`` directly, duplicating the checks).

    Raises ValueError when the path tries to escape the scripts
    directory via a "../" component.
    """
    super(Script, self).__init__(configuration)

    path = self.configuration.get("path", "")
    # If path is * allow any execution path
    if path == "*":
        return

    # Poor man's protection against running scripts from outside the scripts directory
    if path.find("../") > -1:
        raise ValueError("Scripts can only be run from the configured scripts directory")
|
||||
|
||||
def test_connection(self):
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user