Compare commits
292 Commits
23.09.0-dev
...
Authors include dependabot. The 292 commits in this range are listed on the compare page only as abbreviated SHA1 hashes, with no author, date, or commit message shown.
@@ -1,11 +1,11 @@
FROM cypress/browsers:node16.18.0-chrome90-ff88
FROM cypress/browsers:node18.12.0-chrome106-ff106

ENV APP /usr/src/app
WORKDIR $APP

COPY package.json yarn.lock .yarnrc $APP/
COPY viz-lib $APP/viz-lib
RUN npm install yarn@1.22.19 -g && yarn --frozen-lockfile --network-concurrency 1 > /dev/null
RUN npm install yarn@1.22.22 -g && yarn --frozen-lockfile --network-concurrency 1 > /dev/null

COPY . $APP
@@ -1,4 +1,3 @@
version: '2.2'
services:
redash:
build: ../
@@ -19,7 +18,7 @@ services:
image: redis:7-alpine
restart: unless-stopped
postgres:
image: pgautoupgrade/pgautoupgrade:15-alpine3.8
image: pgautoupgrade/pgautoupgrade:latest
command: "postgres -c fsync=off -c full_page_writes=off -c synchronous_commit=OFF"
restart: unless-stopped
environment:
@@ -1,10 +1,8 @@
version: "2.2"
x-redash-service: &redash-service
build:
context: ../
args:
skip_dev_deps: "true"
skip_ds_deps: "true"
install_groups: "main"
code_coverage: ${CODE_COVERAGE}
x-redash-environment: &redash-environment
REDASH_LOG_LEVEL: "INFO"
@@ -68,7 +66,7 @@ services:
image: redis:7-alpine
restart: unless-stopped
postgres:
image: pgautoupgrade/pgautoupgrade:15-alpine3.8
image: pgautoupgrade/pgautoupgrade:latest
command: "postgres -c fsync=off -c full_page_writes=off -c synchronous_commit=OFF"
restart: unless-stopped
environment:
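As a quick sketch of how the new `code_coverage` build argument is exercised locally: the compose file path and project name below are taken from the CI workflow later in this diff and are assumptions for any other setup.

```bash
# Sketch (assumes Docker Compose v2 and the .ci compose file referenced by CI):
# setting CODE_COVERAGE makes the build arg non-empty, which switches the
# frontend build to the instrumented BABEL_ENV=test configuration.
CODE_COVERAGE=true docker compose -f .ci/compose.cypress.yaml -p cypress up -d --build
```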
@@ -1,21 +1,39 @@
#!/bin/bash

# This script only needs to run on the main Redash repo

if [ "${GITHUB_REPOSITORY}" != "getredash/redash" ]; then
echo "Skipping image build for Docker Hub, as this isn't the main Redash repository"
exit 0
fi

if [ "${GITHUB_REF_NAME}" != "master" ] && [ "${GITHUB_REF_NAME}" != "preview-image" ]; then
echo "Skipping image build for Docker Hub, as this isn't the 'master' nor 'preview-image' branch"
exit 0
fi

if [ "x${DOCKER_USER}" = "x" ] || [ "x${DOCKER_PASS}" = "x" ]; then
echo "Skipping image build for Docker Hub, as the login details aren't available"
exit 0
fi

set -e
VERSION=$(jq -r .version package.json)
VERSION_TAG=$VERSION.b$CIRCLE_BUILD_NUM
VERSION_TAG="$VERSION.b${GITHUB_RUN_ID}.${GITHUB_RUN_NUMBER}"

export DOCKER_BUILDKIT=1
export COMPOSE_DOCKER_CLI_BUILD=1

docker login -u $DOCKER_USER -p $DOCKER_PASS
docker login -u "${DOCKER_USER}" -p "${DOCKER_PASS}"

if [ $CIRCLE_BRANCH = master ] || [ $CIRCLE_BRANCH = preview-image ]
then
docker build --build-arg skip_dev_deps=true -t redash/redash:preview -t redash/preview:$VERSION_TAG .
docker push redash/redash:preview
docker push redash/preview:$VERSION_TAG
else
docker build --build-arg skip_dev_deps=true -t redash/redash:$VERSION_TAG .
docker push redash/redash:$VERSION_TAG
fi
DOCKERHUB_REPO="redash/redash"
DOCKER_TAGS="-t redash/redash:preview -t redash/preview:${VERSION_TAG}"

echo "Built: $VERSION_TAG"
# Build the docker container
docker build --build-arg install_groups="main,all_ds,dev" ${DOCKER_TAGS} .

# Push the container to the preview build locations
docker push "${DOCKERHUB_REPO}:preview"
docker push "redash/preview:${VERSION_TAG}"

echo "Built: ${VERSION_TAG}"
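As a small illustration of the tag scheme introduced above (the run ID and run number shown in the comments are made-up example values), the preview tag is derived from package.json like this:

```bash
# Sketch: how VERSION_TAG is composed in the rewritten script.
VERSION=$(jq -r .version package.json)                          # e.g. 23.09.0-dev
VERSION_TAG="$VERSION.b${GITHUB_RUN_ID}.${GITHUB_RUN_NUMBER}"   # e.g. 23.09.0-dev.b6012345678.42
echo "redash/preview:$VERSION_TAG"
```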
@@ -1,6 +1,6 @@
#!/bin/bash
VERSION=$(jq -r .version package.json)
FULL_VERSION=$VERSION+b$CIRCLE_BUILD_NUM
FULL_VERSION=${VERSION}+b${GITHUB_RUN_ID}.${GITHUB_RUN_NUMBER}

sed -ri "s/^__version__ = '([A-Za-z0-9.-]*)'/__version__ = '$FULL_VERSION'/" redash/__init__.py
sed -i "s/dev/$CIRCLE_SHA1/" client/app/version.json
sed -ri "s/^__version__ = '([A-Za-z0-9.-]*)'/__version__ = '${FULL_VERSION}'/" redash/__init__.py
sed -i "s/dev/${GITHUB_SHA}/" client/app/version.json
@@ -1,5 +1,4 @@
client/.tmp/
client/dist/
node_modules/
viz-lib/node_modules/
.tmp/
.github/workflows/ci.yml (vendored, 98 changed lines)
@@ -4,61 +4,73 @@ on:
branches:
- master
pull_request:
branches:
- master
env:
NODE_VERSION: 16.20.1
NODE_VERSION: 18
YARN_VERSION: 1.22.22
jobs:
backend-lint:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v3
- if: github.event.pull_request.mergeable == 'false'
name: Exit if PR is not mergeable
run: exit 1
- uses: actions/checkout@v4
with:
fetch-depth: 1
- uses: actions/setup-python@v4
ref: ${{ github.event.pull_request.head.sha }}
- uses: actions/setup-python@v5
with:
python-version: '3.8'
- run: sudo pip install flake8==6.1.0 black==23.1.0 isort==5.12.0
- run: flake8 .
- run: sudo pip install black==23.1.0 ruff==0.0.287
- run: ruff check .
- run: black --check .
- run: isort --check-only --diff .

backend-unit-tests:
runs-on: ubuntu-22.04
needs: backend-lint
env:
COMPOSE_FILE: .ci/docker-compose.ci.yml
COMPOSE_FILE: .ci/compose.ci.yaml
COMPOSE_PROJECT_NAME: redash
COMPOSE_DOCKER_CLI_BUILD: 1
DOCKER_BUILDKIT: 1
steps:
- uses: actions/checkout@v3
- if: github.event.pull_request.mergeable == 'false'
name: Exit if PR is not mergeable
run: exit 1
- uses: actions/checkout@v4
with:
fetch-depth: 1
ref: ${{ github.event.pull_request.head.sha }}
- name: Build Docker Images
run: |
set -x
docker-compose build --build-arg test_all_deps=true --build-arg skip_frontend_build=true
docker-compose up -d
docker compose build --build-arg install_groups="main,all_ds,dev" --build-arg skip_frontend_build=true
docker compose up -d
sleep 10
- name: Create Test Database
run: docker-compose -p redash run --rm postgres psql -h postgres -U postgres -c "create database tests;"
run: docker compose -p redash run --rm postgres psql -h postgres -U postgres -c "create database tests;"
- name: List Enabled Query Runners
run: docker-compose -p redash run --rm redash manage ds list_types
run: docker compose -p redash run --rm redash manage ds list_types
- name: Run Tests
run: docker-compose -p redash run --name tests redash tests --junitxml=junit.xml --cov-report=xml --cov=redash --cov-config=.coveragerc tests/
run: docker compose -p redash run --name tests redash tests --junitxml=junit.xml --cov-report=xml --cov=redash --cov-config=.coveragerc tests/
- name: Copy Test Results
run: |
mkdir -p /tmp/test-results/unit-tests
docker cp tests:/app/coverage.xml ./coverage.xml
docker cp tests:/app/junit.xml /tmp/test-results/unit-tests/results.xml
- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@v3
# - name: Upload coverage reports to Codecov
# uses: codecov/codecov-action@v3
# with:
# token: ${{ secrets.CODECOV_TOKEN }}
- name: Store Test Results
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: test-results
name: backend-test-results
path: /tmp/test-results
- name: Store Coverage Results
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: coverage
path: coverage.xml
@@ -66,39 +78,47 @@ jobs:
frontend-lint:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v3
- if: github.event.pull_request.mergeable == 'false'
name: Exit if PR is not mergeable
run: exit 1
- uses: actions/checkout@v4
with:
fetch-depth: 1
- uses: actions/setup-node@v3
ref: ${{ github.event.pull_request.head.sha }}
- uses: actions/setup-node@v4
with:
node-version: ${{ env.NODE_VERSION }}
cache: 'yarn'
- name: Install Dependencies
run: |
npm install --global --force yarn@1.22.19
npm install --global --force yarn@$YARN_VERSION
yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
- name: Run Lint
run: yarn lint:ci
- name: Store Test Results
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: test-results
name: frontend-test-results
path: /tmp/test-results

frontend-unit-tests:
runs-on: ubuntu-22.04
needs: frontend-lint
steps:
- uses: actions/checkout@v3
- if: github.event.pull_request.mergeable == 'false'
name: Exit if PR is not mergeable
run: exit 1
- uses: actions/checkout@v4
with:
fetch-depth: 1
- uses: actions/setup-node@v3
ref: ${{ github.event.pull_request.head.sha }}
- uses: actions/setup-node@v4
with:
node-version: ${{ env.NODE_VERSION }}
cache: 'yarn'
- name: Install Dependencies
run: |
npm install --global --force yarn@1.22.19
npm install --global --force yarn@$YARN_VERSION
yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
- name: Run App Tests
run: yarn test
@@ -110,44 +130,48 @@ jobs:
runs-on: ubuntu-22.04
needs: frontend-lint
env:
COMPOSE_FILE: .ci/docker-compose.cypress.yml
COMPOSE_FILE: .ci/compose.cypress.yaml
COMPOSE_PROJECT_NAME: cypress
PERCY_TOKEN_ENCODED: ZGRiY2ZmZDQ0OTdjMzM5ZWE0ZGQzNTZiOWNkMDRjOTk4Zjg0ZjMxMWRmMDZiM2RjOTYxNDZhOGExMjI4ZDE3MA==
CYPRESS_PROJECT_ID_ENCODED: OTI0Y2th
CYPRESS_RECORD_KEY_ENCODED: YzA1OTIxMTUtYTA1Yy00NzQ2LWEyMDMtZmZjMDgwZGI2ODgx
CYPRESS_INSTALL_BINARY: 0
PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: 1
# PERCY_TOKEN: ${{ secrets.PERCY_TOKEN }}
# CYPRESS_PROJECT_ID: ${{ secrets.CYPRESS_PROJECT_ID }}
# CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
steps:
- uses: actions/checkout@v3
- if: github.event.pull_request.mergeable == 'false'
name: Exit if PR is not mergeable
run: exit 1
- uses: actions/checkout@v4
with:
fetch-depth: 1
- uses: actions/setup-node@v3
ref: ${{ github.event.pull_request.head.sha }}
- uses: actions/setup-node@v4
with:
node-version: ${{ env.NODE_VERSION }}
cache: 'yarn'
- name: Enable Code Coverage Report For Master Branch
if: endsWith(github.ref, '/master')
run: |
echo "CODE_COVERAGE=true" >> $GITHUB_ENV
echo "CODE_COVERAGE=true" >> "$GITHUB_ENV"
- name: Install Dependencies
run: |
npm install --global --force yarn@1.22.19
npm install --global --force yarn@$YARN_VERSION
yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
- name: Setup Redash Server
run: |
set -x
yarn cypress build
yarn cypress start -- --skip-db-seed
docker-compose run cypress yarn cypress db-seed
docker compose run cypress yarn cypress db-seed
- name: Execute Cypress Tests
run: yarn cypress run-ci
- name: "Failure: output container logs to console"
if: failure()
run: docker-compose logs
run: docker compose logs
- name: Copy Code Coverage Results
run: docker cp cypress:/usr/src/app/coverage ./coverage || true
- name: Store Coverage Results
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: coverage
path: coverage
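The backend job above can be approximated outside of GitHub Actions; a rough sketch, assuming Docker Compose v2 and the file paths used by the workflow:

```bash
# Sketch: reproducing the backend-unit-tests job locally.
export COMPOSE_FILE=.ci/compose.ci.yaml COMPOSE_PROJECT_NAME=redash
docker compose build --build-arg install_groups="main,all_ds,dev" --build-arg skip_frontend_build=true
docker compose up -d && sleep 10
docker compose run --rm postgres psql -h postgres -U postgres -c "create database tests;"
docker compose run --name tests redash tests --junitxml=junit.xml --cov-report=xml --cov=redash --cov-config=.coveragerc tests/
```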
.github/workflows/periodic-snapshot.yml (vendored, 85 changed lines)
@@ -1,26 +1,85 @@
name: Periodic Snapshot

# 10 minutes after midnight on the first of every month
on:
schedule:
- cron: "10 0 1 * *"
- cron: '10 0 1 * *' # 10 minutes after midnight on the first of every month
workflow_dispatch:
inputs:
bump:
description: 'Bump the last digit of the version'
required: false
type: boolean
version:
description: 'Specific version to set'
required: false
default: ''

env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

permissions:
actions: write
contents: write

jobs:
bump-version-and-tag:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ssh-key: ${{ secrets.ACTION_PUSH_KEY }}

- run: |
date="$(date +%y.%m).0-dev"
gawk -i inplace -F: -v q=\" -v tag=$date '/^ "version": / { print $1 FS, q tag q ","; next} { print }' package.json
gawk -i inplace -F= -v q=\" -v tag=$date '/^__version__ =/ { print $1 FS, q tag q; next} { print }' redash/__init__.py
git config user.name github-actions
git config user.email github-actions@github.com
git add package.json redash/__init__.py
git commit -m "Shapshot: ${date}"
git push origin
git tag $date
git push origin $date
git config user.name 'github-actions[bot]'
git config user.email '41898282+github-actions[bot]@users.noreply.github.com'

# Function to bump the version
bump_version() {
local version="$1"
local IFS=.
read -r major minor patch <<< "$version"
patch=$((patch + 1))
echo "$major.$minor.$patch-dev"
}

# Determine the new version tag
if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
BUMP_INPUT="${{ github.event.inputs.bump }}"
SPECIFIC_VERSION="${{ github.event.inputs.version }}"

# Check if both bump and specific version are provided
if [ "$BUMP_INPUT" = "true" ] && [ -n "$SPECIFIC_VERSION" ]; then
echo "::error::Error: Cannot specify both bump and specific version."
exit 1
fi

if [ -n "$SPECIFIC_VERSION" ]; then
TAG_NAME="$SPECIFIC_VERSION-dev"
elif [ "$BUMP_INPUT" = "true" ]; then
CURRENT_VERSION=$(grep '"version":' package.json | awk -F\" '{print $4}')
TAG_NAME=$(bump_version "$CURRENT_VERSION")
else
echo "No version bump or specific version provided for manual dispatch."
exit 1
fi
else
TAG_NAME="$(date +%y.%m).0-dev"
fi

echo "New version tag: $TAG_NAME"

# Update version in files
gawk -i inplace -F: -v q=\" -v tag=${TAG_NAME} '/^ "version": / { print $1 FS, q tag q ","; next} { print }' package.json
gawk -i inplace -F= -v q=\" -v tag=${TAG_NAME} '/^__version__ =/ { print $1 FS, q tag q; next} { print }' redash/__init__.py
gawk -i inplace -F= -v q=\" -v tag=${TAG_NAME} '/^version =/ { print $1 FS, q tag q; next} { print }' pyproject.toml

git add package.json redash/__init__.py pyproject.toml
git commit -m "Snapshot: ${TAG_NAME}"
git tag ${TAG_NAME}
git push --atomic origin master refs/tags/${TAG_NAME}

# Run the 'preview-image' workflow if run this workflow manually
# For more information, please see the: https://docs.github.com/en/actions/security-guides/automatic-token-authentication
if [ "$BUMP_INPUT" = "true" ] || [ -n "$SPECIFIC_VERSION" ]; then
gh workflow run preview-image.yml --ref $TAG_NAME
fi
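For clarity, here is the `bump_version` helper from the step above in isolation; only the sample invocation at the end is added, and the input value is hypothetical.

```bash
# Copied from the workflow step above; splits a semver-like string on "." and
# increments the patch component, appending the -dev suffix.
bump_version() {
  local version="$1"
  local IFS=.
  read -r major minor patch <<< "$version"
  patch=$((patch + 1))
  echo "$major.$minor.$patch-dev"
}

bump_version "23.09.0"   # prints: 23.09.1-dev
```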
.github/workflows/preview-image.yml (vendored, new file, 182 added lines)
@@ -0,0 +1,182 @@
|
||||
name: Preview Image
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- '*-dev'
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
dockerRepository:
|
||||
description: 'Docker repository'
|
||||
required: true
|
||||
default: 'preview'
|
||||
type: choice
|
||||
options:
|
||||
- preview
|
||||
- redash
|
||||
|
||||
env:
|
||||
NODE_VERSION: 18
|
||||
|
||||
jobs:
|
||||
build-skip-check:
|
||||
runs-on: ubuntu-22.04
|
||||
outputs:
|
||||
skip: ${{ steps.skip-check.outputs.skip }}
|
||||
steps:
|
||||
- name: Skip?
|
||||
id: skip-check
|
||||
run: |
|
||||
if [[ "${{ vars.DOCKER_USER }}" == '' ]]; then
|
||||
echo 'Docker user is empty. Skipping build+push'
|
||||
echo skip=true >> "$GITHUB_OUTPUT"
|
||||
elif [[ "${{ secrets.DOCKER_PASS }}" == '' ]]; then
|
||||
echo 'Docker password is empty. Skipping build+push'
|
||||
echo skip=true >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
echo 'Docker user and password are set and branch is `master`.'
|
||||
echo 'Building + pushing `preview` image.'
|
||||
echo skip=false >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
build-docker-image:
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
arch:
|
||||
- amd64
|
||||
- arm64
|
||||
include:
|
||||
- arch: amd64
|
||||
os: ubuntu-22.04
|
||||
- arch: arm64
|
||||
os: ubuntu-22.04-arm
|
||||
outputs:
|
||||
VERSION_TAG: ${{ steps.version.outputs.VERSION_TAG }}
|
||||
needs:
|
||||
- build-skip-check
|
||||
if: needs.build-skip-check.outputs.skip == 'false'
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 1
|
||||
ref: ${{ github.event.push.after }}
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
cache: 'yarn'
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ vars.DOCKER_USER }}
|
||||
password: ${{ secrets.DOCKER_PASS }}
|
||||
|
||||
- name: Install Dependencies
|
||||
env:
|
||||
PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: true
|
||||
run: |
|
||||
npm install --global --force yarn@1.22.22
|
||||
yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
|
||||
|
||||
- name: Set version
|
||||
id: version
|
||||
run: |
|
||||
set -x
|
||||
.ci/update_version
|
||||
VERSION_TAG=$(jq -r .version package.json)
|
||||
echo "VERSION_TAG=$VERSION_TAG" >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Build and push preview image to Docker Hub
|
||||
id: build-preview
|
||||
uses: docker/build-push-action@v4
|
||||
if: ${{ github.event.inputs.dockerRepository == 'preview' || !github.event.workflow_run }}
|
||||
with:
|
||||
tags: |
|
||||
${{ vars.DOCKER_USER }}/redash
|
||||
${{ vars.DOCKER_USER }}/preview
|
||||
context: .
|
||||
build-args: |
|
||||
test_all_deps=true
|
||||
outputs: type=image,push-by-digest=true,push=true
|
||||
cache-from: type=gha,scope=${{ matrix.arch }}
|
||||
cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
|
||||
env:
|
||||
DOCKER_CONTENT_TRUST: true
|
||||
|
||||
- name: Build and push release image to Docker Hub
|
||||
id: build-release
|
||||
uses: docker/build-push-action@v4
|
||||
if: ${{ github.event.inputs.dockerRepository == 'redash' }}
|
||||
with:
|
||||
tags: |
|
||||
${{ vars.DOCKER_USER }}/redash:${{ steps.version.outputs.VERSION_TAG }}
|
||||
context: .
|
||||
build-args: |
|
||||
test_all_deps=true
|
||||
outputs: type=image,push-by-digest=true,push=true
|
||||
cache-from: type=gha,scope=${{ matrix.arch }}
|
||||
cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
|
||||
env:
|
||||
DOCKER_CONTENT_TRUST: true
|
||||
|
||||
- name: "Failure: output container logs to console"
|
||||
if: failure()
|
||||
run: docker compose logs
|
||||
|
||||
- name: Export digest
|
||||
run: |
|
||||
mkdir -p ${{ runner.temp }}/digests
|
||||
if [[ "${{ github.event.inputs.dockerRepository }}" == 'preview' || !github.event.workflow_run ]]; then
|
||||
digest="${{ steps.build-preview.outputs.digest}}"
|
||||
else
|
||||
digest="${{ steps.build-release.outputs.digest}}"
|
||||
fi
|
||||
touch "${{ runner.temp }}/digests/${digest#sha256:}"
|
||||
|
||||
- name: Upload digest
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: digests-${{ matrix.arch }}
|
||||
path: ${{ runner.temp }}/digests/*
|
||||
if-no-files-found: error
|
||||
|
||||
merge-docker-image:
|
||||
runs-on: ubuntu-22.04
|
||||
needs: build-docker-image
|
||||
steps:
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ vars.DOCKER_USER }}
|
||||
password: ${{ secrets.DOCKER_PASS }}
|
||||
|
||||
- name: Download digests
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
path: ${{ runner.temp }}/digests
|
||||
pattern: digests-*
|
||||
merge-multiple: true
|
||||
|
||||
- name: Create and push manifest for the preview image
|
||||
if: ${{ github.event.inputs.dockerRepository == 'preview' || !github.event.workflow_run }}
|
||||
working-directory: ${{ runner.temp }}/digests
|
||||
run: |
|
||||
docker buildx imagetools create -t ${{ vars.DOCKER_USER }}/redash:preview \
|
||||
$(printf '${{ vars.DOCKER_USER }}/redash:preview@sha256:%s ' *)
|
||||
docker buildx imagetools create -t ${{ vars.DOCKER_USER }}/preview:${{ needs.build-docker-image.outputs.VERSION_TAG }} \
|
||||
$(printf '${{ vars.DOCKER_USER }}/preview:${{ needs.build-docker-image.outputs.VERSION_TAG }}@sha256:%s ' *)
|
||||
|
||||
- name: Create and push manifest for the release image
|
||||
if: ${{ github.event.inputs.dockerRepository == 'redash' }}
|
||||
working-directory: ${{ runner.temp }}/digests
|
||||
run: |
|
||||
docker buildx imagetools create -t ${{ vars.DOCKER_USER }}/redash:${{ needs.build-docker-image.outputs.VERSION_TAG }} \
|
||||
$(printf '${{ vars.DOCKER_USER }}/redash:${{ needs.build-docker-image.outputs.VERSION_TAG }}@sha256:%s ' *)
|
||||
.github/workflows/restyled.yml (vendored, new file, 36 added lines)
@@ -0,0 +1,36 @@
name: Restyled

on:
pull_request:

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

jobs:
restyled:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }}

- uses: restyled-io/actions/setup@v4
- id: restyler
uses: restyled-io/actions/run@v4
with:
fail-on-differences: true

- if: |
!cancelled() &&
steps.restyler.outputs.success == 'true' &&
github.event.pull_request.head.repo.full_name == github.repository
uses: peter-evans/create-pull-request@v6
with:
base: ${{ steps.restyler.outputs.restyled-base }}
branch: ${{ steps.restyler.outputs.restyled-head }}
title: ${{ steps.restyler.outputs.restyled-title }}
body: ${{ steps.restyler.outputs.restyled-body }}
labels: "restyled"
reviewers: ${{ github.event.pull_request.user.login }}
delete-branch: true
.gitignore (vendored, 1 changed line)
@@ -17,6 +17,7 @@ client/dist
_build
.vscode
.env
.tool-versions

dump.rdb
@@ -1,19 +1,10 @@
repos:
- repo: https://github.com/PyCQA/isort
rev: 5.12.0
hooks:
- id: isort
- repo: https://github.com/psf/black
rev: 23.1.0
hooks:
- id: black
language_version: python3
- repo: https://github.com/pycqa/flake8
rev: 6.1.0
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: "v0.0.287"
hooks:
- id: flake8
exclude: "migration/.*|.git|viz-lib|node_modules|migrations|bin/upgrade"
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
hooks:
- id: requirements-txt-fixer
- id: ruff
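A quick way to run the new hook set locally, as a sketch; the pinned versions below mirror the ones used elsewhere in this diff and are assumptions for a local setup.

```bash
# Sketch: running the ruff/black checks the same way the updated lint tooling does.
pip install pre-commit black==23.1.0 ruff==0.0.287
pre-commit run --all-files        # runs the black and ruff hooks configured above
ruff check . && black --check .   # or invoke the linters directly, as CI does
```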
@@ -38,7 +38,9 @@ request_review: author
#
# These can be used to tell other automation to avoid our PRs.
#
labels: ["Skip CI"]
labels:
- restyled
- "Skip CI"

# Labels to ignore
#
@@ -50,13 +52,13 @@ labels: ["Skip CI"]
# Restylers to run, and how
restylers:
- name: black
image: restyled/restyler-black:v19.10b0
image: restyled/restyler-black:v24.4.2
include:
- redash
- tests
- migrations/versions
- name: prettier
image: restyled/restyler-prettier:v1.19.1-2
image: restyled/restyler-prettier:v3.3.2-2
command:
- prettier
- --write

Dockerfile (88 changed lines)
@@ -1,6 +1,6 @@
FROM node:16.20.1 as frontend-builder
FROM node:18-bookworm AS frontend-builder

RUN npm install --global --force yarn@1.22.19
RUN npm install --global --force yarn@1.22.22

# Controls whether to build the frontend assets
ARG skip_frontend_build
@@ -14,33 +14,39 @@ USER redash
WORKDIR /frontend
COPY --chown=redash package.json yarn.lock .yarnrc /frontend/
COPY --chown=redash viz-lib /frontend/viz-lib
COPY --chown=redash scripts /frontend/scripts

# Controls whether to instrument code for coverage information
ARG code_coverage
ENV BABEL_ENV=${code_coverage:+test}

# Avoid issues caused by lags in disk and network I/O speeds when working on top of QEMU emulation for multi-platform image building.
RUN yarn config set network-timeout 300000

RUN if [ "x$skip_frontend_build" = "x" ] ; then yarn --frozen-lockfile --network-concurrency 1; fi

COPY --chown=redash client /frontend/client
COPY --chown=redash webpack.config.js /frontend/
RUN if [ "x$skip_frontend_build" = "x" ] ; then yarn build; else mkdir -p /frontend/client/dist && touch /frontend/client/dist/multi_org.html && touch /frontend/client/dist/index.html; fi
RUN <<EOF
if [ "x$skip_frontend_build" = "x" ]; then
yarn build
else
mkdir -p /frontend/client/dist
touch /frontend/client/dist/multi_org.html
touch /frontend/client/dist/index.html
fi
EOF

FROM python:3.8-slim-buster
FROM python:3.10-slim-bookworm

EXPOSE 5000

# Controls whether to install extra dependencies needed for all data sources.
ARG skip_ds_deps
# Controls whether to install dev dependencies.
ARG skip_dev_deps
# Controls whether to install all dependencies for testing.
ARG test_all_deps

RUN useradd --create-home redash

# Ubuntu packages
RUN apt-get update && \
apt-get install -y --no-install-recommends \
pkg-config \
curl \
gnupg \
build-essential \
@@ -48,6 +54,8 @@ RUN apt-get update && \
libffi-dev \
sudo \
git-core \
# Kerberos, needed for MS SQL Python driver to compile on arm64
libkrb5-dev \
# Postgres client
libpq-dev \
# ODBC support:
@@ -67,41 +75,41 @@ RUN apt-get update && \

ARG TARGETPLATFORM
ARG databricks_odbc_driver_url=https://databricks-bi-artifacts.s3.us-east-2.amazonaws.com/simbaspark-drivers/odbc/2.6.26/SimbaSparkODBC-2.6.26.1045-Debian-64bit.zip
RUN if [ "$TARGETPLATFORM" = "linux/amd64" ]; then \
curl https://packages.microsoft.com/keys/microsoft.asc | apt-key add - \
&& curl https://packages.microsoft.com/config/debian/10/prod.list > /etc/apt/sources.list.d/mssql-release.list \
&& apt-get update \
&& ACCEPT_EULA=Y apt-get install -y --no-install-recommends msodbcsql17 \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* \
&& curl "$databricks_odbc_driver_url" --location --output /tmp/simba_odbc.zip \
&& chmod 600 /tmp/simba_odbc.zip \
&& unzip /tmp/simba_odbc.zip -d /tmp/simba \
&& dpkg -i /tmp/simba/*.deb \
&& printf "[Simba]\nDriver = /opt/simba/spark/lib/64/libsparkodbc_sb64.so" >> /etc/odbcinst.ini \
&& rm /tmp/simba_odbc.zip \
&& rm -rf /tmp/simba; fi
RUN <<EOF
if [ "$TARGETPLATFORM" = "linux/amd64" ]; then
curl https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor -o /usr/share/keyrings/microsoft-prod.gpg
curl https://packages.microsoft.com/config/debian/12/prod.list > /etc/apt/sources.list.d/mssql-release.list
apt-get update
ACCEPT_EULA=Y apt-get install -y --no-install-recommends msodbcsql18
apt-get clean
rm -rf /var/lib/apt/lists/*
curl "$databricks_odbc_driver_url" --location --output /tmp/simba_odbc.zip
chmod 600 /tmp/simba_odbc.zip
unzip /tmp/simba_odbc.zip -d /tmp/simba
dpkg -i /tmp/simba/*.deb
printf "[Simba]\nDriver = /opt/simba/spark/lib/64/libsparkodbc_sb64.so" >> /etc/odbcinst.ini
rm /tmp/simba_odbc.zip
rm -rf /tmp/simba
fi
EOF

WORKDIR /app

# Disable PIP Cache and Version Check
ENV PIP_DISABLE_PIP_VERSION_CHECK=1
ENV PIP_NO_CACHE_DIR=1
ENV POETRY_VERSION=1.8.3
ENV POETRY_HOME=/etc/poetry
ENV POETRY_VIRTUALENVS_CREATE=false
RUN curl -sSL https://install.python-poetry.org | python3 -

RUN pip install pip==23.1.2;
# Avoid crashes, including corrupted cache artifacts, when building multi-platform images with GitHub Actions.
RUN /etc/poetry/bin/poetry cache clear pypi --all

# We first copy only the requirements file, to avoid rebuilding on every file change.
COPY requirements_all_ds.txt ./
RUN if [ "x$skip_ds_deps" = "x" ] ; then cat requirements_all_ds.txt | sed -e '/^\s*#.*$/d' -e '/^\s*$/d' | xargs -n 1 pip install || true ; else echo "Skipping pip install -r requirements_all_ds.txt" ; fi
COPY pyproject.toml poetry.lock ./

COPY requirements_dev.txt ./
RUN if [ "x$skip_dev_deps" = "x" ] ; then pip install -r requirements_dev.txt ; fi

COPY requirements.txt ./
RUN pip install -r requirements.txt

RUN if [ "x$test_all_deps" != "x" ] ; then pip3 install -r requirements.txt -r requirements_dev.txt -r requirements_all_ds.txt ; fi
ARG POETRY_OPTIONS="--no-root --no-interaction --no-ansi"
# for LDAP authentication, install with `ldap3` group
# disabled by default due to GPL license conflict
ARG install_groups="main,all_ds,dev"
RUN /etc/poetry/bin/poetry install --only $install_groups $POETRY_OPTIONS

COPY --chown=redash . /app
COPY --from=frontend-builder --chown=redash /frontend/client/dist /app/client/dist
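The old skip_* build arguments give way to Poetry dependency groups here. A hedged example of choosing groups at build time (the image tag is arbitrary):

```bash
# Sketch: building a slimmer image by installing only the "main" group
# instead of the Dockerfile's default install_groups="main,all_ds,dev".
docker build --build-arg install_groups="main" -t redash/redash:local .
```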
Makefile (47 changed lines)
@@ -1,26 +1,44 @@
.PHONY: compose_build up test_db create_database clean down tests lint backend-unit-tests frontend-unit-tests test build watch start redis-cli bash
.PHONY: compose_build up test_db create_database clean clean-all down tests lint backend-unit-tests frontend-unit-tests test build watch start redis-cli bash

compose_build: .env
COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker-compose build
COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker compose build

up:
COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker-compose up -d --build
docker compose up -d redis postgres --remove-orphans
docker compose exec -u postgres postgres psql postgres --csv \
-1tqc "SELECT table_name FROM information_schema.tables WHERE table_name = 'organizations'" 2> /dev/null \
| grep -q "organizations" || make create_database
COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker compose up -d --build --remove-orphans

test_db:
@for i in `seq 1 5`; do \
if (docker-compose exec postgres sh -c 'psql -U postgres -c "select 1;"' 2>&1 > /dev/null) then break; \
if (docker compose exec postgres sh -c 'psql -U postgres -c "select 1;"' 2>&1 > /dev/null) then break; \
else echo "postgres initializing..."; sleep 5; fi \
done
docker-compose exec postgres sh -c 'psql -U postgres -c "drop database if exists tests;" && psql -U postgres -c "create database tests;"'
docker compose exec postgres sh -c 'psql -U postgres -c "drop database if exists tests;" && psql -U postgres -c "create database tests;"'

create_database: .env
docker-compose run server create_db
docker compose run server create_db

clean:
docker-compose down && docker-compose rm
docker compose down
docker compose --project-name cypress down
docker compose rm --stop --force
docker compose --project-name cypress rm --stop --force
docker image rm --force \
cypress-server:latest cypress-worker:latest cypress-scheduler:latest \
redash-server:latest redash-worker:latest redash-scheduler:latest
docker container prune --force
docker image prune --force
docker volume prune --force

clean-all: clean
docker image rm --force \
redash/redash:latest redis:7-alpine maildev/maildev:latest \
pgautoupgrade/pgautoupgrade:15-alpine3.8 pgautoupgrade/pgautoupgrade:latest

down:
docker-compose down
docker compose down

.env:
printf "REDASH_COOKIE_SECRET=`pwgen -1s 32`\nREDASH_SECRET_KEY=`pwgen -1s 32`\n" >> .env
@@ -31,19 +49,20 @@ format:
pre-commit run --all-files

tests:
docker-compose run server tests
docker compose run server tests

lint:
./bin/flake8_tests.sh
ruff check .
black --check . --diff

backend-unit-tests: up test_db
docker-compose run --rm --name tests server tests
docker compose run --rm --name tests server tests

frontend-unit-tests:
CYPRESS_INSTALL_BINARY=0 PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1 yarn --frozen-lockfile
yarn test

test: lint backend-unit-tests frontend-unit-tests
test: backend-unit-tests frontend-unit-tests lint

build:
yarn build
@@ -55,7 +74,7 @@ start:
yarn start

redis-cli:
docker-compose run --rm redis redis-cli -h redis
docker compose run --rm redis redis-cli -h redis

bash:
docker-compose run --rm server bash
docker compose run --rm server bash
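A typical local workflow with the updated targets, as a sketch; Docker Compose v2 and pwgen are assumed to be available.

```bash
make up                   # starts redis/postgres, creates the database if missing, then the full stack
make backend-unit-tests   # prepares the test database and runs the backend suite in a container
make clean-all            # 'clean' plus removal of the base images listed in the Makefile
```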
@@ -3,7 +3,6 @@
</p>

[](https://redash.io/help/)
[](https://datree.io/?src=badge)
[](https://github.com/getredash/redash/actions)

Redash is designed to enable anyone, regardless of the level of technical sophistication, to harness the power of data big and small. SQL users leverage Redash to explore, query, visualize, and share data from any data sources. Their work in turn enables anybody in their organization to use the data. Every day, millions of users at thousands of organizations around the world use Redash to develop insights and make data-driven decisions.

@@ -47,6 +46,7 @@ Redash supports more than 35 SQL and NoSQL [data sources](https://redash.io/help
- Dgraph
- Apache Drill
- Apache Druid
- e6data
- Eccenca Corporate Memory
- Elasticsearch
- Exasol
@@ -61,6 +61,7 @@ Redash supports more than 35 SQL and NoSQL [data sources](https://redash.io/help
- Apache Hive
- Apache Impala
- InfluxDB
- InfluxDBv2
- IBM Netezza Performance Server
- JIRA (JQL)
- JSON
@@ -83,6 +84,7 @@ Redash supports more than 35 SQL and NoSQL [data sources](https://redash.io/help
- Python
- Qubole
- Rockset
- RisingWave
- Salesforce
- ScyllaDB
- Shell Scripts
@@ -90,6 +92,7 @@ Redash supports more than 35 SQL and NoSQL [data sources](https://redash.io/help
- SPARQL
- SQLite
- TiDB
- Tinybird
- TreasureData
- Trino
- Uptycs
@@ -67,7 +67,7 @@ help() {
echo ""
echo "shell -- open shell"
echo "dev_server -- start Flask development server with debugger and auto reload"
echo "debug -- start Flask development server with remote debugger via ptvsd"
echo "debug -- start Flask development server with remote debugger via debugpy"
echo "create_db -- create database tables"
echo "manage -- CLI to manage redash"
echo "tests -- run tests"
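Only the help text changes here (ptvsd is replaced by debugpy as the remote debugger), so the entrypoint is invoked the same way; a sketch, assuming the compose service is named server as in the Makefile:

```bash
# Sketch: starting the dev server with the debugpy-based debug entrypoint.
docker compose run --rm server debug
```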
@@ -1,9 +0,0 @@
#!/bin/sh

set -o errexit # fail the build if any task fails

flake8 --version ; pip --version
# stop the build if there are Python syntax errors or undefined names
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
bin/upgrade (254 deleted lines)
@@ -1,254 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
import argparse
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
import urllib
|
||||
from collections import namedtuple
|
||||
from fnmatch import fnmatch
|
||||
|
||||
import requests
|
||||
|
||||
try:
|
||||
import semver
|
||||
except ImportError:
|
||||
print("Missing required library: semver.")
|
||||
exit(1)
|
||||
|
||||
REDASH_HOME = os.environ.get("REDASH_HOME", "/opt/redash")
|
||||
CURRENT_VERSION_PATH = "{}/current".format(REDASH_HOME)
|
||||
|
||||
|
||||
def run(cmd, cwd=None):
|
||||
if not cwd:
|
||||
cwd = REDASH_HOME
|
||||
|
||||
return subprocess.check_output(cmd, cwd=cwd, shell=True, stderr=subprocess.STDOUT)
|
||||
|
||||
|
||||
def confirm(question):
|
||||
reply = str(input(question + " (y/n): ")).lower().strip()
|
||||
|
||||
if reply[0] == "y":
|
||||
return True
|
||||
if reply[0] == "n":
|
||||
return False
|
||||
else:
|
||||
return confirm("Please use 'y' or 'n'")
|
||||
|
||||
|
||||
def version_path(version_name):
|
||||
return "{}/{}".format(REDASH_HOME, version_name)
|
||||
|
||||
|
||||
END_CODE = "\033[0m"
|
||||
|
||||
|
||||
def colored_string(text, color):
|
||||
if sys.stdout.isatty():
|
||||
return "{}{}{}".format(color, text, END_CODE)
|
||||
else:
|
||||
return text
|
||||
|
||||
|
||||
def h1(text):
|
||||
print(colored_string(text, "\033[4m\033[1m"))
|
||||
|
||||
|
||||
def green(text):
|
||||
print(colored_string(text, "\033[92m"))
|
||||
|
||||
|
||||
def red(text):
|
||||
print(colored_string(text, "\033[91m"))
|
||||
|
||||
|
||||
class Release(namedtuple("Release", ("version", "download_url", "filename", "description"))):
|
||||
def v1_or_newer(self):
|
||||
return semver.compare(self.version, "1.0.0-alpha") >= 0
|
||||
|
||||
def is_newer(self, version):
|
||||
return semver.compare(self.version, version) > 0
|
||||
|
||||
@property
|
||||
def version_name(self):
|
||||
return self.filename.replace(".tar.gz", "")
|
||||
|
||||
|
||||
def get_latest_release_from_ci():
|
||||
response = requests.get(
|
||||
"https://circleci.com/api/v1.1/project/github/getredash/redash/latest/artifacts?branch=master"
|
||||
)
|
||||
|
||||
if response.status_code != 200:
|
||||
exit("Failed getting releases (status code: %s)." % response.status_code)
|
||||
|
||||
tarball_asset = filter(lambda asset: asset["url"].endswith(".tar.gz"), response.json())[0]
|
||||
filename = urllib.unquote(tarball_asset["pretty_path"].split("/")[-1])
|
||||
version = filename.replace("redash.", "").replace(".tar.gz", "")
|
||||
|
||||
release = Release(version, tarball_asset["url"], filename, "")
|
||||
|
||||
return release
|
||||
|
||||
|
||||
def get_release(channel):
|
||||
if channel == "ci":
|
||||
return get_latest_release_from_ci()
|
||||
|
||||
response = requests.get("https://version.redash.io/api/releases?channel={}".format(channel))
|
||||
release = response.json()[0]
|
||||
|
||||
filename = release["download_url"].split("/")[-1]
|
||||
release = Release(release["version"], release["download_url"], filename, release["description"])
|
||||
|
||||
return release
|
||||
|
||||
|
||||
def link_to_current(version_name):
|
||||
green("Linking to current version...")
|
||||
run("ln -nfs {} {}".format(version_path(version_name), CURRENT_VERSION_PATH))
|
||||
|
||||
|
||||
def restart_services():
|
||||
# We're doing this instead of simple 'supervisorctl restart all' because
|
||||
# otherwise it won't notice that /opt/redash/current pointing at a different
|
||||
# directory.
|
||||
green("Restarting...")
|
||||
try:
|
||||
run("sudo /etc/init.d/redash_supervisord restart")
|
||||
except subprocess.CalledProcessError as e:
|
||||
run("sudo service supervisor restart")
|
||||
|
||||
|
||||
def update_requirements(version_name):
|
||||
green("Installing new Python packages (if needed)...")
|
||||
new_requirements_file = "{}/requirements.txt".format(version_path(version_name))
|
||||
|
||||
install_requirements = False
|
||||
|
||||
try:
|
||||
run("diff {}/requirements.txt {}".format(CURRENT_VERSION_PATH, new_requirements_file)) != 0
|
||||
except subprocess.CalledProcessError as e:
|
||||
if e.returncode != 0:
|
||||
install_requirements = True
|
||||
|
||||
if install_requirements:
|
||||
run("sudo pip install -r {}".format(new_requirements_file))
|
||||
|
||||
|
||||
def apply_migrations(release):
|
||||
green("Running migrations (if needed)...")
|
||||
if not release.v1_or_newer():
|
||||
return apply_migrations_pre_v1(release.version_name)
|
||||
|
||||
run("sudo -u redash bin/run ./manage.py db upgrade", cwd=version_path(release.version_name))
|
||||
|
||||
|
||||
def find_migrations(version_name):
|
||||
current_migrations = set(
|
||||
[f for f in os.listdir("{}/migrations".format(CURRENT_VERSION_PATH)) if fnmatch(f, "*_*.py")]
|
||||
)
|
||||
new_migrations = sorted(
|
||||
[f for f in os.listdir("{}/migrations".format(version_path(version_name))) if fnmatch(f, "*_*.py")]
|
||||
)
|
||||
|
||||
return [m for m in new_migrations if m not in current_migrations]
|
||||
|
||||
|
||||
def apply_migrations_pre_v1(version_name):
|
||||
new_migrations = find_migrations(version_name)
|
||||
|
||||
if new_migrations:
|
||||
green("New migrations to run: ")
|
||||
print(", ".join(new_migrations))
|
||||
else:
|
||||
print("No new migrations in this version.")
|
||||
|
||||
if new_migrations and confirm("Apply new migrations? (make sure you have backup)"):
|
||||
for migration in new_migrations:
|
||||
print("Applying {}...".format(migration))
|
||||
run(
|
||||
"sudo sudo -u redash PYTHONPATH=. bin/run python migrations/{}".format(migration),
|
||||
cwd=version_path(version_name),
|
||||
)
|
||||
|
||||
|
||||
def download_and_unpack(release):
|
||||
directory_name = release.version_name
|
||||
|
||||
green("Downloading release tarball...")
|
||||
run(
|
||||
'sudo wget --header="Accept: application/octet-stream" -O {} {}'.format(release.filename, release.download_url)
|
||||
)
|
||||
green("Unpacking to: {}...".format(directory_name))
|
||||
run("sudo mkdir -p {}".format(directory_name))
|
||||
run("sudo tar -C {} -xvf {}".format(directory_name, release.filename))
|
||||
|
||||
green("Changing ownership to redash...")
|
||||
run("sudo chown redash {}".format(directory_name))
|
||||
|
||||
green("Linking .env file...")
|
||||
run("sudo ln -nfs {}/.env {}/.env".format(REDASH_HOME, version_path(directory_name)))
|
||||
|
||||
|
||||
def current_version():
|
||||
real_current_path = os.path.realpath(CURRENT_VERSION_PATH).replace(".b", "+b")
|
||||
return real_current_path.replace(REDASH_HOME + "/", "").replace("redash.", "")
|
||||
|
||||
|
||||
def verify_minimum_version():
|
||||
green("Current version: " + current_version())
|
||||
if semver.compare(current_version(), "0.12.0") < 0:
|
||||
red("You need to have Redash v0.12.0 or newer to upgrade to post v1.0.0 releases.")
|
||||
green("To upgrade to v0.12.0, run the upgrade script set to the legacy channel (--channel legacy).")
|
||||
exit(1)
|
||||
|
||||
|
||||
def show_description_and_confirm(description):
|
||||
if description:
|
||||
print(description)
|
||||
|
||||
if not confirm("Continue with upgrade?"):
|
||||
red("Cancelling upgrade.")
|
||||
exit(1)
|
||||
|
||||
|
||||
def verify_newer_version(release):
|
||||
if not release.is_newer(current_version()):
|
||||
red("The found release is not newer than your current deployed release ({}).".format(current_version()))
|
||||
if not confirm("Continue with upgrade?"):
|
||||
red("Cancelling upgrade.")
|
||||
exit(1)
|
||||
|
||||
|
||||
def deploy_release(channel):
|
||||
h1("Starting Redash upgrade:")
|
||||
|
||||
release = get_release(channel)
|
||||
green("Found version: {}".format(release.version))
|
||||
|
||||
if release.v1_or_newer():
|
||||
verify_minimum_version()
|
||||
|
||||
verify_newer_version(release)
|
||||
show_description_and_confirm(release.description)
|
||||
|
||||
try:
|
||||
download_and_unpack(release)
|
||||
update_requirements(release.version_name)
|
||||
apply_migrations(release)
|
||||
link_to_current(release.version_name)
|
||||
restart_services()
|
||||
green("Done! Enjoy.")
|
||||
except subprocess.CalledProcessError as e:
|
||||
red("Failed running: {}".format(e.cmd))
|
||||
red("Exit status: {}\nOutput:\n{}".format(e.returncode, e.output))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("--channel", help="The channel to get release from (default: stable).", default="stable")
|
||||
args = parser.parse_args()
|
||||
|
||||
deploy_release(args.channel)
|
||||
Binary files added or changed:
- client/app/assets/images/db-logos/e6data.png (new file, 5.2 KiB)
- client/app/assets/images/db-logos/influxdbv2.png (new file, 18 KiB)
- one further image (2.4 KiB before the change; file name not shown)
- client/app/assets/images/db-logos/risingwave.png (new file, 9.7 KiB)
- client/app/assets/images/db-logos/tinybird.png (new file, 18 KiB)
- client/app/assets/images/db-logos/yandex_disk.png (new file, 8.5 KiB)
- client/app/assets/images/destinations/datadog.png (new file, 45 KiB)
- client/app/assets/images/destinations/webex.png (new file, 22 KiB)
@@ -223,6 +223,7 @@ body.fixed-layout {
}

.editor__left__schema {
min-height: 120px;
flex-grow: 1;
display: flex;
flex-direction: column;
@@ -22,7 +22,7 @@ function BeaconConsent() {
setHide(true);
};

const confirmConsent = confirm => {
const confirmConsent = (confirm) => {
let message = "🙏 Thank you.";

if (!confirm) {
@@ -47,7 +47,8 @@ function BeaconConsent() {
<HelpTrigger type="USAGE_DATA_SHARING" />
</>
}
bordered={false}>
bordered={false}
>
<Text>Help Redash improve by automatically sending anonymous usage data:</Text>
<div className="m-t-5">
<ul>
@@ -66,8 +67,7 @@ function BeaconConsent() {
</div>
<div className="m-t-15">
<Text type="secondary">
You can change this setting anytime from the{" "}
<Link href="settings/organization">Organization Settings</Link> page.
You can change this setting anytime from the <Link href="settings/general">Settings</Link> page.
</Text>
</div>
</Card>
@@ -12,6 +12,7 @@ import { wrap as wrapDialog, DialogPropType } from "@/components/DialogWrapper";
|
||||
import QuerySelector from "@/components/QuerySelector";
|
||||
import { Query } from "@/services/query";
|
||||
import { useUniqueId } from "@/lib/hooks/useUniqueId";
|
||||
import "./EditParameterSettingsDialog.less";
|
||||
|
||||
const { Option } = Select;
|
||||
const formItemProps = { labelCol: { span: 6 }, wrapperCol: { span: 16 } };
|
||||
@@ -26,7 +27,7 @@ function isTypeDateRange(type) {
|
||||
|
||||
function joinExampleList(multiValuesOptions) {
|
||||
const { prefix, suffix } = multiValuesOptions;
|
||||
return ["value1", "value2", "value3"].map(value => `${prefix}${value}${suffix}`).join(",");
|
||||
return ["value1", "value2", "value3"].map((value) => `${prefix}${value}${suffix}`).join(",");
|
||||
}
|
||||
|
||||
function NameInput({ name, type, onChange, existingNames, setValidation }) {
|
||||
@@ -54,7 +55,7 @@ function NameInput({ name, type, onChange, existingNames, setValidation }) {
|
||||
|
||||
return (
|
||||
<Form.Item required label="Keyword" help={helpText} validateStatus={validateStatus} {...formItemProps}>
|
||||
<Input onChange={e => onChange(e.target.value)} autoFocus />
|
||||
<Input onChange={(e) => onChange(e.target.value)} autoFocus />
|
||||
</Form.Item>
|
||||
);
|
||||
}
|
||||
@@ -71,6 +72,8 @@ function EditParameterSettingsDialog(props) {
|
||||
const [param, setParam] = useState(clone(props.parameter));
|
||||
const [isNameValid, setIsNameValid] = useState(true);
|
||||
const [initialQuery, setInitialQuery] = useState();
|
||||
const [userInput, setUserInput] = useState(param.regex || "");
|
||||
const [isValidRegex, setIsValidRegex] = useState(true);
|
||||
|
||||
const isNew = !props.parameter.name;
|
||||
|
||||
@@ -114,6 +117,17 @@ function EditParameterSettingsDialog(props) {
|
||||
|
||||
const paramFormId = useUniqueId("paramForm");
|
||||
|
||||
const handleRegexChange = (e) => {
|
||||
setUserInput(e.target.value);
|
||||
try {
|
||||
new RegExp(e.target.value);
|
||||
setParam({ ...param, regex: e.target.value });
|
||||
setIsValidRegex(true);
|
||||
} catch (error) {
|
||||
setIsValidRegex(false);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<Modal
|
||||
{...props.dialog.props}
|
||||
@@ -129,15 +143,17 @@ function EditParameterSettingsDialog(props) {
|
||||
disabled={!isFulfilled()}
|
||||
type="primary"
|
||||
form={paramFormId}
|
||||
data-test="SaveParameterSettings">
|
||||
data-test="SaveParameterSettings"
|
||||
>
|
||||
{isNew ? "Add Parameter" : "OK"}
|
||||
</Button>,
|
||||
]}>
|
||||
]}
|
||||
>
|
||||
<Form layout="horizontal" onFinish={onConfirm} id={paramFormId}>
|
||||
{isNew && (
|
||||
<NameInput
|
||||
name={param.name}
|
||||
onChange={name => setParam({ ...param, name })}
|
||||
onChange={(name) => setParam({ ...param, name })}
|
||||
setValidation={setIsNameValid}
|
||||
existingNames={props.existingParams}
|
||||
type={param.type}
|
||||
@@ -146,15 +162,16 @@ function EditParameterSettingsDialog(props) {
|
||||
<Form.Item required label="Title" {...formItemProps}>
|
||||
<Input
|
||||
value={isNull(param.title) ? getDefaultTitle(param.name) : param.title}
|
||||
onChange={e => setParam({ ...param, title: e.target.value })}
|
||||
onChange={(e) => setParam({ ...param, title: e.target.value })}
|
||||
data-test="ParameterTitleInput"
|
||||
/>
|
||||
</Form.Item>
|
||||
<Form.Item label="Type" {...formItemProps}>
|
||||
<Select value={param.type} onChange={type => setParam({ ...param, type })} data-test="ParameterTypeSelect">
|
||||
<Select value={param.type} onChange={(type) => setParam({ ...param, type })} data-test="ParameterTypeSelect">
|
||||
<Option value="text" data-test="TextParameterTypeOption">
|
||||
Text
|
||||
</Option>
|
||||
<Option value="text-pattern">Text Pattern</Option>
|
||||
<Option value="number" data-test="NumberParameterTypeOption">
|
||||
Number
|
||||
</Option>
|
||||
@@ -180,12 +197,26 @@ function EditParameterSettingsDialog(props) {
|
||||
<Option value="datetime-range-with-seconds">Date and Time Range (with seconds)</Option>
|
||||
</Select>
|
||||
</Form.Item>
|
||||
{param.type === "text-pattern" && (
|
||||
<Form.Item
|
||||
label="Regex"
|
||||
help={!isValidRegex ? "Invalid Regex Pattern" : "Valid Regex Pattern"}
|
||||
{...formItemProps}
|
||||
>
|
||||
<Input
|
||||
value={userInput}
|
||||
onChange={handleRegexChange}
|
||||
className={!isValidRegex ? "input-error" : ""}
|
||||
data-test="RegexPatternInput"
|
||||
/>
|
||||
</Form.Item>
|
||||
)}
|
||||
{param.type === "enum" && (
|
||||
<Form.Item label="Values" help="Dropdown list values (newline delimited)" {...formItemProps}>
|
||||
<Input.TextArea
|
||||
rows={3}
|
||||
value={param.enumOptions}
|
||||
onChange={e => setParam({ ...param, enumOptions: e.target.value })}
|
||||
onChange={(e) => setParam({ ...param, enumOptions: e.target.value })}
|
||||
/>
|
||||
</Form.Item>
|
||||
)}
|
||||
@@ -193,7 +224,7 @@ function EditParameterSettingsDialog(props) {
|
||||
<Form.Item label="Query" help="Select query to load dropdown values from" {...formItemProps}>
|
||||
<QuerySelector
|
||||
selectedQuery={initialQuery}
|
||||
onChange={q => setParam({ ...param, queryId: q && q.id })}
|
||||
onChange={(q) => setParam({ ...param, queryId: q && q.id })}
|
||||
type="select"
|
||||
/>
|
||||
</Form.Item>
|
||||
@@ -202,7 +233,7 @@ function EditParameterSettingsDialog(props) {
|
||||
<Form.Item className="m-b-0" label=" " colon={false} {...formItemProps}>
|
||||
<Checkbox
|
||||
defaultChecked={!!param.multiValuesOptions}
|
||||
onChange={e =>
|
||||
onChange={(e) =>
|
||||
setParam({
|
||||
...param,
|
||||
multiValuesOptions: e.target.checked
|
||||
@@ -214,7 +245,8 @@ function EditParameterSettingsDialog(props) {
|
||||
: null,
|
||||
})
|
||||
}
|
||||
data-test="AllowMultipleValuesCheckbox">
|
||||
data-test="AllowMultipleValuesCheckbox"
|
||||
>
|
||||
Allow multiple values
|
||||
</Checkbox>
|
||||
</Form.Item>
|
||||
@@ -227,10 +259,11 @@ function EditParameterSettingsDialog(props) {
|
||||
Placed in query as: <code>{joinExampleList(param.multiValuesOptions)}</code>
|
||||
</React.Fragment>
|
||||
}
|
||||
{...formItemProps}>
|
||||
{...formItemProps}
|
||||
>
|
||||
<Select
|
||||
value={param.multiValuesOptions.prefix}
|
||||
onChange={quoteOption =>
|
||||
onChange={(quoteOption) =>
|
||||
setParam({
|
||||
...param,
|
||||
multiValuesOptions: {
|
||||
@@ -240,7 +273,8 @@ function EditParameterSettingsDialog(props) {
|
||||
},
|
||||
})
|
||||
}
|
||||
data-test="QuotationSelect">
|
||||
data-test="QuotationSelect"
|
||||
>
|
||||
<Option value="">None (default)</Option>
|
||||
<Option value="'">Single Quotation Mark</Option>
|
||||
<Option value={'"'} data-test="DoubleQuotationMarkOption">
|
||||
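The new text-pattern type validates its regex as the user types: `handleRegexChange` above compiles the string with `new RegExp` and flags the field (via the `input-error` class added in the stylesheet below) when compilation throws. A minimal standalone sketch of that check; the helper name is illustrative, not from the diff:

```js
// Illustrative helper mirroring handleRegexChange's validation step.
function isValidRegexPattern(pattern) {
  try {
    new RegExp(pattern); // throws a SyntaxError for an invalid pattern
    return true;
  } catch (error) {
    return false;
  }
}

console.log(isValidRegexPattern("^\\d{4}-\\d{2}$")); // true
console.log(isValidRegexPattern("a(b"));             // false – unterminated group
```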
3
client/app/components/EditParameterSettingsDialog.less (new file)
@@ -0,0 +1,3 @@
|
||||
.input-error {
|
||||
border-color: red !important;
|
||||
}
|
||||
@@ -101,7 +101,7 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
|
||||
clearTimeout(this.iframeLoadingTimeout);
|
||||
}
|
||||
|
||||
loadIframe = url => {
|
||||
loadIframe = (url) => {
|
||||
clearTimeout(this.iframeLoadingTimeout);
|
||||
this.setState({ loading: true, error: false });
|
||||
|
||||
@@ -116,8 +116,8 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
|
||||
clearTimeout(this.iframeLoadingTimeout);
|
||||
};
|
||||
|
||||
onPostMessageReceived = event => {
|
||||
if (!some(allowedDomains, domain => startsWith(event.origin, domain))) {
|
||||
onPostMessageReceived = (event) => {
|
||||
if (!some(allowedDomains, (domain) => startsWith(event.origin, domain))) {
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -134,7 +134,7 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
|
||||
return helpTriggerType ? helpTriggerType[0] : this.props.href;
|
||||
};
|
||||
|
||||
openDrawer = e => {
|
||||
openDrawer = (e) => {
|
||||
// keep "open in new tab" behavior
|
||||
if (!e.shiftKey && !e.ctrlKey && !e.metaKey) {
|
||||
e.preventDefault();
|
||||
@@ -144,7 +144,7 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
|
||||
}
|
||||
};
|
||||
|
||||
closeDrawer = event => {
|
||||
closeDrawer = (event) => {
|
||||
if (event) {
|
||||
event.preventDefault();
|
||||
}
|
||||
@@ -161,7 +161,7 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
|
||||
const tooltip = get(types, `${this.props.type}[1]`, this.props.title);
|
||||
const className = cx("help-trigger", this.props.className);
|
||||
const url = this.state.currentUrl;
|
||||
const isAllowedDomain = some(allowedDomains, domain => startsWith(url || targetUrl, domain));
|
||||
const isAllowedDomain = some(allowedDomains, (domain) => startsWith(url || targetUrl, domain));
|
||||
const shouldRenderAsLink = this.props.renderAsLink || !isAllowedDomain;
|
||||
|
||||
return (
|
||||
@@ -180,13 +180,15 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
|
||||
)}
|
||||
</>
|
||||
) : null
|
||||
}>
|
||||
}
|
||||
>
|
||||
<Link
|
||||
href={url || this.getUrl()}
|
||||
className={className}
|
||||
rel="noopener noreferrer"
|
||||
target="_blank"
|
||||
onClick={shouldRenderAsLink ? () => {} : this.openDrawer}>
|
||||
onClick={shouldRenderAsLink ? () => {} : this.openDrawer}
|
||||
>
|
||||
{this.props.children}
|
||||
</Link>
|
||||
</Tooltip>
|
||||
@@ -197,7 +199,8 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
|
||||
visible={this.state.visible}
|
||||
className={cx("help-drawer", drawerClassName)}
|
||||
destroyOnClose
|
||||
width={400}>
|
||||
width={400}
|
||||
>
|
||||
<div className="drawer-wrapper">
|
||||
<div className="drawer-menu">
|
||||
{url && (
|
||||
|
||||
@@ -33,10 +33,10 @@ export const MappingType = {
|
||||
};
|
||||
|
||||
export function parameterMappingsToEditableMappings(mappings, parameters, existingParameterNames = []) {
|
||||
return map(mappings, mapping => {
|
||||
return map(mappings, (mapping) => {
|
||||
const result = extend({}, mapping);
|
||||
const alreadyExists = includes(existingParameterNames, mapping.mapTo);
|
||||
result.param = find(parameters, p => p.name === mapping.name);
|
||||
result.param = find(parameters, (p) => p.name === mapping.name);
|
||||
switch (mapping.type) {
|
||||
case ParameterMappingType.DashboardLevel:
|
||||
result.type = alreadyExists ? MappingType.DashboardMapToExisting : MappingType.DashboardAddNew;
|
||||
@@ -62,7 +62,7 @@ export function editableMappingsToParameterMappings(mappings) {
|
||||
map(
|
||||
// convert to map
|
||||
mappings,
|
||||
mapping => {
|
||||
(mapping) => {
|
||||
const result = extend({}, mapping);
|
||||
switch (mapping.type) {
|
||||
case MappingType.DashboardAddNew:
|
||||
@@ -95,11 +95,11 @@ export function editableMappingsToParameterMappings(mappings) {
|
||||
export function synchronizeWidgetTitles(sourceMappings, widgets) {
|
||||
const affectedWidgets = [];
|
||||
|
||||
each(sourceMappings, sourceMapping => {
|
||||
each(sourceMappings, (sourceMapping) => {
|
||||
if (sourceMapping.type === ParameterMappingType.DashboardLevel) {
|
||||
each(widgets, widget => {
|
||||
each(widgets, (widget) => {
|
||||
const widgetMappings = widget.options.parameterMappings;
|
||||
each(widgetMappings, widgetMapping => {
|
||||
each(widgetMappings, (widgetMapping) => {
|
||||
// check if mapped to the same dashboard-level parameter
|
||||
if (
|
||||
widgetMapping.type === ParameterMappingType.DashboardLevel &&
|
||||
@@ -140,7 +140,7 @@ export class ParameterMappingInput extends React.Component {
|
||||
className: "form-item",
|
||||
};
|
||||
|
||||
updateSourceType = type => {
|
||||
updateSourceType = (type) => {
|
||||
let {
|
||||
mapping: { mapTo },
|
||||
} = this.props;
|
||||
@@ -155,7 +155,7 @@ export class ParameterMappingInput extends React.Component {
|
||||
this.updateParamMapping({ type, mapTo });
|
||||
};
|
||||
|
||||
updateParamMapping = update => {
|
||||
updateParamMapping = (update) => {
|
||||
const { onChange, mapping } = this.props;
|
||||
const newMapping = extend({}, mapping, update);
|
||||
if (newMapping.value !== mapping.value) {
|
||||
@@ -175,7 +175,7 @@ export class ParameterMappingInput extends React.Component {
|
||||
renderMappingTypeSelector() {
|
||||
const noExisting = isEmpty(this.props.existingParamNames);
|
||||
return (
|
||||
<Radio.Group value={this.props.mapping.type} onChange={e => this.updateSourceType(e.target.value)}>
|
||||
<Radio.Group value={this.props.mapping.type} onChange={(e) => this.updateSourceType(e.target.value)}>
|
||||
<Radio className="radio" value={MappingType.DashboardAddNew} data-test="NewDashboardParameterOption">
|
||||
New dashboard parameter
|
||||
</Radio>
|
||||
@@ -205,16 +205,16 @@ export class ParameterMappingInput extends React.Component {
|
||||
<Input
|
||||
value={mapTo}
|
||||
aria-label="Parameter name (key)"
|
||||
onChange={e => this.updateParamMapping({ mapTo: e.target.value })}
|
||||
onChange={(e) => this.updateParamMapping({ mapTo: e.target.value })}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
renderDashboardMapToExisting() {
|
||||
const { mapping, existingParamNames } = this.props;
|
||||
const options = map(existingParamNames, paramName => ({ label: paramName, value: paramName }));
|
||||
const options = map(existingParamNames, (paramName) => ({ label: paramName, value: paramName }));
|
||||
|
||||
return <Select value={mapping.mapTo} onChange={mapTo => this.updateParamMapping({ mapTo })} options={options} />;
|
||||
return <Select value={mapping.mapTo} onChange={(mapTo) => this.updateParamMapping({ mapTo })} options={options} />;
|
||||
}
|
||||
|
||||
renderStaticValue() {
|
||||
@@ -226,7 +226,8 @@ export class ParameterMappingInput extends React.Component {
|
||||
enumOptions={mapping.param.enumOptions}
|
||||
queryId={mapping.param.queryId}
|
||||
parameter={mapping.param}
|
||||
onSelect={value => this.updateParamMapping({ value })}
|
||||
onSelect={(value) => this.updateParamMapping({ value })}
|
||||
regex={mapping.param.regex}
|
||||
/>
|
||||
);
|
||||
}
|
||||
@@ -284,12 +285,12 @@ class MappingEditor extends React.Component {
|
||||
};
|
||||
}
|
||||
|
||||
onVisibleChange = visible => {
|
||||
onVisibleChange = (visible) => {
|
||||
if (visible) this.show();
|
||||
else this.hide();
|
||||
};
|
||||
|
||||
onChange = mapping => {
|
||||
onChange = (mapping) => {
|
||||
let inputError = null;
|
||||
|
||||
if (mapping.type === MappingType.DashboardAddNew) {
|
||||
@@ -351,7 +352,8 @@ class MappingEditor extends React.Component {
|
||||
trigger="click"
|
||||
content={this.renderContent()}
|
||||
visible={visible}
|
||||
onVisibleChange={this.onVisibleChange}>
|
||||
onVisibleChange={this.onVisibleChange}
|
||||
>
|
||||
<Button size="small" type="dashed" data-test={`EditParamMappingButton-${mapping.param.name}`}>
|
||||
<EditOutlinedIcon />
|
||||
</Button>
|
||||
@@ -376,14 +378,14 @@ class TitleEditor extends React.Component {
|
||||
title: "", // will be set on editing
|
||||
};
|
||||
|
||||
onPopupVisibleChange = showPopup => {
|
||||
onPopupVisibleChange = (showPopup) => {
|
||||
this.setState({
|
||||
showPopup,
|
||||
title: showPopup ? this.getMappingTitle() : "",
|
||||
});
|
||||
};
|
||||
|
||||
onEditingTitleChange = event => {
|
||||
onEditingTitleChange = (event) => {
|
||||
this.setState({ title: event.target.value });
|
||||
};
|
||||
|
||||
@@ -460,7 +462,8 @@ class TitleEditor extends React.Component {
|
||||
trigger="click"
|
||||
content={this.renderPopover()}
|
||||
visible={this.state.showPopup}
|
||||
onVisibleChange={this.onPopupVisibleChange}>
|
||||
onVisibleChange={this.onPopupVisibleChange}
|
||||
>
|
||||
<Button size="small" type="dashed">
|
||||
<EditOutlinedIcon />
|
||||
</Button>
|
||||
@@ -508,7 +511,7 @@ export class ParameterMappingListInput extends React.Component {
|
||||
|
||||
// just to be safe, array or object
|
||||
if (typeof value === "object") {
|
||||
return map(value, v => this.getStringValue(v)).join(", ");
|
||||
return map(value, (v) => this.getStringValue(v)).join(", ");
|
||||
}
|
||||
|
||||
// rest
|
||||
@@ -574,7 +577,7 @@ export class ParameterMappingListInput extends React.Component {
|
||||
|
||||
render() {
|
||||
const { existingParams } = this.props; // eslint-disable-line react/prop-types
|
||||
const dataSource = this.props.mappings.map(mapping => ({ mapping }));
|
||||
const dataSource = this.props.mappings.map((mapping) => ({ mapping }));
|
||||
|
||||
return (
|
||||
<div className="parameters-mapping-list">
|
||||
@@ -583,11 +586,11 @@ export class ParameterMappingListInput extends React.Component {
|
||||
title="Title"
|
||||
dataIndex="mapping"
|
||||
key="title"
|
||||
render={mapping => (
|
||||
render={(mapping) => (
|
||||
<TitleEditor
|
||||
existingParams={existingParams}
|
||||
mapping={mapping}
|
||||
onChange={newMapping => this.updateParamMapping(mapping, newMapping)}
|
||||
onChange={(newMapping) => this.updateParamMapping(mapping, newMapping)}
|
||||
/>
|
||||
)}
|
||||
/>
|
||||
@@ -596,19 +599,19 @@ export class ParameterMappingListInput extends React.Component {
|
||||
dataIndex="mapping"
|
||||
key="keyword"
|
||||
className="keyword"
|
||||
render={mapping => <code>{`{{ ${mapping.name} }}`}</code>}
|
||||
render={(mapping) => <code>{`{{ ${mapping.name} }}`}</code>}
|
||||
/>
|
||||
<Table.Column
|
||||
title="Default Value"
|
||||
dataIndex="mapping"
|
||||
key="value"
|
||||
render={mapping => this.constructor.getDefaultValue(mapping, this.props.existingParams)}
|
||||
render={(mapping) => this.constructor.getDefaultValue(mapping, this.props.existingParams)}
|
||||
/>
|
||||
<Table.Column
|
||||
title="Value Source"
|
||||
dataIndex="mapping"
|
||||
key="source"
|
||||
render={mapping => {
|
||||
render={(mapping) => {
|
||||
const existingParamsNames = existingParams
|
||||
.filter(({ type }) => type === mapping.param.type) // exclude mismatching param types
|
||||
.map(({ name }) => name); // keep names only
|
||||
|
||||
@@ -9,11 +9,12 @@ import DateRangeParameter from "@/components/dynamic-parameters/DateRangeParamet
|
||||
import QueryBasedParameterInput from "./QueryBasedParameterInput";
|
||||
|
||||
import "./ParameterValueInput.less";
|
||||
import Tooltip from "./Tooltip";
|
||||
|
||||
const multipleValuesProps = {
|
||||
maxTagCount: 3,
|
||||
maxTagTextLength: 10,
|
||||
maxTagPlaceholder: num => `+${num.length} more`,
|
||||
maxTagPlaceholder: (num) => `+${num.length} more`,
|
||||
};
|
||||
|
||||
class ParameterValueInput extends React.Component {
|
||||
@@ -25,6 +26,7 @@ class ParameterValueInput extends React.Component {
|
||||
parameter: PropTypes.any, // eslint-disable-line react/forbid-prop-types
|
||||
onSelect: PropTypes.func,
|
||||
className: PropTypes.string,
|
||||
regex: PropTypes.string,
|
||||
};
|
||||
|
||||
static defaultProps = {
|
||||
@@ -35,6 +37,7 @@ class ParameterValueInput extends React.Component {
|
||||
parameter: null,
|
||||
onSelect: () => {},
|
||||
className: "",
|
||||
regex: "",
|
||||
};
|
||||
|
||||
constructor(props) {
|
||||
@@ -45,7 +48,7 @@ class ParameterValueInput extends React.Component {
|
||||
};
|
||||
}
|
||||
|
||||
componentDidUpdate = prevProps => {
|
||||
componentDidUpdate = (prevProps) => {
|
||||
const { value, parameter } = this.props;
|
||||
// if value prop updated, reset dirty state
|
||||
if (prevProps.value !== value || prevProps.parameter !== parameter) {
|
||||
@@ -56,7 +59,7 @@ class ParameterValueInput extends React.Component {
|
||||
}
|
||||
};
|
||||
|
||||
onSelect = value => {
|
||||
onSelect = (value) => {
|
||||
const isDirty = !isEqual(value, this.props.value);
|
||||
this.setState({ value, isDirty });
|
||||
this.props.onSelect(value, isDirty);
|
||||
@@ -93,9 +96,9 @@ class ParameterValueInput extends React.Component {
|
||||
renderEnumInput() {
|
||||
const { enumOptions, parameter } = this.props;
|
||||
const { value } = this.state;
|
||||
const enumOptionsArray = enumOptions.split("\n").filter(v => v !== "");
|
||||
const enumOptionsArray = enumOptions.split("\n").filter((v) => v !== "");
|
||||
// Antd Select doesn't handle null in multiple mode
|
||||
const normalize = val => (parameter.multiValuesOptions && val === null ? [] : val);
|
||||
const normalize = (val) => (parameter.multiValuesOptions && val === null ? [] : val);
|
||||
|
||||
return (
|
||||
<SelectWithVirtualScroll
|
||||
@@ -103,7 +106,7 @@ class ParameterValueInput extends React.Component {
|
||||
mode={parameter.multiValuesOptions ? "multiple" : "default"}
|
||||
value={normalize(value)}
|
||||
onChange={this.onSelect}
|
||||
options={map(enumOptionsArray, opt => ({ label: String(opt), value: opt }))}
|
||||
options={map(enumOptionsArray, (opt) => ({ label: String(opt), value: opt }))}
|
||||
showSearch
|
||||
showArrow
|
||||
notFoundContent={isEmpty(enumOptionsArray) ? "No options available" : null}
|
||||
@@ -133,18 +136,36 @@ class ParameterValueInput extends React.Component {
|
||||
const { className } = this.props;
|
||||
const { value } = this.state;
|
||||
|
||||
const normalize = val => (isNaN(val) ? undefined : val);
|
||||
const normalize = (val) => (isNaN(val) ? undefined : val);
|
||||
|
||||
return (
|
||||
<InputNumber
|
||||
className={className}
|
||||
value={normalize(value)}
|
||||
aria-label="Parameter number value"
|
||||
onChange={val => this.onSelect(normalize(val))}
|
||||
onChange={(val) => this.onSelect(normalize(val))}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
renderTextPatternInput() {
|
||||
const { className } = this.props;
|
||||
const { value } = this.state;
|
||||
|
||||
return (
|
||||
<React.Fragment>
|
||||
<Tooltip title={`Regex to match: ${this.props.regex}`} placement="right">
|
||||
<Input
|
||||
className={className}
|
||||
value={value}
|
||||
aria-label="Parameter text pattern value"
|
||||
onChange={(e) => this.onSelect(e.target.value)}
|
||||
/>
|
||||
</Tooltip>
|
||||
</React.Fragment>
|
||||
);
|
||||
}
|
||||
|
||||
renderTextInput() {
|
||||
const { className } = this.props;
|
||||
const { value } = this.state;
|
||||
@@ -155,7 +176,7 @@ class ParameterValueInput extends React.Component {
|
||||
value={value}
|
||||
aria-label="Parameter text value"
|
||||
data-test="TextParamInput"
|
||||
onChange={e => this.onSelect(e.target.value)}
|
||||
onChange={(e) => this.onSelect(e.target.value)}
|
||||
/>
|
||||
);
|
||||
}
|
||||
@@ -177,6 +198,8 @@ class ParameterValueInput extends React.Component {
|
||||
return this.renderQueryBasedInput();
|
||||
case "number":
|
||||
return this.renderNumberInput();
|
||||
case "text-pattern":
|
||||
return this.renderTextPatternInput();
|
||||
default:
|
||||
return this.renderTextInput();
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { size, filter, forEach, extend } from "lodash";
|
||||
import { size, filter, forEach, extend, isEmpty } from "lodash";
|
||||
import React from "react";
|
||||
import PropTypes from "prop-types";
|
||||
import { SortableContainer, SortableElement, DragHandle } from "@redash/viz/lib/components/sortable";
|
||||
@@ -14,7 +14,7 @@ import "./Parameters.less";
|
||||
|
||||
function updateUrl(parameters) {
|
||||
const params = extend({}, location.search);
|
||||
parameters.forEach(param => {
|
||||
parameters.forEach((param) => {
|
||||
extend(params, param.toUrlParams());
|
||||
});
|
||||
location.setSearch(params, true);
|
||||
@@ -43,16 +43,26 @@ export default class Parameters extends React.Component {
|
||||
appendSortableToParent: true,
|
||||
};
|
||||
|
||||
toCamelCase = (str) => {
|
||||
if (isEmpty(str)) {
|
||||
return "";
|
||||
}
|
||||
return str.replace(/\s+/g, "").toLowerCase();
|
||||
};
|
||||
|
||||
constructor(props) {
|
||||
super(props);
|
||||
const { parameters } = props;
|
||||
const { parameters, disableUrlUpdate } = props;
|
||||
this.state = { parameters };
|
||||
if (!props.disableUrlUpdate) {
|
||||
if (!disableUrlUpdate) {
|
||||
updateUrl(parameters);
|
||||
}
|
||||
const hideRegex = /hide_filter=([^&]+)/g;
|
||||
const matches = window.location.search.matchAll(hideRegex);
|
||||
this.hideValues = Array.from(matches, (match) => match[1]);
|
||||
}
|
||||
|
||||
componentDidUpdate = prevProps => {
|
||||
componentDidUpdate = (prevProps) => {
|
||||
const { parameters, disableUrlUpdate } = this.props;
|
||||
const parametersChanged = prevProps.parameters !== parameters;
|
||||
const disableUrlUpdateChanged = prevProps.disableUrlUpdate !== disableUrlUpdate;
|
||||
@@ -64,7 +74,7 @@ export default class Parameters extends React.Component {
|
||||
}
|
||||
};
|
||||
|
||||
handleKeyDown = e => {
|
||||
handleKeyDown = (e) => {
|
||||
// Cmd/Ctrl/Alt + Enter
|
||||
if (e.keyCode === 13 && (e.ctrlKey || e.metaKey || e.altKey)) {
|
||||
e.stopPropagation();
|
||||
@@ -99,8 +109,8 @@ export default class Parameters extends React.Component {
|
||||
applyChanges = () => {
|
||||
const { onValuesChange, disableUrlUpdate } = this.props;
|
||||
this.setState(({ parameters }) => {
|
||||
const parametersWithPendingValues = parameters.filter(p => p.hasPendingValue);
|
||||
forEach(parameters, p => p.applyPendingValue());
|
||||
const parametersWithPendingValues = parameters.filter((p) => p.hasPendingValue);
|
||||
forEach(parameters, (p) => p.applyPendingValue());
|
||||
if (!disableUrlUpdate) {
|
||||
updateUrl(parameters);
|
||||
}
|
||||
@@ -111,7 +121,7 @@ export default class Parameters extends React.Component {
|
||||
|
||||
showParameterSettings = (parameter, index) => {
|
||||
const { onParametersEdit } = this.props;
|
||||
EditParameterSettingsDialog.showModal({ parameter }).onClose(updated => {
|
||||
EditParameterSettingsDialog.showModal({ parameter }).onClose((updated) => {
|
||||
this.setState(({ parameters }) => {
|
||||
const updatedParameter = extend(parameter, updated);
|
||||
parameters[index] = createParameter(updatedParameter, updatedParameter.parentQueryId);
|
||||
@@ -122,7 +132,13 @@ export default class Parameters extends React.Component {
|
||||
};
|
||||
|
||||
renderParameter(param, index) {
|
||||
if (this.hideValues.some((value) => this.toCamelCase(value) === this.toCamelCase(param.name))) {
|
||||
return null;
|
||||
}
|
||||
const { editable } = this.props;
|
||||
if (param.hidden) {
|
||||
return null;
|
||||
}
|
||||
return (
|
||||
<div key={param.name} className="di-block" data-test={`ParameterName-${param.name}`}>
|
||||
<div className="parameter-heading">
|
||||
@@ -133,11 +149,13 @@ export default class Parameters extends React.Component {
|
||||
aria-label="Edit"
|
||||
onClick={() => this.showParameterSettings(param, index)}
|
||||
data-test={`ParameterSettings-${param.name}`}
|
||||
type="button">
|
||||
type="button"
|
||||
>
|
||||
<i className="fa fa-cog" aria-hidden="true" />
|
||||
</PlainButton>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<ParameterValueInput
|
||||
type={param.type}
|
||||
value={param.normalizedValue}
|
||||
@@ -145,6 +163,7 @@ export default class Parameters extends React.Component {
|
||||
enumOptions={param.enumOptions}
|
||||
queryId={param.queryId}
|
||||
onSelect={(value, isDirty) => this.setPendingValue(param, value, isDirty)}
|
||||
regex={param.regex}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
@@ -154,7 +173,6 @@ export default class Parameters extends React.Component {
|
||||
const { parameters } = this.state;
|
||||
const { sortable, appendSortableToParent } = this.props;
|
||||
const dirtyParamCount = size(filter(parameters, "hasPendingValue"));
|
||||
|
||||
return (
|
||||
<SortableContainer
|
||||
disabled={!sortable}
|
||||
@@ -162,19 +180,22 @@ export default class Parameters extends React.Component {
|
||||
useDragHandle
|
||||
lockToContainerEdges
|
||||
helperClass="parameter-dragged"
|
||||
helperContainer={containerEl => (appendSortableToParent ? containerEl : document.body)}
|
||||
helperContainer={(containerEl) => (appendSortableToParent ? containerEl : document.body)}
|
||||
updateBeforeSortStart={this.onBeforeSortStart}
|
||||
onSortEnd={this.moveParameter}
|
||||
containerProps={{
|
||||
className: "parameter-container",
|
||||
onKeyDown: dirtyParamCount ? this.handleKeyDown : null,
|
||||
}}>
|
||||
{parameters.map((param, index) => (
|
||||
}}
|
||||
>
|
||||
{parameters &&
|
||||
parameters.map((param, index) => (
|
||||
<SortableElement key={param.name} index={index}>
|
||||
<div
|
||||
className="parameter-block"
|
||||
data-editable={sortable || null}
|
||||
data-test={`ParameterBlock-${param.name}`}>
|
||||
data-test={`ParameterBlock-${param.name}`}
|
||||
>
|
||||
{sortable && <DragHandle data-test={`DragHandle-${param.name}`} />}
|
||||
{this.renderParameter(param, index)}
|
||||
</div>
|
||||
|
||||
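Parameters.jsx now supports hiding individual parameter inputs through `hide_filter` query-string values: the constructor collects them with `matchAll` over `window.location.search`, and `renderParameter` skips any parameter whose normalized name matches (despite its name, `toCamelCase` only strips whitespace and lowercases). A standalone sketch of that matching, with made-up parameter names:

```js
// Illustrative sketch of the hide_filter matching added above.
const toKey = (str) => (str || "").replace(/\s+/g, "").toLowerCase();

const search = "?p_region=emea&hide_filter=Region&hide_filter=OrderId";
const hidden = Array.from(search.matchAll(/hide_filter=([^&]+)/g), (match) => match[1]);

const isHidden = (paramName) => hidden.some((value) => toKey(value) === toKey(paramName));
console.log(isHidden("region"));   // true  – comparison is case-insensitive
console.log(isHidden("order id")); // true  – whitespace is ignored
console.log(isHidden("status"));   // false – not listed in hide_filter
```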
@@ -69,7 +69,7 @@ UserPreviewCard.defaultProps = {
|
||||
// DataSourcePreviewCard
|
||||
|
||||
export function DataSourcePreviewCard({ dataSource, withLink, children, ...props }) {
|
||||
const imageUrl = `static/images/db-logos/${dataSource.type}.png`;
|
||||
const imageUrl = `/static/images/db-logos/${dataSource.type}.png`;
|
||||
const title = withLink ? <Link href={"data_sources/" + dataSource.id}>{dataSource.name}</Link> : dataSource.name;
|
||||
return (
|
||||
<PreviewCard {...props} imageUrl={imageUrl} title={title}>
|
||||
|
||||
@@ -123,6 +123,7 @@
|
||||
right: 10px;
|
||||
bottom: 15px;
|
||||
height: auto;
|
||||
overflow: hidden;
|
||||
padding: 0;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -96,7 +96,7 @@ function EmptyState({
|
||||
}, []);
|
||||
|
||||
// Show if `onboardingMode=false` or any requested step not completed
|
||||
const shouldShow = !onboardingMode || some(keys(isAvailable), step => isAvailable[step] && !isCompleted[step]);
|
||||
const shouldShow = !onboardingMode || some(keys(isAvailable), (step) => isAvailable[step] && !isCompleted[step]);
|
||||
|
||||
if (!shouldShow) {
|
||||
return null;
|
||||
@@ -181,7 +181,7 @@ function EmptyState({
|
||||
];
|
||||
|
||||
const stepsItems = getStepsItems ? getStepsItems(defaultStepsItems) : defaultStepsItems;
|
||||
const imageSource = illustrationPath ? illustrationPath : "static/images/illustrations/" + illustration + ".svg";
|
||||
const imageSource = illustrationPath ? illustrationPath : "/static/images/illustrations/" + illustration + ".svg";
|
||||
|
||||
return (
|
||||
<div className="empty-state-wrapper">
|
||||
@@ -196,7 +196,7 @@ function EmptyState({
|
||||
</div>
|
||||
<div className="empty-state__steps">
|
||||
<h4>Let's get started</h4>
|
||||
<ol>{stepsItems.map(item => item.node)}</ol>
|
||||
<ol>{stepsItems.map((item) => item.node)}</ol>
|
||||
{helpMessage}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -65,6 +65,7 @@ export const Query = PropTypes.shape({
|
||||
|
||||
export const AlertOptions = PropTypes.shape({
|
||||
column: PropTypes.string,
|
||||
selector: PropTypes.oneOf(["first", "min", "max"]),
|
||||
op: PropTypes.oneOf([">", ">=", "<", "<=", "==", "!="]),
|
||||
value: PropTypes.oneOfType([PropTypes.string, PropTypes.number]),
|
||||
custom_subject: PropTypes.string,
|
||||
@@ -83,6 +84,7 @@ export const Alert = PropTypes.shape({
|
||||
query: Query,
|
||||
options: PropTypes.shape({
|
||||
column: PropTypes.string,
|
||||
selector: PropTypes.string,
|
||||
op: PropTypes.string,
|
||||
value: PropTypes.oneOfType([PropTypes.string, PropTypes.number]),
|
||||
}).isRequired,
|
||||
|
||||
@@ -148,7 +148,9 @@ function EditVisualizationDialog({ dialog, visualization, query, queryResult })
|
||||
|
||||
function dismiss() {
|
||||
const optionsChanged = !isEqual(options, defaultState.originalOptions);
|
||||
confirmDialogClose(nameChanged || optionsChanged).then(dialog.dismiss);
|
||||
confirmDialogClose(nameChanged || optionsChanged)
|
||||
.then(dialog.dismiss)
|
||||
.catch(() => {});
|
||||
}
|
||||
|
||||
// When editing existing visualization chart type selector is disabled, so add only existing visualization's
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
<meta charset="UTF-8" />
|
||||
<base href="{{base_href}}" />
|
||||
<title><%= htmlWebpackPlugin.options.title %></title>
|
||||
<script src="/static/unsupportedRedirect.js" async></script>
|
||||
<script src="<%= htmlWebpackPlugin.options.staticPath %>unsupportedRedirect.js" async></script>
|
||||
|
||||
<link rel="icon" type="image/png" sizes="32x32" href="/static/images/favicon-32x32.png" />
|
||||
<link rel="icon" type="image/png" sizes="96x96" href="/static/images/favicon-96x96.png" />
|
||||
|
||||
@@ -16,6 +16,7 @@ import MenuButton from "./components/MenuButton";
|
||||
import AlertView from "./AlertView";
|
||||
import AlertEdit from "./AlertEdit";
|
||||
import AlertNew from "./AlertNew";
|
||||
import notifications from "@/services/notifications";
|
||||
|
||||
const MODES = {
|
||||
NEW: 0,
|
||||
@@ -64,6 +65,7 @@ class Alert extends React.Component {
|
||||
this.setState({
|
||||
alert: {
|
||||
options: {
|
||||
selector: "first",
|
||||
op: ">",
|
||||
value: 1,
|
||||
muted: false,
|
||||
@@ -75,7 +77,7 @@ class Alert extends React.Component {
|
||||
} else {
|
||||
const { alertId } = this.props;
|
||||
AlertService.get({ id: alertId })
|
||||
.then(alert => {
|
||||
.then((alert) => {
|
||||
if (this._isMounted) {
|
||||
const canEdit = currentUser.canEdit(alert);
|
||||
|
||||
@@ -93,7 +95,7 @@ class Alert extends React.Component {
|
||||
this.onQuerySelected(alert.query);
|
||||
}
|
||||
})
|
||||
.catch(error => {
|
||||
.catch((error) => {
|
||||
if (this._isMounted) {
|
||||
this.props.onError(error);
|
||||
}
|
||||
@@ -112,7 +114,7 @@ class Alert extends React.Component {
|
||||
alert.rearm = pendingRearm || null;
|
||||
|
||||
return AlertService.save(alert)
|
||||
.then(alert => {
|
||||
.then((alert) => {
|
||||
notification.success("Saved.");
|
||||
navigateTo(`alerts/${alert.id}`, true);
|
||||
this.setState({ alert, mode: MODES.VIEW });
|
||||
@@ -122,7 +124,7 @@ class Alert extends React.Component {
|
||||
});
|
||||
};
|
||||
|
||||
onQuerySelected = query => {
|
||||
onQuerySelected = (query) => {
|
||||
this.setState(({ alert }) => ({
|
||||
alert: Object.assign(alert, { query }),
|
||||
queryResult: null,
|
||||
@@ -130,7 +132,7 @@ class Alert extends React.Component {
|
||||
|
||||
if (query) {
|
||||
// get cached result for column names and values
|
||||
new QueryService(query).getQueryResultPromise().then(queryResult => {
|
||||
new QueryService(query).getQueryResultPromise().then((queryResult) => {
|
||||
if (this._isMounted) {
|
||||
this.setState({ queryResult });
|
||||
let { column } = this.state.alert.options;
|
||||
@@ -146,18 +148,18 @@ class Alert extends React.Component {
|
||||
}
|
||||
};
|
||||
|
||||
onNameChange = name => {
|
||||
onNameChange = (name) => {
|
||||
const { alert } = this.state;
|
||||
this.setState({
|
||||
alert: Object.assign(alert, { name }),
|
||||
});
|
||||
};
|
||||
|
||||
onRearmChange = pendingRearm => {
|
||||
onRearmChange = (pendingRearm) => {
|
||||
this.setState({ pendingRearm });
|
||||
};
|
||||
|
||||
setAlertOptions = obj => {
|
||||
setAlertOptions = (obj) => {
|
||||
const { alert } = this.state;
|
||||
const options = { ...alert.options, ...obj };
|
||||
this.setState({
|
||||
@@ -177,6 +179,17 @@ class Alert extends React.Component {
|
||||
});
|
||||
};
|
||||
|
||||
evaluate = () => {
|
||||
const { alert } = this.state;
|
||||
return AlertService.evaluate(alert)
|
||||
.then(() => {
|
||||
notification.success("Alert evaluated. Refresh page for updated status.");
|
||||
})
|
||||
.catch(() => {
|
||||
notifications.error("Failed to evaluate alert.");
|
||||
});
|
||||
};
|
||||
|
||||
mute = () => {
|
||||
const { alert } = this.state;
|
||||
return AlertService.mute(alert)
|
||||
@@ -223,7 +236,14 @@ class Alert extends React.Component {
|
||||
const { queryResult, mode, canEdit, pendingRearm } = this.state;
|
||||
|
||||
const menuButton = (
|
||||
<MenuButton doDelete={this.delete} muted={muted} mute={this.mute} unmute={this.unmute} canEdit={canEdit} />
|
||||
<MenuButton
|
||||
doDelete={this.delete}
|
||||
muted={muted}
|
||||
mute={this.mute}
|
||||
unmute={this.unmute}
|
||||
canEdit={canEdit}
|
||||
evaluate={this.evaluate}
|
||||
/>
|
||||
);
|
||||
|
||||
const commonProps = {
|
||||
@@ -258,7 +278,7 @@ routes.register(
|
||||
routeWithUserSession({
|
||||
path: "/alerts/new",
|
||||
title: "New Alert",
|
||||
render: pageProps => <Alert {...pageProps} mode={MODES.NEW} />,
|
||||
render: (pageProps) => <Alert {...pageProps} mode={MODES.NEW} />,
|
||||
})
|
||||
);
|
||||
routes.register(
|
||||
@@ -266,7 +286,7 @@ routes.register(
|
||||
routeWithUserSession({
|
||||
path: "/alerts/:alertId",
|
||||
title: "Alert",
|
||||
render: pageProps => <Alert {...pageProps} mode={MODES.VIEW} />,
|
||||
render: (pageProps) => <Alert {...pageProps} mode={MODES.VIEW} />,
|
||||
})
|
||||
);
|
||||
routes.register(
|
||||
@@ -274,6 +294,6 @@ routes.register(
|
||||
routeWithUserSession({
|
||||
path: "/alerts/:alertId/edit",
|
||||
title: "Alert",
|
||||
render: pageProps => <Alert {...pageProps} mode={MODES.EDIT} />,
|
||||
render: (pageProps) => <Alert {...pageProps} mode={MODES.EDIT} />,
|
||||
})
|
||||
);
|
||||
|
||||
@@ -68,13 +68,23 @@ export default class AlertView extends React.Component {
|
||||
<>
|
||||
<Title name={name} alert={alert}>
|
||||
<DynamicComponent name="AlertView.HeaderExtra" alert={alert} />
|
||||
<Tooltip title={canEdit ? "" : "You do not have sufficient permissions to edit this alert"}>
|
||||
{canEdit ? (
|
||||
<>
|
||||
<Button type="default" onClick={canEdit ? onEdit : null} className={cx({ disabled: !canEdit })}>
|
||||
<i className="fa fa-edit m-r-5" aria-hidden="true" />
|
||||
Edit
|
||||
</Button>
|
||||
{menuButton}
|
||||
</>
|
||||
) : (
|
||||
<Tooltip title="You do not have sufficient permissions to edit this alert">
|
||||
<Button type="default" onClick={canEdit ? onEdit : null} className={cx({ disabled: !canEdit })}>
|
||||
<i className="fa fa-edit m-r-5" aria-hidden="true" />
|
||||
Edit
|
||||
</Button>
|
||||
{menuButton}
|
||||
</Tooltip>
|
||||
)}
|
||||
</Title>
|
||||
<div className="bg-white tiled p-20">
|
||||
<Grid.Row type="flex" gutter={16}>
|
||||
|
||||
@@ -54,23 +54,74 @@ export default function Criteria({ columnNames, resultValues, alertOptions, onCh
|
||||
return null;
|
||||
})();
|
||||
|
||||
const columnHint = (
|
||||
let columnHint;
|
||||
|
||||
if (alertOptions.selector === "first") {
|
||||
columnHint = (
|
||||
<small className="alert-criteria-hint">
|
||||
Top row value is <code className="p-0">{toString(columnValue) || "unknown"}</code>
|
||||
</small>
|
||||
);
|
||||
} else if (alertOptions.selector === "max") {
|
||||
columnHint = (
|
||||
<small className="alert-criteria-hint">
|
||||
Max column value is{" "}
|
||||
<code className="p-0">
|
||||
{toString(
|
||||
Math.max(...resultValues.map((o) => Number(o[alertOptions.column])).filter((value) => !isNaN(value)))
|
||||
) || "unknown"}
|
||||
</code>
|
||||
</small>
|
||||
);
|
||||
} else if (alertOptions.selector === "min") {
|
||||
columnHint = (
|
||||
<small className="alert-criteria-hint">
|
||||
Min column value is{" "}
|
||||
<code className="p-0">
|
||||
{toString(
|
||||
Math.min(...resultValues.map((o) => Number(o[alertOptions.column])).filter((value) => !isNaN(value)))
|
||||
) || "unknown"}
|
||||
</code>
|
||||
</small>
|
||||
);
|
||||
}
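Each of the `max`/`min` hints above reduces the selected result column to numbers, discarding non-numeric cells before taking the extreme. The same computation in isolation, over made-up rows:

```js
// Illustrative: the min/max-over-column reduction used by the hints above.
const resultValues = [{ count: 3 }, { count: "12" }, { count: "n/a" }, { count: 7 }];
const column = "count";

const numbers = resultValues.map((row) => Number(row[column])).filter((value) => !isNaN(value));

console.log(Math.max(...numbers)); // 12
console.log(Math.min(...numbers)); // 3
```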
|
||||
|
||||
return (
|
||||
<div data-test="Criteria">
|
||||
<div className="input-title">
|
||||
<span className="input-label">Selector</span>
|
||||
{editMode ? (
|
||||
<Select
|
||||
value={alertOptions.selector}
|
||||
onChange={(selector) => onChange({ selector })}
|
||||
optionLabelProp="label"
|
||||
dropdownMatchSelectWidth={false}
|
||||
style={{ width: 80 }}
|
||||
>
|
||||
<Select.Option value="first" label="first">
|
||||
first
|
||||
</Select.Option>
|
||||
<Select.Option value="min" label="min">
|
||||
min
|
||||
</Select.Option>
|
||||
<Select.Option value="max" label="max">
|
||||
max
|
||||
</Select.Option>
|
||||
</Select>
|
||||
) : (
|
||||
<DisabledInput minWidth={60}>{alertOptions.selector}</DisabledInput>
|
||||
)}
|
||||
</div>
|
||||
<div className="input-title">
|
||||
<span className="input-label">Value column</span>
|
||||
{editMode ? (
|
||||
<Select
|
||||
value={alertOptions.column}
|
||||
onChange={column => onChange({ column })}
|
||||
onChange={(column) => onChange({ column })}
|
||||
dropdownMatchSelectWidth={false}
|
||||
style={{ minWidth: 100 }}>
|
||||
{columnNames.map(name => (
|
||||
style={{ minWidth: 100 }}
|
||||
>
|
||||
{columnNames.map((name) => (
|
||||
<Select.Option key={name}>{name}</Select.Option>
|
||||
))}
|
||||
</Select>
|
||||
@@ -83,10 +134,11 @@ export default function Criteria({ columnNames, resultValues, alertOptions, onCh
|
||||
{editMode ? (
|
||||
<Select
|
||||
value={alertOptions.op}
|
||||
onChange={op => onChange({ op })}
|
||||
onChange={(op) => onChange({ op })}
|
||||
optionLabelProp="label"
|
||||
dropdownMatchSelectWidth={false}
|
||||
style={{ width: 55 }}>
|
||||
style={{ width: 55 }}
|
||||
>
|
||||
<Select.Option value=">" label={CONDITIONS[">"]}>
|
||||
{CONDITIONS[">"]} greater than
|
||||
</Select.Option>
|
||||
@@ -125,7 +177,7 @@ export default function Criteria({ columnNames, resultValues, alertOptions, onCh
|
||||
id="threshold-criterion"
|
||||
style={{ width: 90 }}
|
||||
value={alertOptions.value}
|
||||
onChange={e => onChange({ value: e.target.value })}
|
||||
onChange={(e) => onChange({ value: e.target.value })}
|
||||
/>
|
||||
) : (
|
||||
<DisabledInput minWidth={50}>{alertOptions.value}</DisabledInput>
|
||||
|
||||
@@ -11,7 +11,7 @@ import LoadingOutlinedIcon from "@ant-design/icons/LoadingOutlined";
|
||||
import EllipsisOutlinedIcon from "@ant-design/icons/EllipsisOutlined";
|
||||
import PlainButton from "@/components/PlainButton";
|
||||
|
||||
export default function MenuButton({ doDelete, canEdit, mute, unmute, muted }) {
|
||||
export default function MenuButton({ doDelete, canEdit, mute, unmute, evaluate, muted }) {
|
||||
const [loading, setLoading] = useState(false);
|
||||
|
||||
const execute = useCallback(action => {
|
||||
@@ -55,6 +55,9 @@ export default function MenuButton({ doDelete, canEdit, mute, unmute, muted }) {
|
||||
<Menu.Item>
|
||||
<PlainButton onClick={confirmDelete}>Delete</PlainButton>
|
||||
</Menu.Item>
|
||||
<Menu.Item>
|
||||
<PlainButton onClick={() => execute(evaluate)}>Evaluate</PlainButton>
|
||||
</Menu.Item>
|
||||
</Menu>
|
||||
}>
|
||||
<Button aria-label="More actions">
|
||||
@@ -69,6 +72,7 @@ MenuButton.propTypes = {
|
||||
canEdit: PropTypes.bool.isRequired,
|
||||
mute: PropTypes.func.isRequired,
|
||||
unmute: PropTypes.func.isRequired,
|
||||
evaluate: PropTypes.func.isRequired,
|
||||
muted: PropTypes.bool,
|
||||
};
|
||||
|
||||
|
||||
@@ -119,6 +119,8 @@ function DashboardMoreOptionsButton({ dashboardConfiguration }) {
|
||||
managePermissions,
|
||||
gridDisabled,
|
||||
isDashboardOwnerOrAdmin,
|
||||
isDuplicating,
|
||||
duplicateDashboard,
|
||||
} = dashboardConfiguration;
|
||||
|
||||
const archive = () => {
|
||||
@@ -142,6 +144,14 @@ function DashboardMoreOptionsButton({ dashboardConfiguration }) {
|
||||
<Menu.Item className={cx({ hidden: gridDisabled })}>
|
||||
<PlainButton onClick={() => setEditingLayout(true)}>Edit</PlainButton>
|
||||
</Menu.Item>
|
||||
{!isDuplicating && dashboard.canEdit() && (
|
||||
<Menu.Item>
|
||||
<PlainButton onClick={duplicateDashboard}>
|
||||
Fork <i className="fa fa-external-link m-l-5" aria-hidden="true" />
|
||||
<span className="sr-only">(opens in a new tab)</span>
|
||||
</PlainButton>
|
||||
</Menu.Item>
|
||||
)}
|
||||
{clientConfig.showPermissionsControl && isDashboardOwnerOrAdmin && (
|
||||
<Menu.Item>
|
||||
<PlainButton onClick={managePermissions}>Manage Permissions</PlainButton>
|
||||
|
||||
@@ -94,12 +94,12 @@ class ShareDashboardDialog extends React.Component {
|
||||
};
|
||||
|
||||
render() {
|
||||
const { dialog, dashboard } = this.props;
|
||||
|
||||
const { dialog, dashboard, hasOnlySafeQueries } = this.props;
|
||||
const headerContent = this.constructor.headerContent;
|
||||
return (
|
||||
<Modal {...dialog.props} title={this.constructor.headerContent} footer={null}>
|
||||
<Modal {...dialog.props} title={headerContent} footer={null}>
|
||||
<Form layout="horizontal">
|
||||
{!this.props.hasOnlySafeQueries && (
|
||||
{!hasOnlySafeQueries && (
|
||||
<Form.Item>
|
||||
<Alert
|
||||
message="For your security, sharing is currently not supported for dashboards containing queries with text parameters. Consider changing the text parameters in your query to a different type."
|
||||
@@ -107,6 +107,7 @@ class ShareDashboardDialog extends React.Component {
|
||||
/>
|
||||
</Form.Item>
|
||||
)}
|
||||
|
||||
<Form.Item label="Allow public access" {...this.formItemProps}>
|
||||
<Switch
|
||||
checked={dashboard.publicAccessEnabled}
|
||||
|
||||
@@ -15,6 +15,7 @@ import ShareDashboardDialog from "../components/ShareDashboardDialog";
|
||||
import useFullscreenHandler from "../../../lib/hooks/useFullscreenHandler";
|
||||
import useRefreshRateHandler from "./useRefreshRateHandler";
|
||||
import useEditModeHandler from "./useEditModeHandler";
|
||||
import useDuplicateDashboard from "./useDuplicateDashboard";
|
||||
import { policy } from "@/services/policy";
|
||||
|
||||
export { DashboardStatusEnum } from "./useEditModeHandler";
|
||||
@@ -53,6 +54,8 @@ function useDashboard(dashboardData) {
|
||||
[dashboard]
|
||||
);
|
||||
|
||||
const [isDuplicating, duplicateDashboard] = useDuplicateDashboard(dashboard);
|
||||
|
||||
const managePermissions = useCallback(() => {
|
||||
const aclUrl = `api/dashboards/${dashboard.id}/acl`;
|
||||
PermissionsEditorDialog.showModal({
|
||||
@@ -243,6 +246,8 @@ function useDashboard(dashboardData) {
|
||||
showAddTextboxDialog,
|
||||
showAddWidgetDialog,
|
||||
managePermissions,
|
||||
isDuplicating,
|
||||
duplicateDashboard,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
40
client/app/pages/dashboards/hooks/useDuplicateDashboard.js (new file)
@@ -0,0 +1,40 @@
|
||||
import { noop, extend, pick } from "lodash";
|
||||
import { useCallback, useState } from "react";
|
||||
import url from "url";
|
||||
import qs from "query-string";
|
||||
import { Dashboard } from "@/services/dashboard";
|
||||
|
||||
function keepCurrentUrlParams(targetUrl) {
|
||||
const currentUrlParams = qs.parse(window.location.search);
|
||||
targetUrl = url.parse(targetUrl);
|
||||
const targetUrlParams = qs.parse(targetUrl.search);
|
||||
return url.format(
|
||||
extend(pick(targetUrl, ["protocol", "auth", "host", "pathname"]), {
|
||||
search: qs.stringify(extend(currentUrlParams, targetUrlParams)),
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
export default function useDuplicateDashboard(dashboard) {
|
||||
const [isDuplicating, setIsDuplicating] = useState(false);
|
||||
|
||||
const duplicateDashboard = useCallback(() => {
|
||||
// The tab name must be unique per call, otherwise window.open would reuse an existing tab
|
||||
const tabName = `duplicatedDashboardTab/${Math.random().toString()}`;
|
||||
|
||||
// The tab must be opened here, while we are still inside the user interaction;
// a later attempt would be blocked by the browser's popup blocker
|
||||
const tab = window.open("", tabName);
|
||||
|
||||
setIsDuplicating(true);
|
||||
Dashboard.fork({ id: dashboard.id })
|
||||
.then(newDashboard => {
|
||||
tab.location = keepCurrentUrlParams(newDashboard.getUrl());
|
||||
})
|
||||
.finally(() => {
|
||||
setIsDuplicating(false);
|
||||
});
|
||||
}, [dashboard.id]);
|
||||
|
||||
return [isDuplicating, isDuplicating ? noop : duplicateDashboard];
|
||||
}
|
||||
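`keepCurrentUrlParams` merges the current page's query string into the forked dashboard's URL so dashboard-level parameter values survive the duplication; values already present on the target URL take precedence. A rough equivalent using the standard URL APIs instead of the `url`/`query-string` packages (the base origin below exists only to satisfy the URL constructor for relative paths):

```js
// Illustrative equivalent of keepCurrentUrlParams, using URL/URLSearchParams.
function mergeCurrentParams(currentSearch, targetUrl) {
  const target = new URL(targetUrl, "https://example.invalid");
  new URLSearchParams(currentSearch).forEach((value, key) => {
    // Keep the target's own value when both define the same key.
    if (!target.searchParams.has(key)) {
      target.searchParams.set(key, value);
    }
  });
  return target.pathname + "?" + target.searchParams.toString();
}

console.log(mergeCurrentParams("?p_region=emea", "/dashboards/42-copy?org_slug=default"));
// -> /dashboards/42-copy?org_slug=default&p_region=emea
```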
@@ -31,7 +31,8 @@ function DeprecatedEmbedFeatureAlert() {
|
||||
<Link
|
||||
href="https://discuss.redash.io/t/support-for-parameters-in-embedded-visualizations/3337"
|
||||
target="_blank"
|
||||
rel="noopener noreferrer">
|
||||
rel="noopener noreferrer"
|
||||
>
|
||||
Read more
|
||||
</Link>
|
||||
.
|
||||
@@ -43,7 +44,7 @@ function DeprecatedEmbedFeatureAlert() {
|
||||
|
||||
function EmailNotVerifiedAlert() {
|
||||
const verifyEmail = () => {
|
||||
axios.post("verification_email/").then(data => {
|
||||
axios.post("verification_email/").then((data) => {
|
||||
notification.success(data.message);
|
||||
});
|
||||
};
|
||||
@@ -100,6 +101,6 @@ routes.register(
|
||||
routeWithUserSession({
|
||||
path: "/",
|
||||
title: "Redash",
|
||||
render: pageProps => <Home {...pageProps} />,
|
||||
render: (pageProps) => <Home {...pageProps} />,
|
||||
})
|
||||
);
|
||||
|
||||
@@ -37,9 +37,10 @@
|
||||
|
||||
&.active {
|
||||
overflow: visible;
|
||||
max-height: unset !important;
|
||||
.ant-input {
|
||||
resize: vertical;
|
||||
max-height: 150px - 15px * 2;
|
||||
height: 30vh;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,6 +9,7 @@ import QueryControlDropdown from "@/components/EditVisualizationButton/QueryCont
|
||||
import EditVisualizationButton from "@/components/EditVisualizationButton";
|
||||
import useQueryResultData from "@/lib/useQueryResultData";
|
||||
import { durationHumanize, pluralize, prettySize } from "@/lib/utils";
|
||||
import { isUndefined } from "lodash";
|
||||
|
||||
import "./QueryExecutionMetadata.less";
|
||||
|
||||
@@ -51,7 +52,8 @@ export default function QueryExecutionMetadata({
|
||||
"Result truncated to " +
|
||||
queryResultData.rows.length +
|
||||
" rows. Databricks may truncate query results that are unstably large."
|
||||
}>
|
||||
}
|
||||
>
|
||||
<WarningTwoTone twoToneColor="#FF9800" />
|
||||
</Tooltip>
|
||||
</span>
|
||||
@@ -67,10 +69,9 @@ export default function QueryExecutionMetadata({
|
||||
)}
|
||||
{isQueryExecuting && <span>Running…</span>}
|
||||
</span>
|
||||
{queryResultData.metadata.data_scanned && (
|
||||
{!isUndefined(queryResultData.metadata.data_scanned) && !isQueryExecuting && (
|
||||
<span className="m-l-5">
|
||||
Data Scanned
|
||||
<strong>{prettySize(queryResultData.metadata.data_scanned)}</strong>
|
||||
Data Scanned <strong>{prettySize(queryResultData.metadata.data_scanned)}</strong>
|
||||
</span>
|
||||
)}
|
||||
</span>
|
||||
|
||||
@@ -2,7 +2,7 @@ import PropTypes from "prop-types";
|
||||
import React from "react";
|
||||
|
||||
export function QuerySourceTypeIcon(props) {
|
||||
return <img src={`static/images/db-logos/${props.type}.png`} width="20" alt={props.alt} />;
|
||||
return <img src={`/static/images/db-logos/${props.type}.png`} width="20" alt={props.alt} />;
|
||||
}
|
||||
|
||||
QuerySourceTypeIcon.propTypes = {
|
||||
|
||||
@@ -18,7 +18,7 @@ function EmptyState({ title, message, refreshButton }) {
|
||||
<div className="query-results-empty-state">
|
||||
<div className="empty-state-content">
|
||||
<div>
|
||||
<img src="static/images/illustrations/no-query-results.svg" alt="No Query Results Illustration" />
|
||||
<img src="/static/images/illustrations/no-query-results.svg" alt="No Query Results Illustration" />
|
||||
</div>
|
||||
<h3>{title}</h3>
|
||||
<div className="m-b-20">{message}</div>
|
||||
@@ -40,7 +40,7 @@ EmptyState.defaultProps = {
|
||||
|
||||
function TabWithDeleteButton({ visualizationName, canDelete, onDelete, ...props }) {
|
||||
const handleDelete = useCallback(
|
||||
e => {
|
||||
(e) => {
|
||||
e.stopPropagation();
|
||||
Modal.confirm({
|
||||
title: "Delete Visualization",
|
||||
@@ -111,7 +111,8 @@ export default function QueryVisualizationTabs({
|
||||
className="add-visualization-button"
|
||||
data-test="NewVisualization"
|
||||
type="link"
|
||||
onClick={() => onAddVisualization()}>
|
||||
onClick={() => onAddVisualization()}
|
||||
>
|
||||
<i className="fa fa-plus" aria-hidden="true" />
|
||||
<span className="m-l-5 hidden-xs">Add Visualization</span>
|
||||
</Button>
|
||||
@@ -119,7 +120,7 @@ export default function QueryVisualizationTabs({
|
||||
}
|
||||
|
||||
const orderedVisualizations = useMemo(() => orderBy(visualizations, ["id"]), [visualizations]);
|
||||
const isFirstVisualization = useCallback(visId => visId === orderedVisualizations[0].id, [orderedVisualizations]);
|
||||
const isFirstVisualization = useCallback((visId) => visId === orderedVisualizations[0].id, [orderedVisualizations]);
|
||||
const isMobile = useMedia({ maxWidth: 768 });
|
||||
|
||||
const [filters, setFilters] = useState([]);
|
||||
@@ -132,9 +133,10 @@ export default function QueryVisualizationTabs({
|
||||
data-test="QueryPageVisualizationTabs"
|
||||
animated={false}
|
||||
tabBarGutter={0}
|
||||
onChange={activeKey => onChangeTab(+activeKey)}
|
||||
destroyInactiveTabPane>
|
||||
{orderedVisualizations.map(visualization => (
|
||||
onChange={(activeKey) => onChangeTab(+activeKey)}
|
||||
destroyInactiveTabPane
|
||||
>
|
||||
{orderedVisualizations.map((visualization) => (
|
||||
<TabPane
|
||||
key={`${visualization.id}`}
|
||||
tab={
|
||||
@@ -144,7 +146,8 @@ export default function QueryVisualizationTabs({
|
||||
visualizationName={visualization.name}
|
||||
onDelete={() => onDeleteVisualization(visualization.id)}
|
||||
/>
|
||||
}>
|
||||
}
|
||||
>
|
||||
{queryResult ? (
|
||||
<VisualizationRenderer
|
||||
visualization={visualization}
|
||||
|
||||
@@ -1,16 +1,11 @@
|
||||
import { useCallback, useMemo, useState } from "react";
|
||||
import { reduce } from "lodash";
|
||||
import localOptions from "@/lib/localOptions";
|
||||
|
||||
function calculateTokensCount(schema) {
|
||||
return reduce(schema, (totalLength, table) => totalLength + table.columns.length, 0);
|
||||
}
|
||||
|
||||
export default function useAutocompleteFlags(schema) {
|
||||
const isAvailable = useMemo(() => calculateTokensCount(schema) <= 5000, [schema]);
|
||||
const isAvailable = true;
|
||||
const [isEnabled, setIsEnabled] = useState(localOptions.get("liveAutocomplete", true));
|
||||
|
||||
const toggleAutocomplete = useCallback(state => {
|
||||
const toggleAutocomplete = useCallback((state) => {
|
||||
setIsEnabled(state);
|
||||
localOptions.set("liveAutocomplete", state);
|
||||
}, []);
|
||||
|
||||
@@ -17,14 +17,16 @@ export default function BeaconConsentSettings(props) {
|
||||
Anonymous Usage Data Sharing
|
||||
<HelpTrigger className="m-l-5 m-r-5" type="USAGE_DATA_SHARING" />
|
||||
</span>
|
||||
}>
|
||||
}
|
||||
>
|
||||
{loading ? (
|
||||
<Skeleton title={{ width: 300 }} paragraph={false} active />
|
||||
) : (
|
||||
<Checkbox
|
||||
name="beacon_consent"
|
||||
checked={values.beacon_consent}
|
||||
onChange={e => onChange({ beacon_consent: e.target.checked })}>
|
||||
onChange={(e) => onChange({ beacon_consent: e.target.checked })}
|
||||
>
|
||||
Help Redash improve by automatically sending anonymous usage data
|
||||
</Checkbox>
|
||||
)}
|
||||
|
||||
@@ -36,6 +36,7 @@ const Alert = {
|
||||
delete: data => axios.delete(`api/alerts/${data.id}`),
|
||||
mute: data => axios.post(`api/alerts/${data.id}/mute`),
|
||||
unmute: data => axios.delete(`api/alerts/${data.id}/mute`),
|
||||
evaluate: data => axios.post(`api/alerts/${data.id}/eval`),
|
||||
};
|
||||
|
||||
export default Alert;
|
||||
|
||||
@@ -172,6 +172,7 @@ const DashboardService = {
|
||||
favorites: params => axios.get("api/dashboards/favorites", { params }).then(transformResponse),
|
||||
favorite: ({ id }) => axios.post(`api/dashboards/${id}/favorite`),
|
||||
unfavorite: ({ id }) => axios.delete(`api/dashboards/${id}/favorite`),
|
||||
fork: ({ id }) => axios.post(`api/dashboards/${id}/fork`, { id }).then(transformResponse),
|
||||
};
|
||||
|
||||
_.extend(Dashboard, DashboardService);
|
||||
@@ -265,3 +266,7 @@ Dashboard.prototype.favorite = function favorite() {
|
||||
Dashboard.prototype.unfavorite = function unfavorite() {
|
||||
return Dashboard.unfavorite(this);
|
||||
};
|
||||
|
||||
Dashboard.prototype.getUrl = function getUrl() {
|
||||
return urlForDashboard(this);
|
||||
};
|
||||
|
||||
@@ -4,19 +4,19 @@ import { fetchDataFromJob } from "@/services/query-result";
|
||||
|
||||
export const SCHEMA_NOT_SUPPORTED = 1;
|
||||
export const SCHEMA_LOAD_ERROR = 2;
|
||||
export const IMG_ROOT = "static/images/db-logos";
|
||||
export const IMG_ROOT = "/static/images/db-logos";
|
||||
|
||||
function mapSchemaColumnsToObject(columns) {
|
||||
return map(columns, column => (isObject(column) ? column : { name: column }));
|
||||
return map(columns, (column) => (isObject(column) ? column : { name: column }));
|
||||
}
|
||||
|
||||
const DataSource = {
|
||||
query: () => axios.get("api/data_sources"),
|
||||
get: ({ id }) => axios.get(`api/data_sources/${id}`),
|
||||
types: () => axios.get("api/data_sources/types"),
|
||||
create: data => axios.post(`api/data_sources`, data),
|
||||
save: data => axios.post(`api/data_sources/${data.id}`, data),
|
||||
test: data => axios.post(`api/data_sources/${data.id}/test`),
|
||||
create: (data) => axios.post(`api/data_sources`, data),
|
||||
save: (data) => axios.post(`api/data_sources/${data.id}`, data),
|
||||
test: (data) => axios.post(`api/data_sources/${data.id}/test`),
|
||||
delete: ({ id }) => axios.delete(`api/data_sources/${id}`),
|
||||
fetchSchema: (data, refresh = false) => {
|
||||
const params = {};
|
||||
@@ -27,15 +27,15 @@ const DataSource = {
|
||||
|
||||
return axios
|
||||
.get(`api/data_sources/${data.id}/schema`, { params })
|
||||
.then(data => {
|
||||
.then((data) => {
|
||||
if (has(data, "job")) {
|
||||
return fetchDataFromJob(data.job.id).catch(error =>
|
||||
return fetchDataFromJob(data.job.id).catch((error) =>
|
||||
error.code === SCHEMA_NOT_SUPPORTED ? [] : Promise.reject(new Error(data.job.error))
|
||||
);
|
||||
}
|
||||
return has(data, "schema") ? data.schema : Promise.reject();
|
||||
})
|
||||
.then(tables => map(tables, table => ({ ...table, columns: mapSchemaColumnsToObject(table.columns) })));
|
||||
.then((tables) => map(tables, (table) => ({ ...table, columns: mapSchemaColumnsToObject(table.columns) })));
|
||||
},
|
||||
};
|
||||
|
||||
|
||||
@@ -61,7 +61,7 @@ class DateParameter extends Parameter {
|
||||
return value;
|
||||
}
|
||||
|
||||
const normalizedValue = moment(value);
|
||||
const normalizedValue = moment(value, moment.ISO_8601, true);
|
||||
return normalizedValue.isValid() ? normalizedValue : null;
|
||||
}
|
||||
|
||||
|
||||
29
client/app/services/parameters/TextPatternParameter.js (new file)
@@ -0,0 +1,29 @@
|
||||
import { toString, isNull } from "lodash";
|
||||
import Parameter from "./Parameter";
|
||||
|
||||
class TextPatternParameter extends Parameter {
|
||||
constructor(parameter, parentQueryId) {
|
||||
super(parameter, parentQueryId);
|
||||
this.regex = parameter.regex;
|
||||
this.setValue(parameter.value);
|
||||
}
|
||||
|
||||
// eslint-disable-next-line class-methods-use-this
|
||||
normalizeValue(value) {
|
||||
const normalizedValue = toString(value);
|
||||
if (isNull(normalizedValue)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const re = new RegExp(this.regex);

if (re.test(normalizedValue)) {
return normalizedValue;
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
export default TextPatternParameter;
|
||||
@@ -5,6 +5,7 @@ import EnumParameter from "./EnumParameter";
import QueryBasedDropdownParameter from "./QueryBasedDropdownParameter";
import DateParameter from "./DateParameter";
import DateRangeParameter from "./DateRangeParameter";
import TextPatternParameter from "./TextPatternParameter";

function createParameter(param, parentQueryId) {
switch (param.type) {
@@ -22,6 +23,8 @@ function createParameter(param, parentQueryId) {
case "datetime-range":
case "datetime-range-with-seconds":
return new DateRangeParameter(param, parentQueryId);
case "text-pattern":
return new TextPatternParameter({ ...param, type: "text-pattern" }, parentQueryId);
default:
return new TextParameter({ ...param, type: "text" }, parentQueryId);
}
@@ -34,6 +37,7 @@ function cloneParameter(param) {
export {
Parameter,
TextParameter,
TextPatternParameter,
NumberParameter,
EnumParameter,
QueryBasedDropdownParameter,

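For reference, a minimal usage sketch of the new parameter type, assuming the class and factory registration shown above (the import path is assumed for illustration):

import { createParameter } from "@/services/parameters"; // path assumed for illustration

// A text-pattern parameter keeps only values that match its regex.
const pattern = createParameter({ name: "search", title: "Search", type: "text-pattern", regex: "a+" });

pattern.normalizeValue("aaa"); // => "aaa" (matches /a+/, value is kept)
pattern.normalizeValue("xyz"); // => null  (no match, value is rejected)
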
@@ -1,6 +1,7 @@
import {
createParameter,
TextParameter,
TextPatternParameter,
NumberParameter,
EnumParameter,
QueryBasedDropdownParameter,
@@ -12,6 +13,7 @@ describe("Parameter", () => {
describe("create", () => {
const parameterTypes = [
["text", TextParameter],
["text-pattern", TextPatternParameter],
["number", NumberParameter],
["enum", EnumParameter],
["query", QueryBasedDropdownParameter],

@@ -0,0 +1,21 @@
import { createParameter } from "..";

describe("TextPatternParameter", () => {
let param;

beforeEach(() => {
param = createParameter({ name: "param", title: "Param", type: "text-pattern", regex: "a+" });
});

describe("normalizeValue", () => {
test("converts matching strings", () => {
const normalizedValue = param.normalizeValue("art");
expect(normalizedValue).toBe("art");
});

test("returns null when string does not match pattern", () => {
const normalizedValue = param.normalizeValue("brt");
expect(normalizedValue).toBeNull();
});
});
});
@@ -114,7 +114,7 @@ export function fetchDataFromJob(jobId, interval = 1000) {
}

export function isDateTime(v) {
return isString(v) && moment(v).isValid() && /^\d{4}-\d{2}-\d{2}T/.test(v);
return isString(v) && moment(v, moment.ISO_8601, true).isValid() && /^\d{4}-\d{2}-\d{2}T/.test(v);
}

class QueryResult {

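The switch to moment.ISO_8601 with strict parsing matters because moment's default parser is lenient and will accept many non-ISO strings; a rough sketch of the difference (values are illustrative):

import moment from "moment";

moment("1").isValid();                                            // true  — lenient fallback parsing
moment("1", moment.ISO_8601, true).isValid();                     // false — strict ISO 8601 rejects it
moment("2024-01-03T13:55:18Z", moment.ISO_8601, true).isValid();  // true  — a real ISO timestamp passes
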
@@ -1,6 +1,5 @@
|
||||
/* eslint-disable import/no-extraneous-dependencies, no-console */
|
||||
const { find } = require("lodash");
|
||||
const atob = require("atob");
|
||||
const { execSync } = require("child_process");
|
||||
const { get, post } = require("request").defaults({ jar: true });
|
||||
const { seedData } = require("./seed-data");
|
||||
@@ -44,44 +43,32 @@ function seedDatabase(seedValues) {
|
||||
|
||||
function buildServer() {
|
||||
console.log("Building the server...");
|
||||
execSync("docker-compose -p cypress build", { stdio: "inherit" });
|
||||
execSync("docker compose -p cypress build", { stdio: "inherit" });
|
||||
}
|
||||
|
||||
function startServer() {
|
||||
console.log("Starting the server...");
|
||||
execSync("docker-compose -p cypress up -d", { stdio: "inherit" });
|
||||
execSync("docker-compose -p cypress run server create_db", { stdio: "inherit" });
|
||||
execSync("docker compose -p cypress up -d", { stdio: "inherit" });
|
||||
execSync("docker compose -p cypress run server create_db", { stdio: "inherit" });
|
||||
}
|
||||
|
||||
function stopServer() {
|
||||
console.log("Stopping the server...");
|
||||
execSync("docker-compose -p cypress down", { stdio: "inherit" });
|
||||
execSync("docker compose -p cypress down", { stdio: "inherit" });
|
||||
}
|
||||
|
||||
function runCypressCI() {
|
||||
const {
|
||||
PERCY_TOKEN_ENCODED,
|
||||
CYPRESS_PROJECT_ID_ENCODED,
|
||||
CYPRESS_RECORD_KEY_ENCODED,
|
||||
GITHUB_REPOSITORY,
|
||||
CYPRESS_OPTIONS, // eslint-disable-line no-unused-vars
|
||||
} = process.env;
|
||||
|
||||
if (GITHUB_REPOSITORY === "getredash/redash") {
|
||||
if (PERCY_TOKEN_ENCODED) {
|
||||
process.env.PERCY_TOKEN = atob(`${PERCY_TOKEN_ENCODED}`);
|
||||
}
|
||||
if (CYPRESS_PROJECT_ID_ENCODED) {
|
||||
process.env.CYPRESS_PROJECT_ID = atob(`${CYPRESS_PROJECT_ID_ENCODED}`);
|
||||
}
|
||||
if (CYPRESS_RECORD_KEY_ENCODED) {
|
||||
process.env.CYPRESS_RECORD_KEY = atob(`${CYPRESS_RECORD_KEY_ENCODED}`);
|
||||
}
|
||||
if (GITHUB_REPOSITORY === "getredash/redash" && process.env.CYPRESS_RECORD_KEY) {
|
||||
process.env.CYPRESS_OPTIONS = "--record";
|
||||
}
|
||||
|
||||
execSync(
|
||||
"COMMIT_INFO_MESSAGE=$(git show -s --format=%s) docker-compose run --name cypress cypress ./node_modules/.bin/percy exec -t 300 -- ./node_modules/.bin/cypress run $CYPRESS_OPTIONS",
|
||||
"COMMIT_INFO_MESSAGE=$(git show -s --format=%s) docker compose run --name cypress cypress ./node_modules/.bin/percy exec -t 300 -- ./node_modules/.bin/cypress run $CYPRESS_OPTIONS",
|
||||
{ stdio: "inherit" }
|
||||
);
|
||||
}
|
||||
|
||||
@@ -2,16 +2,14 @@ import { dragParam } from "../../support/parameters";
|
||||
import dayjs from "dayjs";
|
||||
|
||||
function openAndSearchAntdDropdown(testId, paramOption) {
|
||||
cy.getByTestId(testId)
|
||||
.find(".ant-select-selection-search-input")
|
||||
.type(paramOption, { force: true });
|
||||
cy.getByTestId(testId).find(".ant-select-selection-search-input").type(paramOption, { force: true });
|
||||
}
|
||||
|
||||
describe("Parameter", () => {
|
||||
const expectDirtyStateChange = edit => {
|
||||
const expectDirtyStateChange = (edit) => {
|
||||
cy.getByTestId("ParameterName-test-parameter")
|
||||
.find(".parameter-input")
|
||||
.should($el => {
|
||||
.should(($el) => {
|
||||
assert.isUndefined($el.data("dirty"));
|
||||
});
|
||||
|
||||
@@ -19,7 +17,7 @@ describe("Parameter", () => {
|
||||
|
||||
cy.getByTestId("ParameterName-test-parameter")
|
||||
.find(".parameter-input")
|
||||
.should($el => {
|
||||
.should(($el) => {
|
||||
assert.isTrue($el.data("dirty"));
|
||||
});
|
||||
};
|
||||
@@ -42,9 +40,7 @@ describe("Parameter", () => {
|
||||
});
|
||||
|
||||
it("updates the results after clicking Apply", () => {
|
||||
cy.getByTestId("ParameterName-test-parameter")
|
||||
.find("input")
|
||||
.type("Redash");
|
||||
cy.getByTestId("ParameterName-test-parameter").find("input").type("Redash");
|
||||
|
||||
cy.getByTestId("ParameterApplyButton").click();
|
||||
|
||||
@@ -53,13 +49,66 @@ describe("Parameter", () => {
|
||||
|
||||
it("sets dirty state when edited", () => {
|
||||
expectDirtyStateChange(() => {
|
||||
cy.getByTestId("ParameterName-test-parameter")
|
||||
.find("input")
|
||||
.type("Redash");
|
||||
cy.getByTestId("ParameterName-test-parameter").find("input").type("Redash");
|
||||
});
|
||||
});
|
||||
});
|
||||

describe("Text Pattern Parameter", () => {
beforeEach(() => {
const queryData = {
name: "Text Pattern Parameter",
query: "SELECT '{{test-parameter}}' AS parameter",
options: {
parameters: [{ name: "test-parameter", title: "Test Parameter", type: "text-pattern", regex: "a.*a" }],
},
};

cy.createQuery(queryData, false).then(({ id }) => cy.visit(`/queries/${id}/source`));
});

it("updates the results after clicking Apply", () => {
cy.getByTestId("ParameterName-test-parameter").find("input").type("{selectall}arta");

cy.getByTestId("ParameterApplyButton").click();

cy.getByTestId("TableVisualization").should("contain", "arta");

cy.getByTestId("ParameterName-test-parameter").find("input").type("{selectall}arounda");

cy.getByTestId("ParameterApplyButton").click();

cy.getByTestId("TableVisualization").should("contain", "arounda");
});

it("throws error message with invalid query request", () => {
cy.getByTestId("ParameterName-test-parameter").find("input").type("{selectall}arta");

cy.getByTestId("ParameterApplyButton").click();

cy.getByTestId("ParameterName-test-parameter").find("input").type("{selectall}abcab");

cy.getByTestId("ParameterApplyButton").click();

cy.getByTestId("QueryExecutionStatus").should("exist");
});

it("sets dirty state when edited", () => {
expectDirtyStateChange(() => {
cy.getByTestId("ParameterName-test-parameter").find("input").type("{selectall}arta");
});
});

it("doesn't let user save invalid regex", () => {
cy.get(".fa-cog").click();
cy.getByTestId("RegexPatternInput").type("{selectall}[");
cy.contains("Invalid Regex Pattern").should("exist");
cy.getByTestId("SaveParameterSettings").click();
cy.get(".fa-cog").click();
cy.getByTestId("RegexPatternInput").should("not.equal", "[");
});
});
|
||||
describe("Number Parameter", () => {
|
||||
beforeEach(() => {
|
||||
const queryData = {
|
||||
@@ -74,17 +123,13 @@ describe("Parameter", () => {
|
||||
});
|
||||
|
||||
it("updates the results after clicking Apply", () => {
|
||||
cy.getByTestId("ParameterName-test-parameter")
|
||||
.find("input")
|
||||
.type("{selectall}42");
|
||||
cy.getByTestId("ParameterName-test-parameter").find("input").type("{selectall}42");
|
||||
|
||||
cy.getByTestId("ParameterApplyButton").click();
|
||||
|
||||
cy.getByTestId("TableVisualization").should("contain", 42);
|
||||
|
||||
cy.getByTestId("ParameterName-test-parameter")
|
||||
.find("input")
|
||||
.type("{selectall}31415");
|
||||
cy.getByTestId("ParameterName-test-parameter").find("input").type("{selectall}31415");
|
||||
|
||||
cy.getByTestId("ParameterApplyButton").click();
|
||||
|
||||
@@ -93,9 +138,7 @@ describe("Parameter", () => {
|
||||
|
||||
it("sets dirty state when edited", () => {
|
||||
expectDirtyStateChange(() => {
|
||||
cy.getByTestId("ParameterName-test-parameter")
|
||||
.find("input")
|
||||
.type("{selectall}42");
|
||||
cy.getByTestId("ParameterName-test-parameter").find("input").type("{selectall}42");
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -119,10 +162,7 @@ describe("Parameter", () => {
|
||||
openAndSearchAntdDropdown("ParameterName-test-parameter", "value2"); // asserts option filter prop
|
||||
|
||||
// only the filtered option should be on the DOM
|
||||
cy.get(".ant-select-item-option")
|
||||
.should("have.length", 1)
|
||||
.and("contain", "value2")
|
||||
.click();
|
||||
cy.get(".ant-select-item-option").should("have.length", 1).and("contain", "value2").click();
|
||||
|
||||
cy.getByTestId("ParameterApplyButton").click();
|
||||
// ensure that query is being executed
|
||||
@@ -140,12 +180,10 @@ describe("Parameter", () => {
|
||||
SaveParameterSettings
|
||||
`);
|
||||
|
||||
cy.getByTestId("ParameterName-test-parameter")
|
||||
.find(".ant-select-selection-search")
|
||||
.click();
|
||||
cy.getByTestId("ParameterName-test-parameter").find(".ant-select-selection-search").click();
|
||||
|
||||
// select all unselected options
|
||||
cy.get(".ant-select-item-option").each($option => {
|
||||
cy.get(".ant-select-item-option").each(($option) => {
|
||||
if (!$option.hasClass("ant-select-item-option-selected")) {
|
||||
cy.wrap($option).click();
|
||||
}
|
||||
@@ -160,9 +198,7 @@ describe("Parameter", () => {
|
||||
|
||||
it("sets dirty state when edited", () => {
|
||||
expectDirtyStateChange(() => {
|
||||
cy.getByTestId("ParameterName-test-parameter")
|
||||
.find(".ant-select")
|
||||
.click();
|
||||
cy.getByTestId("ParameterName-test-parameter").find(".ant-select").click();
|
||||
|
||||
cy.contains(".ant-select-item-option", "value2").click();
|
||||
});
|
||||
@@ -176,7 +212,7 @@ describe("Parameter", () => {
|
||||
name: "Dropdown Query",
|
||||
query: "",
|
||||
};
|
||||
cy.createQuery(dropdownQueryData, true).then(dropdownQuery => {
|
||||
cy.createQuery(dropdownQueryData, true).then((dropdownQuery) => {
|
||||
const queryData = {
|
||||
name: "Query Based Dropdown Parameter",
|
||||
query: "SELECT '{{test-parameter}}' AS parameter",
|
||||
@@ -208,7 +244,7 @@ describe("Parameter", () => {
|
||||
SELECT 'value2' AS name, 2 AS value UNION ALL
|
||||
SELECT 'value3' AS name, 3 AS value`,
|
||||
};
|
||||
cy.createQuery(dropdownQueryData, true).then(dropdownQuery => {
|
||||
cy.createQuery(dropdownQueryData, true).then((dropdownQuery) => {
|
||||
const queryData = {
|
||||
name: "Query Based Dropdown Parameter",
|
||||
query: "SELECT '{{test-parameter}}' AS parameter",
|
||||
@@ -234,10 +270,7 @@ describe("Parameter", () => {
|
||||
openAndSearchAntdDropdown("ParameterName-test-parameter", "value2"); // asserts option filter prop
|
||||
|
||||
// only the filtered option should be on the DOM
|
||||
cy.get(".ant-select-item-option")
|
||||
.should("have.length", 1)
|
||||
.and("contain", "value2")
|
||||
.click();
|
||||
cy.get(".ant-select-item-option").should("have.length", 1).and("contain", "value2").click();
|
||||
|
||||
cy.getByTestId("ParameterApplyButton").click();
|
||||
// ensure that query is being executed
|
||||
@@ -255,12 +288,10 @@ describe("Parameter", () => {
|
||||
SaveParameterSettings
|
||||
`);
|
||||
|
||||
cy.getByTestId("ParameterName-test-parameter")
|
||||
.find(".ant-select")
|
||||
.click();
|
||||
cy.getByTestId("ParameterName-test-parameter").find(".ant-select").click();
|
||||
|
||||
// make sure all options are unselected and select all
|
||||
cy.get(".ant-select-item-option").each($option => {
|
||||
cy.get(".ant-select-item-option").each(($option) => {
|
||||
expect($option).not.to.have.class("ant-select-dropdown-menu-item-selected");
|
||||
cy.wrap($option).click();
|
||||
});
|
||||
@@ -274,14 +305,10 @@ describe("Parameter", () => {
|
||||
});
|
||||
});
|
||||
|
||||
const selectCalendarDate = date => {
|
||||
cy.getByTestId("ParameterName-test-parameter")
|
||||
.find("input")
|
||||
.click();
|
||||
const selectCalendarDate = (date) => {
|
||||
cy.getByTestId("ParameterName-test-parameter").find("input").click();
|
||||
|
||||
cy.get(".ant-picker-panel")
|
||||
.contains(".ant-picker-cell-inner", date)
|
||||
.click();
|
||||
cy.get(".ant-picker-panel").contains(".ant-picker-cell-inner", date).click();
|
||||
};
|
||||
|
||||
describe("Date Parameter", () => {
|
||||
@@ -303,10 +330,10 @@ describe("Parameter", () => {
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
cy.clock().then(clock => clock.restore());
|
||||
cy.clock().then((clock) => clock.restore());
|
||||
});
|
||||
|
||||
it("updates the results after selecting a date", function() {
|
||||
it("updates the results after selecting a date", function () {
|
||||
selectCalendarDate("15");
|
||||
|
||||
cy.getByTestId("ParameterApplyButton").click();
|
||||
@@ -314,12 +341,10 @@ describe("Parameter", () => {
|
||||
cy.getByTestId("TableVisualization").should("contain", dayjs(this.now).format("15/MM/YY"));
|
||||
});
|
||||
|
||||
it("allows picking a dynamic date", function() {
|
||||
it("allows picking a dynamic date", function () {
|
||||
cy.getByTestId("DynamicButton").click();
|
||||
|
||||
cy.getByTestId("DynamicButtonMenu")
|
||||
.contains("Today/Now")
|
||||
.click();
|
||||
cy.getByTestId("DynamicButtonMenu").contains("Today/Now").click();
|
||||
|
||||
cy.getByTestId("ParameterApplyButton").click();
|
||||
|
||||
@@ -350,14 +375,11 @@ describe("Parameter", () => {
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
cy.clock().then(clock => clock.restore());
|
||||
cy.clock().then((clock) => clock.restore());
|
||||
});
|
||||
|
||||
it("updates the results after selecting a date and clicking in ok", function() {
|
||||
cy.getByTestId("ParameterName-test-parameter")
|
||||
.find("input")
|
||||
.as("Input")
|
||||
.click();
|
||||
it("updates the results after selecting a date and clicking in ok", function () {
|
||||
cy.getByTestId("ParameterName-test-parameter").find("input").as("Input").click();
|
||||
|
||||
selectCalendarDate("15");
|
||||
|
||||
@@ -368,27 +390,20 @@ describe("Parameter", () => {
|
||||
cy.getByTestId("TableVisualization").should("contain", dayjs(this.now).format("YYYY-MM-15 HH:mm"));
|
||||
});
|
||||
|
||||
it("shows the current datetime after clicking in Now", function() {
|
||||
cy.getByTestId("ParameterName-test-parameter")
|
||||
.find("input")
|
||||
.as("Input")
|
||||
.click();
|
||||
it("shows the current datetime after clicking in Now", function () {
|
||||
cy.getByTestId("ParameterName-test-parameter").find("input").as("Input").click();
|
||||
|
||||
cy.get(".ant-picker-panel")
|
||||
.contains("Now")
|
||||
.click();
|
||||
cy.get(".ant-picker-panel").contains("Now").click();
|
||||
|
||||
cy.getByTestId("ParameterApplyButton").click();
|
||||
|
||||
cy.getByTestId("TableVisualization").should("contain", dayjs(this.now).format("YYYY-MM-DD HH:mm"));
|
||||
});
|
||||
|
||||
it("allows picking a dynamic date", function() {
|
||||
it("allows picking a dynamic date", function () {
|
||||
cy.getByTestId("DynamicButton").click();
|
||||
|
||||
cy.getByTestId("DynamicButtonMenu")
|
||||
.contains("Today/Now")
|
||||
.click();
|
||||
cy.getByTestId("DynamicButtonMenu").contains("Today/Now").click();
|
||||
|
||||
cy.getByTestId("ParameterApplyButton").click();
|
||||
|
||||
@@ -397,31 +412,20 @@ describe("Parameter", () => {
|
||||
|
||||
it("sets dirty state when edited", () => {
|
||||
expectDirtyStateChange(() => {
|
||||
cy.getByTestId("ParameterName-test-parameter")
|
||||
.find("input")
|
||||
.click();
|
||||
cy.getByTestId("ParameterName-test-parameter").find("input").click();
|
||||
|
||||
cy.get(".ant-picker-panel")
|
||||
.contains("Now")
|
||||
.click();
|
||||
cy.get(".ant-picker-panel").contains("Now").click();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("Date Range Parameter", () => {
|
||||
const selectCalendarDateRange = (startDate, endDate) => {
|
||||
cy.getByTestId("ParameterName-test-parameter")
|
||||
.find("input")
|
||||
.first()
|
||||
.click();
|
||||
cy.getByTestId("ParameterName-test-parameter").find("input").first().click();
|
||||
|
||||
cy.get(".ant-picker-panel")
|
||||
.contains(".ant-picker-cell-inner", startDate)
|
||||
.click();
|
||||
cy.get(".ant-picker-panel").contains(".ant-picker-cell-inner", startDate).click();
|
||||
|
||||
cy.get(".ant-picker-panel")
|
||||
.contains(".ant-picker-cell-inner", endDate)
|
||||
.click();
|
||||
cy.get(".ant-picker-panel").contains(".ant-picker-cell-inner", endDate).click();
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
@@ -442,10 +446,10 @@ describe("Parameter", () => {
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
cy.clock().then(clock => clock.restore());
|
||||
cy.clock().then((clock) => clock.restore());
|
||||
});
|
||||
|
||||
it("updates the results after selecting a date range", function() {
|
||||
it("updates the results after selecting a date range", function () {
|
||||
selectCalendarDateRange("15", "20");
|
||||
|
||||
cy.getByTestId("ParameterApplyButton").click();
|
||||
@@ -457,12 +461,10 @@ describe("Parameter", () => {
|
||||
);
|
||||
});
|
||||
|
||||
it("allows picking a dynamic date range", function() {
|
||||
it("allows picking a dynamic date range", function () {
|
||||
cy.getByTestId("DynamicButton").click();
|
||||
|
||||
cy.getByTestId("DynamicButtonMenu")
|
||||
.contains("Last month")
|
||||
.click();
|
||||
cy.getByTestId("DynamicButtonMenu").contains("Last month").click();
|
||||
|
||||
cy.getByTestId("ParameterApplyButton").click();
|
||||
|
||||
@@ -479,15 +481,10 @@ describe("Parameter", () => {
|
||||
});
|
||||
|
||||
describe("Apply Changes", () => {
|
||||
const expectAppliedChanges = apply => {
|
||||
cy.getByTestId("ParameterName-test-parameter-1")
|
||||
.find("input")
|
||||
.as("Input")
|
||||
.type("Redash");
|
||||
const expectAppliedChanges = (apply) => {
|
||||
cy.getByTestId("ParameterName-test-parameter-1").find("input").as("Input").type("Redash");
|
||||
|
||||
cy.getByTestId("ParameterName-test-parameter-2")
|
||||
.find("input")
|
||||
.type("Redash");
|
||||
cy.getByTestId("ParameterName-test-parameter-2").find("input").type("Redash");
|
||||
|
||||
cy.location("search").should("not.contain", "Redash");
|
||||
|
||||
@@ -523,10 +520,7 @@ describe("Parameter", () => {
|
||||
it("shows and hides according to parameter dirty state", () => {
|
||||
cy.getByTestId("ParameterApplyButton").should("not.be", "visible");
|
||||
|
||||
cy.getByTestId("ParameterName-test-parameter-1")
|
||||
.find("input")
|
||||
.as("Param")
|
||||
.type("Redash");
|
||||
cy.getByTestId("ParameterName-test-parameter-1").find("input").as("Param").type("Redash");
|
||||
|
||||
cy.getByTestId("ParameterApplyButton").should("be.visible");
|
||||
|
||||
@@ -536,21 +530,13 @@ describe("Parameter", () => {
|
||||
});
|
||||
|
||||
it("updates dirty counter", () => {
|
||||
cy.getByTestId("ParameterName-test-parameter-1")
|
||||
.find("input")
|
||||
.type("Redash");
|
||||
cy.getByTestId("ParameterName-test-parameter-1").find("input").type("Redash");
|
||||
|
||||
cy.getByTestId("ParameterApplyButton")
|
||||
.find(".ant-badge-count p.current")
|
||||
.should("contain", "1");
|
||||
cy.getByTestId("ParameterApplyButton").find(".ant-badge-count p.current").should("contain", "1");
|
||||
|
||||
cy.getByTestId("ParameterName-test-parameter-2")
|
||||
.find("input")
|
||||
.type("Redash");
|
||||
cy.getByTestId("ParameterName-test-parameter-2").find("input").type("Redash");
|
||||
|
||||
cy.getByTestId("ParameterApplyButton")
|
||||
.find(".ant-badge-count p.current")
|
||||
.should("contain", "2");
|
||||
cy.getByTestId("ParameterApplyButton").find(".ant-badge-count p.current").should("contain", "2");
|
||||
});
|
||||
|
||||
it('applies changes from "Apply Changes" button', () => {
|
||||
@@ -560,16 +546,13 @@ describe("Parameter", () => {
|
||||
});
|
||||
|
||||
it('applies changes from "alt+enter" keyboard shortcut', () => {
|
||||
expectAppliedChanges(input => {
|
||||
expectAppliedChanges((input) => {
|
||||
input.type("{alt}{enter}");
|
||||
});
|
||||
});
|
||||
|
||||
it('disables "Execute" button', () => {
|
||||
cy.getByTestId("ParameterName-test-parameter-1")
|
||||
.find("input")
|
||||
.as("Input")
|
||||
.type("Redash");
|
||||
cy.getByTestId("ParameterName-test-parameter-1").find("input").as("Input").type("Redash");
|
||||
cy.getByTestId("ExecuteButton").should("be.disabled");
|
||||
|
||||
cy.get("@Input").clear();
|
||||
@@ -594,15 +577,12 @@ describe("Parameter", () => {
|
||||
|
||||
cy.createQuery(queryData, false).then(({ id }) => cy.visit(`/queries/${id}/source`));
|
||||
|
||||
cy.get(".parameter-block")
|
||||
.first()
|
||||
.invoke("width")
|
||||
.as("paramWidth");
|
||||
cy.get(".parameter-block").first().invoke("width").as("paramWidth");
|
||||
|
||||
cy.get("body").type("{alt}D"); // hide schema browser
|
||||
});
|
||||
|
||||
it("is possible to rearrange parameters", function() {
|
||||
it("is possible to rearrange parameters", function () {
|
||||
cy.server();
|
||||
cy.route("POST", "**/api/queries/*").as("QuerySave");
|
||||
|
||||
|
||||
@@ -26,16 +26,16 @@ const SQL = `
|
||||
describe("Chart", () => {
|
||||
beforeEach(() => {
|
||||
cy.login();
|
||||
cy.createQuery({ name: "Chart Visualization", query: SQL })
|
||||
.its("id")
|
||||
.as("queryId");
|
||||
cy.createQuery({ name: "Chart Visualization", query: SQL }).its("id").as("queryId");
|
||||
});
|
||||
|
||||
it("creates Bar charts", function() {
|
||||
it("creates Bar charts", function () {
|
||||
cy.visit(`queries/${this.queryId}/source`);
|
||||
cy.getByTestId("ExecuteButton").click();
|
||||
|
||||
const getBarChartAssertionFunction = (specificBarChartAssertionFn = () => {}) => () => {
|
||||
const getBarChartAssertionFunction =
|
||||
(specificBarChartAssertionFn = () => {}) =>
|
||||
() => {
|
||||
// checks for TabbedEditor standard tabs
|
||||
assertTabbedEditor();
|
||||
|
||||
@@ -95,8 +95,8 @@ describe("Chart", () => {
|
||||
|
||||
const withDashboardWidgetsAssertionFn = (widgetGetters, dashboardUrl) => {
|
||||
cy.visit(dashboardUrl);
|
||||
widgetGetters.forEach(widgetGetter => {
|
||||
cy.get(`@${widgetGetter}`).then(widget => {
|
||||
widgetGetters.forEach((widgetGetter) => {
|
||||
cy.get(`@${widgetGetter}`).then((widget) => {
|
||||
cy.getByTestId(getWidgetTestId(widget)).within(() => {
|
||||
cy.get("g.points").should("exist");
|
||||
});
|
||||
@@ -107,4 +107,34 @@ describe("Chart", () => {
|
||||
createDashboardWithCharts("Bar chart visualizations", chartGetters, withDashboardWidgetsAssertionFn);
|
||||
cy.percySnapshot("Visualizations - Charts - Bar");
|
||||
});
|
||||
it("colors Bar charts", function () {
|
||||
cy.visit(`queries/${this.queryId}/source`);
|
||||
cy.getByTestId("ExecuteButton").click();
|
||||
cy.getByTestId("NewVisualization").click();
|
||||
cy.getByTestId("Chart.ColumnMapping.x").selectAntdOption("Chart.ColumnMapping.x.stage");
|
||||
cy.getByTestId("Chart.ColumnMapping.y").selectAntdOption("Chart.ColumnMapping.y.value1");
|
||||
cy.getByTestId("VisualizationEditor.Tabs.Colors").click();
|
||||
cy.getByTestId("ColorScheme").click();
|
||||
cy.getByTestId("ColorOptionViridis").click();
|
||||
cy.getByTestId("ColorScheme").click();
|
||||
cy.getByTestId("ColorOptionTableau 10").click();
|
||||
cy.getByTestId("ColorScheme").click();
|
||||
cy.getByTestId("ColorOptionD3 Category 10").click();
|
||||
});
|
||||
it("colors Pie charts", function () {
|
||||
cy.visit(`queries/${this.queryId}/source`);
|
||||
cy.getByTestId("ExecuteButton").click();
|
||||
cy.getByTestId("NewVisualization").click();
|
||||
cy.getByTestId("Chart.GlobalSeriesType").click();
|
||||
cy.getByTestId("Chart.ChartType.pie").click();
|
||||
cy.getByTestId("Chart.ColumnMapping.x").selectAntdOption("Chart.ColumnMapping.x.stage");
|
||||
cy.getByTestId("Chart.ColumnMapping.y").selectAntdOption("Chart.ColumnMapping.y.value1");
|
||||
cy.getByTestId("VisualizationEditor.Tabs.Colors").click();
|
||||
cy.getByTestId("ColorScheme").click();
|
||||
cy.getByTestId("ColorOptionViridis").click();
|
||||
cy.getByTestId("ColorScheme").click();
|
||||
cy.getByTestId("ColorOptionTableau 10").click();
|
||||
cy.getByTestId("ColorScheme").click();
|
||||
cy.getByTestId("ColorOptionD3 Category 10").click();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -22,10 +22,7 @@ function prepareVisualization(query, type, name, options) {
|
||||
cy.get("body").type("{alt}D");
|
||||
|
||||
// do some pre-checks here to ensure that visualization was created and is visible
|
||||
cy.getByTestId("TableVisualization")
|
||||
.should("exist")
|
||||
.find("table")
|
||||
.should("exist");
|
||||
cy.getByTestId("TableVisualization").should("exist").find("table").should("exist");
|
||||
|
||||
return cy.then(() => ({ queryId, visualizationId }));
|
||||
});
|
||||
@@ -53,7 +50,7 @@ describe("Table", () => {
|
||||
});
|
||||
|
||||
describe("Sorting data", () => {
|
||||
beforeEach(function() {
|
||||
beforeEach(function () {
|
||||
const { query, config } = MultiColumnSort;
|
||||
prepareVisualization(query, "TABLE", "Sort data", config).then(({ queryId, visualizationId }) => {
|
||||
this.queryId = queryId;
|
||||
@@ -61,39 +58,22 @@ describe("Table", () => {
|
||||
});
|
||||
});
|
||||
|
||||
it("sorts data by a single column", function() {
|
||||
cy.getByTestId("TableVisualization")
|
||||
.find("table th")
|
||||
.contains("c")
|
||||
.should("exist")
|
||||
.click();
|
||||
it("sorts data by a single column", function () {
|
||||
cy.getByTestId("TableVisualization").find("table th").contains("c").should("exist").click();
|
||||
cy.percySnapshot("Visualizations - Table (Single-column sort)", { widths: [viewportWidth] });
|
||||
});
|
||||
|
||||
it("sorts data by a multiple columns", function() {
|
||||
cy.getByTestId("TableVisualization")
|
||||
.find("table th")
|
||||
.contains("a")
|
||||
.should("exist")
|
||||
.click();
|
||||
it("sorts data by a multiple columns", function () {
|
||||
cy.getByTestId("TableVisualization").find("table th").contains("a").should("exist").click();
|
||||
|
||||
cy.get("body").type("{shift}", { release: false });
|
||||
cy.getByTestId("TableVisualization")
|
||||
.find("table th")
|
||||
.contains("b")
|
||||
.should("exist")
|
||||
.click();
|
||||
cy.getByTestId("TableVisualization").find("table th").contains("b").should("exist").click();
|
||||
|
||||
cy.percySnapshot("Visualizations - Table (Multi-column sort)", { widths: [viewportWidth] });
|
||||
});
|
||||
|
||||
it("sorts data in reverse order", function() {
|
||||
cy.getByTestId("TableVisualization")
|
||||
.find("table th")
|
||||
.contains("c")
|
||||
.should("exist")
|
||||
.click()
|
||||
.click();
|
||||
it("sorts data in reverse order", function () {
|
||||
cy.getByTestId("TableVisualization").find("table th").contains("c").should("exist").click().click();
|
||||
cy.percySnapshot("Visualizations - Table (Single-column reverse sort)", { widths: [viewportWidth] });
|
||||
});
|
||||
});
|
||||
@@ -101,10 +81,7 @@ describe("Table", () => {
|
||||
it("searches in multiple columns", () => {
|
||||
const { query, config } = SearchInData;
|
||||
prepareVisualization(query, "TABLE", "Search", config).then(({ visualizationId }) => {
|
||||
cy.getByTestId("TableVisualization")
|
||||
.find("table input")
|
||||
.should("exist")
|
||||
.type("test");
|
||||
cy.getByTestId("TableVisualization").find("table input").should("exist").type("test");
|
||||
cy.percySnapshot("Visualizations - Table (Search in data)", { widths: [viewportWidth] });
|
||||
});
|
||||
});
|
||||
|
||||
@@ -2,12 +2,12 @@
|
||||
|
||||
const { extend, get, merge, find } = Cypress._;
|
||||
|
||||
const post = options =>
|
||||
const post = (options) =>
|
||||
cy
|
||||
.getCookie("csrf_token")
|
||||
.then(csrf => cy.request({ ...options, method: "POST", headers: { "X-CSRF-TOKEN": csrf.value } }));
|
||||
.then((csrf) => cy.request({ ...options, method: "POST", headers: { "X-CSRF-TOKEN": csrf.value } }));
|
||||
|
||||
Cypress.Commands.add("createDashboard", name => {
|
||||
Cypress.Commands.add("createDashboard", (name) => {
|
||||
return post({ url: "api/dashboards", body: { name } }).then(({ body }) => body);
|
||||
});
|
||||
|
||||
@@ -28,7 +28,7 @@ Cypress.Commands.add("createQuery", (data, shouldPublish = true) => {
|
||||
// eslint-disable-next-line cypress/no-assigning-return-values
|
||||
let request = post({ url: "/api/queries", body: merged }).then(({ body }) => body);
|
||||
if (shouldPublish) {
|
||||
request = request.then(query =>
|
||||
request = request.then((query) =>
|
||||
post({ url: `/api/queries/${query.id}`, body: { is_draft: false } }).then(() => query)
|
||||
);
|
||||
}
|
||||
@@ -86,6 +86,7 @@ Cypress.Commands.add("addWidget", (dashboardId, visualizationId, options = {}) =
|
||||
Cypress.Commands.add("createAlert", (queryId, options = {}, name) => {
|
||||
const defaultOptions = {
|
||||
column: "?column?",
|
||||
selector: "first",
|
||||
op: "greater than",
|
||||
rearm: 0,
|
||||
value: 1,
|
||||
@@ -109,7 +110,7 @@ Cypress.Commands.add("createUser", ({ name, email, password }) => {
|
||||
url: "api/users?no_invite=yes",
|
||||
body: { name, email },
|
||||
failOnStatusCode: false,
|
||||
}).then(xhr => {
|
||||
}).then((xhr) => {
|
||||
const { status, body } = xhr;
|
||||
if (status < 200 || status > 400) {
|
||||
throw new Error(xhr);
|
||||
@@ -146,7 +147,7 @@ Cypress.Commands.add("getDestinations", () => {
|
||||
Cypress.Commands.add("addDestinationSubscription", (alertId, destinationName) => {
|
||||
return cy
|
||||
.getDestinations()
|
||||
.then(destinations => {
|
||||
.then((destinations) => {
|
||||
const destination = find(destinations, { name: destinationName });
|
||||
if (!destination) {
|
||||
throw new Error("Destination not found");
|
||||
@@ -166,6 +167,6 @@ Cypress.Commands.add("addDestinationSubscription", (alertId, destinationName) =>
|
||||
});
|
||||
});
|
||||
|
||||
Cypress.Commands.add("updateOrgSettings", settings => {
|
||||
Cypress.Commands.add("updateOrgSettings", (settings) => {
|
||||
return post({ url: "api/settings/organization", body: settings }).then(({ body }) => body);
|
||||
});
|
||||
|
||||
@@ -1,12 +1,10 @@
|
||||
export function expectTableToHaveLength(length) {
|
||||
cy.getByTestId("TableVisualization")
|
||||
.find("tbody tr")
|
||||
.should("have.length", length);
|
||||
cy.getByTestId("TableVisualization").find("tbody tr").should("have.length", length);
|
||||
}
|
||||
|
||||
export function expectFirstColumnToHaveMembers(values) {
|
||||
cy.getByTestId("TableVisualization")
|
||||
.find("tbody tr td:first-child")
|
||||
.then($cell => Cypress.$.map($cell, item => Cypress.$(item).text()))
|
||||
.then(firstColumnCells => expect(firstColumnCells).to.have.members(values));
|
||||
.then(($cell) => Cypress.$.map($cell, (item) => Cypress.$(item).text()))
|
||||
.then((firstColumnCells) => expect(firstColumnCells).to.have.members(values));
|
||||
}
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
# This configuration file is for the **development** setup.
|
||||
# For a production example please refer to getredash/setup repository on GitHub.
|
||||
version: "2.2"
|
||||
x-redash-service: &redash-service
|
||||
build:
|
||||
context: .
|
||||
@@ -11,6 +10,7 @@ x-redash-service: &redash-service
|
||||
env_file:
|
||||
- .env
|
||||
x-redash-environment: &redash-environment
|
||||
REDASH_HOST: http://localhost:5001
|
||||
REDASH_LOG_LEVEL: "INFO"
|
||||
REDASH_REDIS_URL: "redis://redis:6379/0"
|
||||
REDASH_DATABASE_URL: "postgresql://postgres@postgres/postgres"
|
||||
@@ -53,7 +53,7 @@ services:
|
||||
image: redis:7-alpine
|
||||
restart: unless-stopped
|
||||
postgres:
|
||||
image: pgautoupgrade/pgautoupgrade:15-alpine3.8
|
||||
image: pgautoupgrade/pgautoupgrade:latest
|
||||
ports:
|
||||
- "15432:5432"
|
||||
# The following turns the DB into less durable, but gains significant performance improvements for the tests run (x3
|
||||
@@ -7,7 +7,7 @@ Create Date: 2020-12-23 21:35:32.766354
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
from sqlalchemy.dialects.postgresql import JSON
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '0ec979123ba4'
|
||||
@@ -18,7 +18,7 @@ depends_on = None
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.add_column('dashboards', sa.Column('options', postgresql.JSON(astext_type=sa.Text()), server_default='{}', nullable=False))
|
||||
op.add_column('dashboards', sa.Column('options', JSON(astext_type=sa.Text()), server_default='{}', nullable=False))
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
|
||||
@@ -10,8 +10,7 @@ import json
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.sql import table
|
||||
|
||||
from redash.models import MutableDict, PseudoJSON
|
||||
from redash.models import MutableDict
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
@@ -41,7 +40,7 @@ def upgrade():
|
||||
"queries",
|
||||
sa.Column(
|
||||
"schedule",
|
||||
MutableDict.as_mutable(PseudoJSON),
|
||||
sa.Text(),
|
||||
nullable=False,
|
||||
server_default=json.dumps({}),
|
||||
),
|
||||
@@ -51,7 +50,7 @@ def upgrade():
|
||||
queries = table(
|
||||
"queries",
|
||||
sa.Column("id", sa.Integer, primary_key=True),
|
||||
sa.Column("schedule", MutableDict.as_mutable(PseudoJSON)),
|
||||
sa.Column("schedule", sa.Text()),
|
||||
sa.Column("old_schedule", sa.String(length=10)),
|
||||
)
|
||||
|
||||
@@ -85,7 +84,7 @@ def downgrade():
|
||||
"queries",
|
||||
sa.Column(
|
||||
"old_schedule",
|
||||
MutableDict.as_mutable(PseudoJSON),
|
||||
sa.Text(),
|
||||
nullable=False,
|
||||
server_default=json.dumps({}),
|
||||
),
|
||||
@@ -93,8 +92,8 @@ def downgrade():
|
||||
|
||||
queries = table(
|
||||
"queries",
|
||||
sa.Column("schedule", MutableDict.as_mutable(PseudoJSON)),
|
||||
sa.Column("old_schedule", MutableDict.as_mutable(PseudoJSON)),
|
||||
sa.Column("schedule", sa.Text()),
|
||||
sa.Column("old_schedule", sa.Text()),
|
||||
)
|
||||
|
||||
op.execute(queries.update().values({"old_schedule": queries.c.schedule}))
|
||||
@@ -106,7 +105,7 @@ def downgrade():
|
||||
"queries",
|
||||
sa.Column("id", sa.Integer, primary_key=True),
|
||||
sa.Column("schedule", sa.String(length=10)),
|
||||
sa.Column("old_schedule", MutableDict.as_mutable(PseudoJSON)),
|
||||
sa.Column("old_schedule", sa.Text()),
|
||||
)
|
||||
|
||||
conn = op.get_bind()
|
||||
|
||||
@@ -0,0 +1,135 @@
|
||||
"""change type of json fields from varchar to json
|
||||
|
||||
Revision ID: 7205816877ec
|
||||
Revises: 7ce5925f832b
|
||||
Create Date: 2024-01-03 13:55:18.885021
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects.postgresql import JSONB, JSON
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '7205816877ec'
|
||||
down_revision = '7ce5925f832b'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
connection = op.get_bind()
|
||||
op.alter_column('queries', 'options',
|
||||
existing_type=sa.Text(),
|
||||
type_=JSONB(astext_type=sa.Text()),
|
||||
nullable=True,
|
||||
postgresql_using='options::jsonb',
|
||||
)
|
||||
op.alter_column('queries', 'schedule',
|
||||
existing_type=sa.Text(),
|
||||
type_=JSONB(astext_type=sa.Text()),
|
||||
nullable=True,
|
||||
postgresql_using='schedule::jsonb',
|
||||
)
|
||||
op.alter_column('events', 'additional_properties',
|
||||
existing_type=sa.Text(),
|
||||
type_=JSONB(astext_type=sa.Text()),
|
||||
nullable=True,
|
||||
postgresql_using='additional_properties::jsonb',
|
||||
)
|
||||
op.alter_column('organizations', 'settings',
|
||||
existing_type=sa.Text(),
|
||||
type_=JSONB(astext_type=sa.Text()),
|
||||
nullable=True,
|
||||
postgresql_using='settings::jsonb',
|
||||
)
|
||||
op.alter_column('alerts', 'options',
|
||||
existing_type=JSON(astext_type=sa.Text()),
|
||||
type_=JSONB(astext_type=sa.Text()),
|
||||
nullable=True,
|
||||
postgresql_using='options::jsonb',
|
||||
)
|
||||
op.alter_column('dashboards', 'options',
|
||||
existing_type=JSON(astext_type=sa.Text()),
|
||||
type_=JSONB(astext_type=sa.Text()),
|
||||
postgresql_using='options::jsonb',
|
||||
)
|
||||
op.alter_column('dashboards', 'layout',
|
||||
existing_type=sa.Text(),
|
||||
type_=JSONB(astext_type=sa.Text()),
|
||||
postgresql_using='layout::jsonb',
|
||||
)
|
||||
op.alter_column('changes', 'change',
|
||||
existing_type=JSON(astext_type=sa.Text()),
|
||||
type_=JSONB(astext_type=sa.Text()),
|
||||
postgresql_using='change::jsonb',
|
||||
)
|
||||
op.alter_column('visualizations', 'options',
|
||||
existing_type=sa.Text(),
|
||||
type_=JSONB(astext_type=sa.Text()),
|
||||
postgresql_using='options::jsonb',
|
||||
)
|
||||
op.alter_column('widgets', 'options',
|
||||
existing_type=sa.Text(),
|
||||
type_=JSONB(astext_type=sa.Text()),
|
||||
postgresql_using='options::jsonb',
|
||||
)
|
||||
|
||||
|
||||
def downgrade():
|
||||
connection = op.get_bind()
|
||||
op.alter_column('queries', 'options',
|
||||
existing_type=JSONB(astext_type=sa.Text()),
|
||||
type_=sa.Text(),
|
||||
postgresql_using='options::text',
|
||||
existing_nullable=True,
|
||||
)
|
||||
op.alter_column('queries', 'schedule',
|
||||
existing_type=JSONB(astext_type=sa.Text()),
|
||||
type_=sa.Text(),
|
||||
postgresql_using='schedule::text',
|
||||
existing_nullable=True,
|
||||
)
|
||||
op.alter_column('events', 'additional_properties',
|
||||
existing_type=JSONB(astext_type=sa.Text()),
|
||||
type_=sa.Text(),
|
||||
postgresql_using='additional_properties::text',
|
||||
existing_nullable=True,
|
||||
)
|
||||
op.alter_column('organizations', 'settings',
|
||||
existing_type=JSONB(astext_type=sa.Text()),
|
||||
type_=sa.Text(),
|
||||
postgresql_using='settings::text',
|
||||
existing_nullable=True,
|
||||
)
|
||||
op.alter_column('alerts', 'options',
|
||||
existing_type=JSONB(astext_type=sa.Text()),
|
||||
type_=JSON(astext_type=sa.Text()),
|
||||
postgresql_using='options::json',
|
||||
existing_nullable=True,
|
||||
)
|
||||
op.alter_column('dashboards', 'options',
|
||||
existing_type=JSONB(astext_type=sa.Text()),
|
||||
type_=JSON(astext_type=sa.Text()),
|
||||
postgresql_using='options::json',
|
||||
)
|
||||
op.alter_column('dashboards', 'layout',
|
||||
existing_type=JSONB(astext_type=sa.Text()),
|
||||
type_=sa.Text(),
|
||||
postgresql_using='layout::text',
|
||||
)
|
||||
op.alter_column('changes', 'change',
|
||||
existing_type=JSONB(astext_type=sa.Text()),
|
||||
type_=JSON(astext_type=sa.Text()),
|
||||
postgresql_using='change::json',
|
||||
)
|
||||
op.alter_column('visualizations', 'options',
|
||||
type_=sa.Text(),
|
||||
existing_type=JSONB(astext_type=sa.Text()),
|
||||
postgresql_using='options::text',
|
||||
)
|
||||
op.alter_column('widgets', 'options',
|
||||
type_=sa.Text(),
|
||||
existing_type=JSONB(astext_type=sa.Text()),
|
||||
postgresql_using='options::text',
|
||||
)
|
||||
@@ -7,10 +7,9 @@ Create Date: 2019-01-17 13:22:21.729334
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
from sqlalchemy.sql import table
|
||||
|
||||
from redash.models import MutableDict, PseudoJSON
|
||||
from redash.models import MutableDict
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "73beceabb948"
|
||||
@@ -43,7 +42,7 @@ def upgrade():
|
||||
queries = table(
|
||||
"queries",
|
||||
sa.Column("id", sa.Integer, primary_key=True),
|
||||
sa.Column("schedule", MutableDict.as_mutable(PseudoJSON)),
|
||||
sa.Column("schedule", sa.Text()),
|
||||
)
|
||||
|
||||
conn = op.get_bind()
|
||||
|
||||
@@ -0,0 +1,25 @@
|
||||
"""create sqlalchemy_searchable expressions
|
||||
|
||||
Revision ID: 7ce5925f832b
|
||||
Revises: 1038c2174f5d
|
||||
Create Date: 2023-09-29 16:48:29.517762
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy_searchable import sql_expressions
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '7ce5925f832b'
|
||||
down_revision = '1038c2174f5d'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
op.execute(sql_expressions)
|
||||
|
||||
|
||||
def downgrade():
|
||||
pass
|
||||
@@ -6,7 +6,7 @@ Create Date: 2018-01-31 15:20:30.396533
|
||||
|
||||
"""
|
||||
|
||||
import simplejson
|
||||
import json
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
@@ -27,7 +27,7 @@ def upgrade():
|
||||
dashboard_result = db.session.execute("SELECT id, layout FROM dashboards")
|
||||
for dashboard in dashboard_result:
|
||||
print(" Updating dashboard: {}".format(dashboard["id"]))
|
||||
layout = simplejson.loads(dashboard["layout"])
|
||||
layout = json.loads(dashboard["layout"])
|
||||
|
||||
print(" Building widgets map:")
|
||||
widgets = {}
|
||||
@@ -53,7 +53,7 @@ def upgrade():
|
||||
if widget is None:
|
||||
continue
|
||||
|
||||
options = simplejson.loads(widget["options"]) or {}
|
||||
options = json.loads(widget["options"]) or {}
|
||||
options["position"] = {
|
||||
"row": row_index,
|
||||
"col": column_index * column_size,
|
||||
@@ -62,7 +62,7 @@ def upgrade():
|
||||
|
||||
db.session.execute(
|
||||
"UPDATE widgets SET options=:options WHERE id=:id",
|
||||
{"options": simplejson.dumps(options), "id": widget_id},
|
||||
{"options": json.dumps(options), "id": widget_id},
|
||||
)
|
||||
|
||||
dashboard_result.close()
|
||||
|
||||
@@ -7,7 +7,7 @@ Create Date: 2019-01-31 09:21:31.517265
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
from sqlalchemy.dialects.postgresql import BYTEA
|
||||
from sqlalchemy.sql import table
|
||||
from sqlalchemy_utils.types.encrypted.encrypted_type import FernetEngine
|
||||
|
||||
@@ -18,7 +18,6 @@ from redash.models.types import (
|
||||
Configuration,
|
||||
MutableDict,
|
||||
MutableList,
|
||||
PseudoJSON,
|
||||
)
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
@@ -31,7 +30,7 @@ depends_on = None
|
||||
def upgrade():
|
||||
op.add_column(
|
||||
"data_sources",
|
||||
sa.Column("encrypted_options", postgresql.BYTEA(), nullable=True),
|
||||
sa.Column("encrypted_options", BYTEA(), nullable=True),
|
||||
)
|
||||
|
||||
# copy values
|
||||
|
||||
migrations/versions/9e8c841d1a30_fix_hash.py
@@ -0,0 +1,64 @@
|
||||
"""fix_hash
|
||||
|
||||
Revision ID: 9e8c841d1a30
|
||||
Revises: 7205816877ec
|
||||
Create Date: 2024-10-05 18:55:35.730573
|
||||
|
||||
"""
|
||||
import logging
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.sql import table
|
||||
from sqlalchemy import select
|
||||
|
||||
from redash.query_runner import BaseQueryRunner, get_query_runner
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '9e8c841d1a30'
|
||||
down_revision = '7205816877ec'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def update_query_hash(record):
|
||||
should_apply_auto_limit = record['options'].get("apply_auto_limit", False) if record['options'] else False
|
||||
query_runner = get_query_runner(record['type'], {}) if record['type'] else BaseQueryRunner({})
|
||||
query_text = record['query']
|
||||
|
||||
parameters_dict = {p["name"]: p.get("value") for p in record['options'].get('parameters', [])} if record.options else {}
|
||||
if any(parameters_dict):
|
||||
print(f"Query {record['query_id']} has parameters. Hash might be incorrect.")
|
||||
|
||||
return query_runner.gen_query_hash(query_text, should_apply_auto_limit)
|
||||
|
||||
|
||||
def upgrade():
|
||||
conn = op.get_bind()
|
||||
|
||||
metadata = sa.MetaData(bind=conn)
|
||||
queries = sa.Table("queries", metadata, autoload=True)
|
||||
data_sources = sa.Table("data_sources", metadata, autoload=True)
|
||||
|
||||
joined_table = queries.outerjoin(data_sources, queries.c.data_source_id == data_sources.c.id)
|
||||
|
||||
query = select([
|
||||
queries.c.id.label("query_id"),
|
||||
queries.c.query,
|
||||
queries.c.query_hash,
|
||||
queries.c.options,
|
||||
data_sources.c.id.label("data_source_id"),
|
||||
data_sources.c.type
|
||||
]).select_from(joined_table)
|
||||
|
||||
for record in conn.execute(query):
|
||||
new_hash = update_query_hash(record)
|
||||
print(f"Updating hash for query {record['query_id']} from {record['query_hash']} to {new_hash}")
|
||||
conn.execute(
|
||||
queries.update()
|
||||
.where(queries.c.id == record['query_id'])
|
||||
.values(query_hash=new_hash))
|
||||
|
||||
|
||||
def downgrade():
|
||||
pass
|
||||
@@ -9,7 +9,7 @@ import re
|
||||
from funcy import flatten, compact
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
from sqlalchemy.dialects.postgresql import ARRAY
|
||||
from redash import models
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
@@ -21,10 +21,10 @@ depends_on = None
|
||||
|
||||
def upgrade():
|
||||
op.add_column(
|
||||
"dashboards", sa.Column("tags", postgresql.ARRAY(sa.Unicode()), nullable=True)
|
||||
"dashboards", sa.Column("tags", ARRAY(sa.Unicode()), nullable=True)
|
||||
)
|
||||
op.add_column(
|
||||
"queries", sa.Column("tags", postgresql.ARRAY(sa.Unicode()), nullable=True)
|
||||
"queries", sa.Column("tags", ARRAY(sa.Unicode()), nullable=True)
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -7,7 +7,7 @@ Create Date: 2020-12-14 21:42:48.661684
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
from sqlalchemy.dialects.postgresql import BYTEA
|
||||
from sqlalchemy.sql import table
|
||||
from sqlalchemy_utils.types.encrypted.encrypted_type import FernetEngine
|
||||
|
||||
@@ -30,7 +30,7 @@ depends_on = None
|
||||
def upgrade():
|
||||
op.add_column(
|
||||
"notification_destinations",
|
||||
sa.Column("encrypted_options", postgresql.BYTEA(), nullable=True)
|
||||
sa.Column("encrypted_options", BYTEA(), nullable=True)
|
||||
)
|
||||
|
||||
# copy values
|
||||
|
||||
@@ -7,7 +7,7 @@ Create Date: 2018-11-08 16:12:17.023569
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
from sqlalchemy.dialects.postgresql import JSON
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "e7f8a917aa8e"
|
||||
@@ -21,7 +21,7 @@ def upgrade():
|
||||
"users",
|
||||
sa.Column(
|
||||
"details",
|
||||
postgresql.JSON(astext_type=sa.Text()),
|
||||
JSON(astext_type=sa.Text()),
|
||||
server_default="{}",
|
||||
nullable=True,
|
||||
),
|
||||
|
||||
@@ -7,7 +7,7 @@ Create Date: 2022-01-31 15:24:16.507888
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
from sqlalchemy.dialects.postgresql import JSON, JSONB
|
||||
|
||||
from redash.models import db
|
||||
|
||||
@@ -23,8 +23,8 @@ def upgrade():
|
||||
|
||||
### commands auto generated by Alembic - please adjust! ###
|
||||
op.alter_column('users', 'details',
|
||||
existing_type=postgresql.JSON(astext_type=sa.Text()),
|
||||
type_=postgresql.JSONB(astext_type=sa.Text()),
|
||||
existing_type=JSON(astext_type=sa.Text()),
|
||||
type_=JSONB(astext_type=sa.Text()),
|
||||
existing_nullable=True,
|
||||
existing_server_default=sa.text("'{}'::jsonb"))
|
||||
### end Alembic commands ###
|
||||
@@ -52,8 +52,8 @@ def downgrade():
|
||||
connection.execute(update_query)
|
||||
db.session.commit()
|
||||
op.alter_column('users', 'details',
|
||||
existing_type=postgresql.JSONB(astext_type=sa.Text()),
|
||||
type_=postgresql.JSON(astext_type=sa.Text()),
|
||||
existing_type=JSONB(astext_type=sa.Text()),
|
||||
type_=JSON(astext_type=sa.Text()),
|
||||
existing_nullable=True,
|
||||
existing_server_default=sa.text("'{}'::json"))
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
command = "cd ../ && yarn cache clean && yarn --frozen-lockfile --network-concurrency 1 && yarn build && cd ./client"
|
||||
|
||||
[build.environment]
|
||||
NODE_VERSION = "16.20.1"
|
||||
NODE_VERSION = "18"
|
||||
NETLIFY_USE_YARN = "true"
|
||||
YARN_VERSION = "1.22.19"
|
||||
CYPRESS_INSTALL_BINARY = "0"
|
||||
|
||||
package.json
@@ -1,20 +1,19 @@
|
||||
{
|
||||
"name": "redash-client",
|
||||
"version": "23.09.0-dev",
|
||||
"version": "25.02.0-dev",
|
||||
"description": "The frontend part of Redash.",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"start": "npm-run-all --parallel watch:viz webpack-dev-server",
|
||||
"clean": "rm -rf ./client/dist/",
|
||||
"build:viz": "(cd viz-lib && yarn build:babel)",
|
||||
"build": "yarn clean && yarn build:viz && NODE_ENV=production webpack",
|
||||
"build:old-node-version": "yarn clean && NODE_ENV=production node --max-old-space-size=4096 node_modules/.bin/webpack",
|
||||
"watch:app": "webpack watch --progress",
|
||||
"build": "yarn clean && yarn build:viz && NODE_OPTIONS=--openssl-legacy-provider NODE_ENV=production webpack",
|
||||
"watch:app": "NODE_OPTIONS=--openssl-legacy-provider webpack watch --progress",
|
||||
"watch:viz": "(cd viz-lib && yarn watch:babel)",
|
||||
"watch": "npm-run-all --parallel watch:*",
|
||||
"webpack-dev-server": "webpack-dev-server",
|
||||
"analyze": "yarn clean && BUNDLE_ANALYZER=on webpack",
|
||||
"analyze:build": "yarn clean && NODE_ENV=production BUNDLE_ANALYZER=on webpack",
|
||||
"analyze": "yarn clean && BUNDLE_ANALYZER=on NODE_OPTIONS=--openssl-legacy-provider webpack",
|
||||
"analyze:build": "yarn clean && NODE_ENV=production BUNDLE_ANALYZER=on NODE_OPTIONS=--openssl-legacy-provider webpack",
|
||||
"lint": "yarn lint:base --ext .js --ext .jsx --ext .ts --ext .tsx ./client",
|
||||
"lint:fix": "yarn lint:base --fix --ext .js --ext .jsx --ext .ts --ext .tsx ./client",
|
||||
"lint:base": "eslint --config ./client/.eslintrc.js --ignore-path ./client/.eslintignore",
|
||||
@@ -34,7 +33,8 @@
|
||||
"url": "git+https://github.com/getredash/redash.git"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">14.16.0 <17.0.0",
|
||||
"node": ">16.0 <21.0",
|
||||
"npm": "please-use-yarn",
|
||||
"yarn": "^1.22.10"
|
||||
},
|
||||
"author": "Redash Contributors",
|
||||
@@ -50,11 +50,12 @@
|
||||
"antd": "^4.4.3",
|
||||
"axios": "0.27.2",
|
||||
"axios-auth-refresh": "3.3.6",
|
||||
"bootstrap": "^3.3.7",
|
||||
"bootstrap": "^3.4.1",
|
||||
"classnames": "^2.2.6",
|
||||
"d3": "^3.5.17",
|
||||
"debug": "^3.1.0",
|
||||
"debug": "^3.2.7",
|
||||
"dompurify": "^2.0.17",
|
||||
"elliptic": "^6.6.0",
|
||||
"font-awesome": "^4.7.0",
|
||||
"history": "^4.10.1",
|
||||
"hoist-non-react-statics": "^3.3.0",
|
||||
@@ -62,8 +63,8 @@
|
||||
"material-design-iconic-font": "^2.2.0",
|
||||
"mousetrap": "^1.6.1",
|
||||
"mustache": "^2.3.0",
|
||||
"numbro": "^2.3.6",
|
||||
"path-to-regexp": "^3.1.0",
|
||||
"numeral": "^2.0.6",
|
||||
"path-to-regexp": "^3.3.0",
|
||||
"prop-types": "^15.6.1",
|
||||
"query-string": "^6.9.0",
|
||||
"react": "16.14.0",
|
||||
@@ -178,6 +179,10 @@
|
||||
"viz-lib/**"
|
||||
]
|
||||
},
|
||||
"browser": {
|
||||
"fs": false,
|
||||
"path": false
|
||||
},
|
||||
"//": "browserslist set to 'Async functions' compatibility",
|
||||
"browserslist": [
|
||||
"Edge >= 15",
|
||||
|
||||
poetry.lock
pyproject.toml
@@ -10,9 +10,169 @@ force-exclude = '''
|
||||
)/
|
||||
'''
|
||||
|
||||
[tool.isort]
|
||||
py_version = 38
|
||||
multi_line_output = 3
|
||||
include_trailing_comma = true
|
||||
use_parentheses = true
|
||||
skip = "migrations"
|
[tool.poetry]
name = "redash"
version = "25.02.0-dev"
description = "Make Your Company Data Driven. Connect to any data source, easily visualize, dashboard and share your data."
authors = ["Arik Fraimovich <arik@redash.io>"]
# to be added to/removed from the mailing list, please reach out to Arik via the above email or Discord
maintainers = [
    "Redash maintainers and contributors <maintainers@redash.io>",
]
readme = "README.md"

[tool.poetry.dependencies]
python = ">=3.8,<3.11"
advocate = "1.0.0"
aniso8601 = "8.0.0"
authlib = "0.15.5"
backoff = "2.2.1"
blinker = "1.6.2"
click = "8.1.3"
cryptography = "43.0.1"
disposable-email-domains = ">=0.0.52"
flask = "2.3.2"
flask-limiter = "3.3.1"
flask-login = "0.6.0"
flask-mail = "0.9.1"
flask-migrate = "2.5.2"
flask-restful = "0.3.10"
flask-sqlalchemy = "2.5.1"
flask-talisman = "0.7.0"
flask-wtf = "1.1.1"
funcy = "1.13"
gevent = "23.9.1"
greenlet = "2.0.2"
gunicorn = "22.0.0"
httplib2 = "0.19.0"
itsdangerous = "2.1.2"
jinja2 = "3.1.5"
jsonschema = "3.1.1"
markupsafe = "2.1.1"
maxminddb-geolite2 = "2018.703"
parsedatetime = "2.4"
passlib = "1.7.3"
psycopg2-binary = "2.9.6"
pyjwt = "2.4.0"
pyopenssl = "24.2.1"
pypd = "1.1.0"
pysaml2 = "7.3.1"
pystache = "0.6.0"
python-dateutil = "2.8.0"
python-dotenv = "0.19.2"
pytz = ">=2019.3"
pyyaml = "6.0.1"
redis = "4.6.0"
regex = "2023.8.8"
requests = "2.32.3"
restrictedpython = "7.3"
rq = "1.16.1"
rq-scheduler = "0.13.1"
semver = "2.8.1"
sentry-sdk = "1.45.1"
sqlalchemy = "1.3.24"
sqlalchemy-searchable = "1.2.0"
sqlalchemy-utils = "0.38.3"
sqlparse = "0.5.0"
sshtunnel = "0.1.5"
statsd = "3.3.0"
supervisor = "4.1.0"
supervisor-checks = "0.8.1"
ua-parser = "0.18.0"
urllib3 = "1.26.19"
user-agents = "2.0"
werkzeug = "2.3.8"
wtforms = "2.2.1"
xlsxwriter = "1.2.2"
tzlocal = "4.3.1"
pyodbc = "5.1.0"
debugpy = "^1.8.9"
paramiko = "3.4.1"

[tool.poetry.group.all_ds]
optional = true

[tool.poetry.group.all_ds.dependencies]
atsd-client = "3.0.5"
azure-kusto-data = "0.0.35"
boto3 = "1.28.8"
botocore = "1.31.8"
cassandra-driver = "3.21.0"
certifi = ">=2019.9.11"
cmem-cmempy = "21.2.3"
databend-py = "0.4.6"
databend-sqlalchemy = "0.2.4"
google-api-python-client = "1.7.11"
gspread = "5.11.2"
impyla = "0.16.0"
influxdb = "5.2.3"
influxdb-client = "1.38.0"
memsql = "3.2.0"
mysqlclient = "2.1.1"
nzalchemy = "^11.0.2"
nzpy = ">=1.15"
oauth2client = "4.1.3"
openpyxl = "3.0.7"
oracledb = "2.1.2"
pandas = "1.3.4"
phoenixdb = "0.7"
pinotdb = ">=0.4.5"
protobuf = "3.20.2"
pyathena = "2.25.2"
pydgraph = "2.0.2"
pydruid = "0.5.7"
pyexasol = "0.12.0"
pyhive = "0.6.1"
pyignite = "0.6.1"
pymongo = { version = "4.6.3", extras = ["srv", "tls"] }
pymssql = "^2.3.1"
pyodbc = "5.1.0"
python-arango = "6.1.0"
python-rapidjson = "1.20"
requests-aws-sign = "0.1.5"
sasl = ">=0.1.3"
simple-salesforce = "0.74.3"
snowflake-connector-python = "3.13.1"
td-client = "1.0.0"
thrift = ">=0.8.0"
thrift-sasl = ">=0.1.0"
trino = ">=0.305,<1.0"
vertica-python = "1.1.1"
xlrd = "2.0.1"
e6data-python-connector = "1.1.9"

[tool.poetry.group.ldap3]
optional = true

[tool.poetry.group.ldap3.dependencies]
ldap3 = "2.9.1"

[tool.poetry.group.dev]
optional = true

[tool.poetry.group.dev.dependencies]
pytest = "7.4.0"
coverage = "7.2.7"
freezegun = "1.2.1"
jwcrypto = "1.5.6"
mock = "5.0.2"
pre-commit = "3.3.3"
ptpython = "3.0.23"
pytest-cov = "4.1.0"
watchdog = "3.0.0"
ruff = "0.0.289"

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"

[tool.ruff]
exclude = [".git", "viz-lib", "node_modules", "migrations"]
ignore = ["E501"]
select = ["C9", "E", "F", "W", "I001", "UP004"]

[tool.ruff.mccabe]
max-complexity = 15

[tool.ruff.per-file-ignores]
"__init__.py" = ["F401"]

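The all_ds, ldap3, and dev groups above are optional, so the application has to tolerate their packages being absent at runtime. A minimal sketch of the guarded-import pattern used for such optional drivers; the package and helper names here are illustrative and not part of this compare:

import logging

logger = logging.getLogger(__name__)

try:
    import pymongo  # only present when the image is built with the all_ds group

    enabled = True
except ImportError:
    enabled = False


def runner_available():
    # Hypothetical helper: report whether the MongoDB query runner can register itself.
    if not enabled:
        logger.info("pymongo is not installed; the MongoDB query runner stays disabled.")
    return enabled
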
@@ -14,13 +14,14 @@ from redash.app import create_app  # noqa
from redash.destinations import import_destinations
from redash.query_runner import import_query_runners

-__version__ = "23.09.0-dev"
+__version__ = "25.02.0-dev"


if os.environ.get("REMOTE_DEBUG"):
-    import ptvsd
+    import debugpy

-    ptvsd.enable_attach(address=("0.0.0.0", 5678))
+    debugpy.listen(("0.0.0.0", 5678))
+    debugpy.wait_for_client()


def setup_logging():

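The hunk above replaces the deprecated ptvsd debugger with debugpy. A minimal standalone sketch of the new attach flow, reusing the same port as the diff; run it, then attach a DAP client such as VS Code to port 5678:

import debugpy

# Listen for a debug client on all interfaces, same address as in the REMOTE_DEBUG branch.
debugpy.listen(("0.0.0.0", 5678))
print("Waiting for a debugger to attach on port 5678 ...")
debugpy.wait_for_client()  # block until the IDE attaches
debugpy.breakpoint()       # optional: pause here once the client is connected
print("Debugger attached, continuing.")
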
@@ -1,8 +1,8 @@
+import json
import logging

import jwt
import requests
-import simplejson

logger = logging.getLogger("jwt_auth")

@@ -25,7 +25,7 @@ def get_public_key_from_net(url):
    if "keys" in data:
        public_keys = []
        for key_dict in data["keys"]:
-            public_key = jwt.algorithms.RSAAlgorithm.from_jwk(simplejson.dumps(key_dict))
+            public_key = jwt.algorithms.RSAAlgorithm.from_jwk(json.dumps(key_dict))
            public_keys.append(public_key)

        get_public_keys.key_cache[url] = public_keys

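The jwt_auth change drops simplejson in favor of the standard library: PyJWT's RSAAlgorithm.from_jwk accepts a JWK serialized as a JSON string, so json.dumps is sufficient. A self-contained round-trip sketch; the throwaway key below is generated locally and is not part of the diff:

import json

from cryptography.hazmat.primitives.asymmetric import rsa
from jwt.algorithms import RSAAlgorithm

# Export a public key in the shape of one entry of a JWKS "keys" array.
private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
key_dict = json.loads(RSAAlgorithm.to_jwk(private_key.public_key()))

# What the patched get_public_key_from_net path does with each key_dict:
public_key = RSAAlgorithm.from_jwk(json.dumps(key_dict))
print(type(public_key).__name__)  # an RSA public key object usable with jwt.decode()
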
@@ -8,10 +8,11 @@ from redash import settings

try:
    from ldap3 import Connection, Server
+    from ldap3.utils.conv import escape_filter_chars
except ImportError:
    if settings.LDAP_LOGIN_ENABLED:
        sys.exit(
-            "The ldap3 library was not found. This is required to use LDAP authentication (see requirements.txt)."
+            "The ldap3 library was not found. This is required to use LDAP authentication. Rebuild the Docker image installing the `ldap3` poetry dependency group."
        )

from redash.authentication import (

@@ -69,6 +70,7 @@ def login(org_slug=None):


def auth_ldap_user(username, password):
+    clean_username = escape_filter_chars(username)
    server = Server(settings.LDAP_HOST_URL, use_ssl=settings.LDAP_SSL)
    if settings.LDAP_BIND_DN is not None:
        conn = Connection(

@@ -83,7 +85,7 @@ def auth_ldap_user(username, password):

    conn.search(
        settings.LDAP_SEARCH_DN,
-        settings.LDAP_SEARCH_TEMPLATE % {"username": username},
+        settings.LDAP_SEARCH_TEMPLATE % {"username": clean_username},
        attributes=[settings.LDAP_DISPLAY_NAME_KEY, settings.LDAP_EMAIL_KEY],
    )

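The new escape_filter_chars call closes an LDAP filter-injection hole: previously the raw username was interpolated into the search filter unescaped. A small sketch of what the escaping does; the search template below is illustrative, not taken from the diff:

from ldap3.utils.conv import escape_filter_chars

raw = "*)(uid=*"                 # a "username" crafted to break out of the filter
safe = escape_filter_chars(raw)  # RFC 4515 escaping: *, (, ), \ and NUL become \2a, \28, ...

LDAP_SEARCH_TEMPLATE = "(cn=%(username)s)"        # illustrative template
print(LDAP_SEARCH_TEMPLATE % {"username": raw})   # injectable filter
print(LDAP_SEARCH_TEMPLATE % {"username": safe})  # safe, matches only the literal value
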
@@ -90,8 +90,8 @@ def get_saml_client(org):

    saml_settings["metadata"] = {"inline": [metadata_inline]}

-    if acs_url is not None and acs_url != "":
-        saml_settings["entityid"] = acs_url
+    if entity_id is not None and entity_id != "":
+        saml_settings["entityid"] = entity_id

    if sp_settings:
        import json

@@ -1,5 +1,6 @@
+import json
+
import click
-import simplejson
from flask import current_app
from flask.cli import FlaskGroup, run_command, with_appcontext
from rq import Connection

@@ -53,7 +54,7 @@ def version():
@manager.command()
def status():
    with Connection(rq_redis_connection):
-        print(simplejson.dumps(get_status(), indent=2))
+        print(json.dumps(get_status(), indent=2))


@manager.command()

@@ -5,6 +5,22 @@ from sqlalchemy.orm.exc import NoResultFound
manager = AppGroup(help="Queries management commands.")


+@manager.command(name="rehash")
+def rehash():
+    from redash import models
+
+    for q in models.Query.query.all():
+        old_hash = q.query_hash
+        q.update_query_hash()
+        new_hash = q.query_hash
+
+        if old_hash != new_hash:
+            print(f"Query {q.id} has changed hash from {old_hash} to {new_hash}")
+            models.db.session.add(q)
+
+    models.db.session.commit()
+
+
@manager.command(name="add_tag")
@argument("query_id")
@argument("tag")

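The new rehash command recomputes query_hash for every stored query via update_query_hash(). Roughly, Redash derives that hash from the normalized query text; the following is a sketch under that assumption (the real method may also fold in runner-specific options such as auto-limit):

import hashlib
import re

COMMENTS_REGEX = re.compile(r"/\*.*?\*/", re.DOTALL)  # strip /* ... */ comments


def gen_query_hash(sql):
    # Normalize: drop comments, collapse whitespace, lowercase, then MD5.
    sql = COMMENTS_REGEX.sub("", sql)
    sql = "".join(sql.split()).lower()
    return hashlib.md5(sql.encode("utf-8")).hexdigest()


# Two formattings of the same query collapse to the same hash:
print(gen_query_hash("SELECT 1"))
print(gen_query_hash("select   1"))
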
@@ -5,7 +5,7 @@ logger = logging.getLogger(__name__)
__all__ = ["BaseDestination", "register", "get_destination", "import_destinations"]


-class BaseDestination(object):
+class BaseDestination:
    deprecated = False

    def __init__(self, configuration):

redash/destinations/datadog.py (new file, 93 lines)
@@ -0,0 +1,93 @@
import logging
import os

import requests

from redash.destinations import BaseDestination, register
from redash.utils import json_dumps


class Datadog(BaseDestination):
    @classmethod
    def configuration_schema(cls):
        return {
            "type": "object",
            "properties": {
                "api_key": {"type": "string", "title": "API Key"},
                "tags": {"type": "string", "title": "Tags"},
                "priority": {"type": "string", "default": "normal", "title": "Priority"},
                # https://docs.datadoghq.com/integrations/faq/list-of-api-source-attribute-value/
                "source_type_name": {"type": "string", "default": "my_apps", "title": "Source Type Name"},
            },
            "secret": ["api_key"],
            "required": ["api_key"],
        }

    @classmethod
    def icon(cls):
        return "fa-datadog"

    def notify(self, alert, query, user, new_state, app, host, metadata, options):
        # Documentation: https://docs.datadoghq.com/api/latest/events/#post-an-event
        if new_state == "triggered":
            alert_type = "error"
            if alert.custom_subject:
                title = alert.custom_subject
            else:
                title = f"{alert.name} just triggered"
        else:
            alert_type = "success"
            if alert.custom_subject:
                title = alert.custom_subject
            else:
                title = f"{alert.name} went back to normal"

        if alert.custom_body:
            text = alert.custom_body
        else:
            text = f"{alert.name} changed state to {new_state}."

        query_url = f"{host}/queries/{query.id}"
        alert_url = f"{host}/alerts/{alert.id}"
        text += f"\nQuery: {query_url}\nAlert: {alert_url}"

        headers = {
            "Accept": "application/json",
            "Content-Type": "application/json",
            "DD-API-KEY": options.get("api_key"),
        }

        body = {
            "title": title,
            "text": text,
            "alert_type": alert_type,
            "priority": options.get("priority"),
            "source_type_name": options.get("source_type_name"),
            "aggregation_key": f"redash:{alert_url}",
            "tags": [],
        }

        tags = options.get("tags")
        if tags:
            body["tags"] = tags.split(",")
        body["tags"].extend(
            [
                "redash",
                f"query_id:{query.id}",
                f"alert_id:{alert.id}",
            ]
        )

        dd_host = os.getenv("DATADOG_HOST", "api.datadoghq.com")
        url = f"https://{dd_host}/api/v1/events"

        try:
            resp = requests.post(url, headers=headers, data=json_dumps(body), timeout=5.0)
            logging.warning(resp.text)
            if resp.status_code != 202:
                logging.error(f"Datadog send ERROR. status_code => {resp.status_code}")
        except Exception as e:
            logging.exception("Datadog send ERROR: %s", e)


register(Datadog)

@@ -42,8 +42,8 @@ class Discord(BaseDestination):
                "inline": True,
            },
        ]
-        if alert.options.get("custom_body"):
-            fields.append({"name": "Description", "value": alert.options["custom_body"]})
+        if alert.custom_body:
+            fields.append({"name": "Description", "value": alert.custom_body})
        if new_state == Alert.TRIGGERED_STATE:
            if alert.options.get("custom_subject"):
                text = alert.options["custom_subject"]

@@ -26,13 +26,13 @@ class Slack(BaseDestination):
        fields = [
            {
                "title": "Query",
                "type": "mrkdwn",
                "value": "{host}/queries/{query_id}".format(host=host, query_id=query.id),
                "short": True,
            },
            {
                "title": "Alert",
                "type": "mrkdwn",
                "value": "{host}/alerts/{alert_id}".format(host=host, alert_id=alert.id),
                "short": True,
            },
        ]
        if alert.custom_body:

@@ -50,7 +50,7 @@ class Slack(BaseDestination):
        payload = {"attachments": [{"text": text, "color": color, "fields": fields}]}

        try:
-            resp = requests.post(options.get("url"), data=json_dumps(payload), timeout=5.0)
+            resp = requests.post(options.get("url"), data=json_dumps(payload).encode("utf-8"), timeout=5.0)
            logging.warning(resp.text)
            if resp.status_code != 200:
                logging.error("Slack send ERROR. status_code => {status}".format(status=resp.status_code))