Compare commits
173 Commits
23.09.0-dev ... 24.05.0-dev
| Author | SHA1 | Date |
|---|---|---|
|  | 2b974e12ed |  |
|  | 372adfed6b |  |
|  | dbab9cadb4 |  |
|  | 06244716e6 |  |
|  | f09760389a |  |
|  | 84e6d3cad5 |  |
|  | 3399e3761e |  |
|  | 1c48b2218b |  |
|  | 5ac5d86f5e |  |
|  | 5e4764af9c |  |
|  | e2a39de7d1 |  |
|  | 6c68b48917 |  |
|  | 7e8a61c73d |  |
|  | 991e94dd6a |  |
|  | 2ffeecb813 |  |
|  | 3dd855aef1 |  |
|  | 713aca440a |  |
|  | 70bb684d9e |  |
|  | 4034f791c3 |  |
|  | b9875a231b |  |
|  | 062a70cf20 |  |
|  | c12d45077a |  |
|  | 6d6412753d |  |
|  | 275e12e7c1 |  |
|  | 77d7508cee |  |
|  | 9601660751 |  |
|  | 45c6fa0591 |  |
|  | 95ecb8e229 |  |
|  | cb0707176c |  |
|  | d7247f8b84 |  |
|  | 776703fab7 |  |
|  | 34cde71238 |  |
|  | f631075be3 |  |
|  | 3f19534301 |  |
|  | 24dec192ee |  |
|  | 82d88ed4eb |  |
|  | af0773c58a |  |
|  | 15e6583d72 |  |
|  | 4eb5f4e47f |  |
|  | a0f5c706ff |  |
|  | 702a550659 |  |
|  | 38a06c7ab9 |  |
|  | a6074878bb |  |
|  | fb348c7116 |  |
|  | 24419863ec |  |
|  | c4d3d9c683 |  |
|  | 1672cd9280 |  |
|  | 6575a6499a |  |
|  | e360e4658e |  |
|  | 107933c363 |  |
|  | 667a696ca5 |  |
|  | 7d0d242072 |  |
|  | d554136f70 |  |
|  | 34723e2f3e |  |
|  | 11794b3fe3 |  |
|  | 3997916d77 |  |
|  | b09a2256dc |  |
|  | 95a45bb4dc |  |
|  | 7cd03c797c |  |
|  | 1200f9887a |  |
|  | 81d22f1eb2 |  |
|  | 2fe0326280 |  |
|  | 094984f564 |  |
|  | 52cd6ff006 |  |
|  | 939bec2114 |  |
|  | 320fddfd52 |  |
|  | ab39283ae6 |  |
|  | 6386905616 |  |
|  | d986b976e5 |  |
|  | a600921c0b |  |
|  | af2f4af8a2 |  |
|  | 49a5e74283 |  |
|  | b98b5f2ba4 |  |
|  | d245ff7bb1 |  |
|  | 97db492531 |  |
|  | 30e7392933 |  |
|  | a54171f2c2 |  |
|  | cd03da3260 |  |
|  | 4c47bef582 |  |
|  | ec1c4d07de |  |
|  | 4d5103978b |  |
|  | 3c2c2786ed |  |
|  | cd482e780a |  |
|  | 4d81c3148d |  |
|  | 1b1b9bd98d |  |
|  | 473cf29c9f |  |
|  | cbde237b12 |  |
|  | 998dc31eb0 |  |
|  | 2505e8ab3b |  |
|  | 858fc4d78f |  |
|  | 3e500ea18e |  |
|  | 58bf96c298 |  |
|  | 66ef942572 |  |
|  | 9bbdb4b765 |  |
|  | 2b4b1cf7e3 |  |
|  | 9b29f26217 |  |
|  | 392b930f2d |  |
|  | 9df6f80bb7 |  |
|  | f7b47c0436 |  |
|  | 09addaadc3 |  |
|  | a07b8a6bd3 |  |
|  | 8bfc57430d |  |
|  | a8c6dd0043 |  |
|  | 2d879510e4 |  |
|  | 13e61fc3a0 |  |
|  | de1958e995 |  |
|  | 198b422eaf |  |
|  | 63cef6632e |  |
|  | 2611dcc0f1 |  |
|  | 55193fbf66 |  |
|  | 8b8dd4f68c |  |
|  | ae77e72821 |  |
|  | 39e4ea155c |  |
|  | a5b01bf8ee |  |
|  | 5516b427d8 |  |
|  | de84c40868 |  |
|  | 39766a2d97 |  |
|  | 593b6ae6ed |  |
|  | 8bb1767c69 |  |
|  | 7b03e60f9d |  |
|  | ac9f24a781 |  |
|  | 54c4a4249a |  |
|  | 36dd3e9609 |  |
|  | 69d1e03e60 |  |
|  | a2c0c488eb |  |
|  | ddbe0f6ce5 |  |
|  | 42108089ed |  |
|  | d4ade51fba |  |
|  | 84d1693419 |  |
|  | 12f1050000 |  |
|  | 6b981972f0 |  |
|  | eafe30d52c |  |
|  | abbd4d3146 |  |
|  | 1d350853bd |  |
|  | 3edf7790fc |  |
|  | 011f9ef311 |  |
|  | 138339a8a4 |  |
|  | 0f175b7a5b |  |
|  | 0c2dc4e025 |  |
|  | a19b17b844 |  |
|  | 09ec299e65 |  |
|  | 9461bf6479 |  |
|  | 1ae4e20d70 |  |
|  | 3d32c55531 |  |
|  | 4a36abc628 |  |
|  | 3ebf163c29 |  |
|  | c3c54f6ca2 |  |
|  | b13772c464 |  |
|  | 74b0f8bb58 |  |
|  | 953ed8431b |  |
|  | 0ca72d27e6 |  |
|  | 650ec90df3 |  |
|  | b84587931f |  |
|  | ff85a36f50 |  |
|  | 6d91c64dae |  |
|  | ca36130e76 |  |
|  | 0993f68fa0 |  |
|  | f109af9f30 |  |
|  | b4e4a5a928 |  |
|  | ca900769c2 |  |
|  | c97afeb327 |  |
|  | b1f738fc96 |  |
|  | 6f6d203ca9 |  |
|  | 36482f6717 |  |
|  | 3fee9f6cef |  |
|  | cb4af6dd57 |  |
|  | 7f42bf1b65 |  |
|  | 41495ba940 |  |
|  | 9b18e1805c |  |
|  | c2e7df098d |  |
|  | f1d5ac0d58 |  |
|  | 710dd8c51b |  |
|  | 2a2c90a014 |  |
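The same range can be listed from a local clone of the repository; a minimal sketch, assuming both snapshot tags have been fetched:

```bash
git clone https://github.com/getredash/redash.git && cd redash
git log --oneline 23.09.0-dev..24.05.0-dev   # the 173 commits compared above
```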
@@ -1,26 +0,0 @@ (file removed)
```yaml
version: '2.2'
services:
  redash:
    build: ../
    command: manage version
    depends_on:
      - postgres
      - redis
    ports:
      - "5000:5000"
    environment:
      PYTHONUNBUFFERED: 0
      REDASH_LOG_LEVEL: "INFO"
      REDASH_REDIS_URL: "redis://redis:6379/0"
      POSTGRES_PASSWORD: "FmTKs5vX52ufKR1rd8tn4MoSP7zvCJwb"
      REDASH_DATABASE_URL: "postgresql://postgres:FmTKs5vX52ufKR1rd8tn4MoSP7zvCJwb@postgres/postgres"
      REDASH_COOKIE_SECRET: "2H9gNG9obnAQ9qnR9BDTQUph6CbXKCzF"
  redis:
    image: redis:7-alpine
    restart: unless-stopped
  postgres:
    image: pgautoupgrade/pgautoupgrade:15-alpine3.8
    command: "postgres -c fsync=off -c full_page_writes=off -c synchronous_commit=OFF"
    restart: unless-stopped
    environment:
      POSTGRES_HOST_AUTH_METHOD: "trust"
```
@@ -1,75 +0,0 @@ (file removed)
```yaml
version: "2.2"
x-redash-service: &redash-service
  build:
    context: ../
    args:
      skip_dev_deps: "true"
      skip_ds_deps: "true"
      code_coverage: ${CODE_COVERAGE}
x-redash-environment: &redash-environment
  REDASH_LOG_LEVEL: "INFO"
  REDASH_REDIS_URL: "redis://redis:6379/0"
  POSTGRES_PASSWORD: "FmTKs5vX52ufKR1rd8tn4MoSP7zvCJwb"
  REDASH_DATABASE_URL: "postgresql://postgres:FmTKs5vX52ufKR1rd8tn4MoSP7zvCJwb@postgres/postgres"
  REDASH_RATELIMIT_ENABLED: "false"
  REDASH_ENFORCE_CSRF: "true"
  REDASH_COOKIE_SECRET: "2H9gNG9obnAQ9qnR9BDTQUph6CbXKCzF"
services:
  server:
    <<: *redash-service
    command: server
    depends_on:
      - postgres
      - redis
    ports:
      - "5000:5000"
    environment:
      <<: *redash-environment
      PYTHONUNBUFFERED: 0
  scheduler:
    <<: *redash-service
    command: scheduler
    depends_on:
      - server
    environment:
      <<: *redash-environment
  worker:
    <<: *redash-service
    command: worker
    depends_on:
      - server
    environment:
      <<: *redash-environment
      PYTHONUNBUFFERED: 0
  cypress:
    ipc: host
    build:
      context: ../
      dockerfile: .ci/Dockerfile.cypress
    depends_on:
      - server
      - worker
      - scheduler
    environment:
      CYPRESS_baseUrl: "http://server:5000"
      CYPRESS_coverage: ${CODE_COVERAGE}
      PERCY_TOKEN: ${PERCY_TOKEN}
      PERCY_BRANCH: ${CIRCLE_BRANCH}
      PERCY_COMMIT: ${CIRCLE_SHA1}
      PERCY_PULL_REQUEST: ${CIRCLE_PR_NUMBER}
      COMMIT_INFO_BRANCH: ${CIRCLE_BRANCH}
      COMMIT_INFO_MESSAGE: ${COMMIT_INFO_MESSAGE}
      COMMIT_INFO_AUTHOR: ${CIRCLE_USERNAME}
      COMMIT_INFO_SHA: ${CIRCLE_SHA1}
      COMMIT_INFO_REMOTE: ${CIRCLE_REPOSITORY_URL}
      CYPRESS_PROJECT_ID: ${CYPRESS_PROJECT_ID}
      CYPRESS_RECORD_KEY: ${CYPRESS_RECORD_KEY}
  redis:
    image: redis:7-alpine
    restart: unless-stopped
  postgres:
    image: pgautoupgrade/pgautoupgrade:15-alpine3.8
    command: "postgres -c fsync=off -c full_page_writes=off -c synchronous_commit=OFF"
    restart: unless-stopped
    environment:
      POSTGRES_HOST_AUTH_METHOD: "trust"
```
@@ -1,21 +0,0 @@ (file removed)
```bash
#!/bin/bash
set -e
VERSION=$(jq -r .version package.json)
VERSION_TAG=$VERSION.b$CIRCLE_BUILD_NUM

export DOCKER_BUILDKIT=1
export COMPOSE_DOCKER_CLI_BUILD=1

docker login -u $DOCKER_USER -p $DOCKER_PASS

if [ $CIRCLE_BRANCH = master ] || [ $CIRCLE_BRANCH = preview-image ]
then
  docker build --build-arg skip_dev_deps=true -t redash/redash:preview -t redash/preview:$VERSION_TAG .
  docker push redash/redash:preview
  docker push redash/preview:$VERSION_TAG
else
  docker build --build-arg skip_dev_deps=true -t redash/redash:$VERSION_TAG .
  docker push redash/redash:$VERSION_TAG
fi

echo "Built: $VERSION_TAG"
```
**.ci/pack** (9 lines, file removed)
@@ -1,9 +0,0 @@
```bash
#!/bin/bash
NAME=redash
VERSION=$(jq -r .version package.json)
FULL_VERSION=$VERSION+b$CIRCLE_BUILD_NUM
FILENAME=$NAME.$FULL_VERSION.tar.gz

mkdir -p /tmp/artifacts/

tar -zcv -f /tmp/artifacts/$FILENAME --exclude=".git" --exclude="optipng*" --exclude="cypress" --exclude="*.pyc" --exclude="*.pyo" --exclude="venv" *
```
@@ -1,6 +0,0 @@ (file removed)
```bash
#!/bin/bash
VERSION=$(jq -r .version package.json)
FULL_VERSION=$VERSION+b$CIRCLE_BUILD_NUM

sed -ri "s/^__version__ = '([A-Za-z0-9.-]*)'/__version__ = '$FULL_VERSION'/" redash/__init__.py
sed -i "s/dev/$CIRCLE_SHA1/" client/app/version.json
```
@@ -1,5 +1,4 @@
```
client/.tmp/
client/dist/
node_modules/
viz-lib/node_modules/
.tmp/
```
**.github/workflows/ci.yml** (147 changed lines)
```yaml
@@ -3,48 +3,73 @@ on:
  push:
    branches:
      - master
  pull_request:
    tags:
      - '*'
  pull_request_target:
    branches:
      - master
env:
  NODE_VERSION: 16.20.1
  CYPRESS_COVERAGE: "true"
  NODE_VERSION: 18
  YARN_VERSION: 1.22.22
  REDASH_COOKIE_SECRET: 2H9gNG9obnAQ9qnR9BDTQUph6CbXKCzF
  REDASH_SECRET_KEY: 2H9gNG9obnAQ9qnR9BDTQUph6CbXKCzF
  COMPOSE_DOCKER_CLI_BUILD: 1
  DOCKER_BUILDKIT: 1
  FRONTEND_BUILD_MODE: 1
  INSTALL_GROUPS: main,all_ds,dev
  PERCY_BRANCH: ${{github.head_ref || github.ref_name}}
  PERCY_COMMIT: ${{github.sha}}
  PERCY_PULL_REQUEST: ${{github.event.number}}
  COMMIT_INFO_BRANCH: ${{github.head_ref || github.ref_name}}
  COMMIT_INFO_MESSAGE: ${{github.event.head_commit.message}}
  COMMIT_INFO_AUTHOR: ${{github.event.pull_request.user.login}}
  COMMIT_INFO_SHA: ${{github.sha}}
  COMMIT_INFO_REMOTE: ${{github.server_url}}/${{github.repository}}
jobs:
  backend-lint:
    runs-on: ubuntu-22.04
    steps:
      - uses: actions/checkout@v3
      - if: github.event.pull_request.mergeable == 'false'
        name: Exit if PR is not mergeable
        run: exit 1
      - uses: actions/checkout@v4
        with:
          fetch-depth: 1
      - uses: actions/setup-python@v4
          ref: ${{ github.event.pull_request.head.sha }}
      - uses: actions/setup-python@v5
        with:
          python-version: '3.8'
      - run: sudo pip install flake8==6.1.0 black==23.1.0 isort==5.12.0
      - run: flake8 .
      - run: sudo pip install black==24.3.0 ruff==0.1.9
      - run: ruff check .
      - run: black --check .
      - run: isort --check-only --diff .

  backend-unit-tests:
    runs-on: ubuntu-22.04
    needs: backend-lint
    env:
      COMPOSE_FILE: .ci/docker-compose.ci.yml
      COMPOSE_PROJECT_NAME: redash
      COMPOSE_DOCKER_CLI_BUILD: 1
      DOCKER_BUILDKIT: 1
      FRONTEND_BUILD_MODE: 0
    steps:
      - uses: actions/checkout@v3
      - if: github.event.pull_request.mergeable == 'false'
        name: Exit if PR is not mergeable
        run: exit 1
      - uses: actions/checkout@v4
        with:
          fetch-depth: 1
          ref: ${{ github.event.pull_request.head.sha }}
      - name: Build Docker Images
        run: |
          set -x
          docker-compose build --build-arg test_all_deps=true --build-arg skip_frontend_build=true
          docker-compose up -d
          touch .env
          docker compose build
          docker compose up -d
          sleep 10
      - name: Create Test Database
        run: docker-compose -p redash run --rm postgres psql -h postgres -U postgres -c "create database tests;"
        run: docker compose run --rm postgres psql -h postgres -U postgres -c "create database tests;"
      - name: List Enabled Query Runners
        run: docker-compose -p redash run --rm redash manage ds list_types
        run: docker compose run --rm server manage ds list_types
      - name: Run Tests
        run: docker-compose -p redash run --name tests redash tests --junitxml=junit.xml --cov-report=xml --cov=redash --cov-config=.coveragerc tests/
        run: docker compose run --name tests server tests --junitxml=junit.xml --cov-report=xml --cov=redash --cov-config=.coveragerc tests/
      - name: Copy Test Results
        run: |
          mkdir -p /tmp/test-results/unit-tests
@@ -52,102 +77,124 @@ jobs:
          docker cp tests:/app/junit.xml /tmp/test-results/unit-tests/results.xml
      - name: Upload coverage reports to Codecov
        uses: codecov/codecov-action@v3
      - name: Store Test Results
        uses: actions/upload-artifact@v3
        with:
          name: test-results
          token: ${{ secrets.CODECOV_TOKEN }}
      - name: Store Test Results
        uses: actions/upload-artifact@v4
        with:
          name: backend-test-results
          path: /tmp/test-results
      - name: Store Coverage Results
        uses: actions/upload-artifact@v3
        uses: actions/upload-artifact@v4
        with:
          name: coverage
          name: backend-coverage
          path: coverage.xml

  frontend-lint:
    runs-on: ubuntu-22.04
    steps:
      - uses: actions/checkout@v3
      - if: github.event.pull_request.mergeable == 'false'
        name: Exit if PR is not mergeable
        run: exit 1
      - uses: actions/checkout@v4
        with:
          fetch-depth: 1
      - uses: actions/setup-node@v3
          ref: ${{ github.event.pull_request.head.sha }}
      - uses: actions/setup-node@v4
        with:
          node-version: ${{ env.NODE_VERSION }}
          cache: 'yarn'
      - name: Install Dependencies
        run: |
          npm install --global --force yarn@1.22.19
          yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
          npm install --global --force yarn@$YARN_VERSION
          yarn cache clean
          yarn --frozen-lockfile --network-concurrency 1
      - name: Run Lint
        run: yarn lint:ci
      - name: Store Test Results
        uses: actions/upload-artifact@v3
        uses: actions/upload-artifact@v4
        with:
          name: test-results
          name: frontend-test-results
          path: /tmp/test-results

  frontend-unit-tests:
    runs-on: ubuntu-22.04
    needs: frontend-lint
    steps:
      - uses: actions/checkout@v3
      - if: github.event.pull_request.mergeable == 'false'
        name: Exit if PR is not mergeable
        run: exit 1
      - uses: actions/checkout@v4
        with:
          fetch-depth: 1
      - uses: actions/setup-node@v3
          ref: ${{ github.event.pull_request.head.sha }}
      - uses: actions/setup-node@v4
        with:
          node-version: ${{ env.NODE_VERSION }}
          cache: 'yarn'
      - name: Install Dependencies
        run: |
          npm install --global --force yarn@1.22.19
          yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
          npm install --global --force yarn@$YARN_VERSION
          yarn cache clean
          yarn --frozen-lockfile --network-concurrency 1
      - name: Run App Tests
        run: yarn test
      - name: Run Visualizations Tests
        run: cd viz-lib && yarn test
        run: |
          cd viz-lib
          yarn test
      - run: yarn lint

  frontend-e2e-tests:
    runs-on: ubuntu-22.04
    needs: frontend-lint
    env:
      COMPOSE_FILE: .ci/docker-compose.cypress.yml
      COMPOSE_PROJECT_NAME: cypress
      PERCY_TOKEN_ENCODED: ZGRiY2ZmZDQ0OTdjMzM5ZWE0ZGQzNTZiOWNkMDRjOTk4Zjg0ZjMxMWRmMDZiM2RjOTYxNDZhOGExMjI4ZDE3MA==
      CYPRESS_PROJECT_ID_ENCODED: OTI0Y2th
      CYPRESS_RECORD_KEY_ENCODED: YzA1OTIxMTUtYTA1Yy00NzQ2LWEyMDMtZmZjMDgwZGI2ODgx
      CYPRESS_INSTALL_BINARY: 0
      PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: 1
      INSTALL_GROUPS: main
      COMPOSE_PROFILES: e2e
      PERCY_TOKEN: ${{ secrets.PERCY_TOKEN }}
      CYPRESS_PROJECT_ID: ${{ secrets.CYPRESS_PROJECT_ID }}
      CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
    steps:
      - uses: actions/checkout@v3
      - if: github.event.pull_request.mergeable == 'false'
        name: Exit if PR is not mergeable
        run: exit 1
      - uses: actions/checkout@v4
        with:
          fetch-depth: 1
      - uses: actions/setup-node@v3
          ref: ${{ github.event.pull_request.head.sha }}
      - uses: actions/setup-node@v4
        with:
          node-version: ${{ env.NODE_VERSION }}
          cache: 'yarn'
      - name: Enable Code Coverage Report For Master Branch
        if: endsWith(github.ref, '/master')
        run: |
          echo "CODE_COVERAGE=true" >> $GITHUB_ENV
      - name: Install Dependencies
        run: |
          npm install --global --force yarn@1.22.19
          yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
          npm install --global --force yarn@$YARN_VERSION
          yarn cache clean
          yarn --frozen-lockfile --network-concurrency 1
      - name: Setup Redash Server
        run: |
          set -x
          touch .env
          yarn build
          yarn cypress build
          yarn cypress start -- --skip-db-seed
          docker-compose run cypress yarn cypress db-seed
          docker compose run cypress yarn cypress db-seed
      - name: Execute Cypress Tests
        run: yarn cypress run-ci
      - name: "Failure: output container logs to console"
        if: failure()
        run: docker-compose logs
        run: docker compose logs
      - name: Copy Code Coverage Results
        run: docker cp cypress:/usr/src/app/coverage ./coverage || true
      - name: Store Coverage Results
        uses: actions/upload-artifact@v3
        uses: actions/upload-artifact@v4
        with:
          name: coverage
          name: frontend-coverage
          path: coverage
      - uses: actions/upload-artifact@v4
        with:
          name: frontend
          path: client/dist
          retention-days: 1
```
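The reworked backend job above can be replayed locally; a rough sketch, assuming Docker Compose v2 and a checkout with an empty `.env`:

```bash
touch .env
docker compose build
docker compose up -d && sleep 10
docker compose run --rm postgres psql -h postgres -U postgres -c "create database tests;"
docker compose run --name tests server tests --junitxml=junit.xml --cov=redash tests/
docker cp tests:/app/junit.xml ./results.xml   # same copy step CI performs (local destination is arbitrary)
```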
**.github/workflows/periodic-snapshot.yml** (12 changed lines)
```yaml
@@ -12,15 +12,17 @@ jobs:
  bump-version-and-tag:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v4
        with:
          ssh-key: ${{secrets.ACTION_PUSH_KEY}}
      - run: |
          date="$(date +%y.%m).0-dev"
          gawk -i inplace -F: -v q=\" -v tag=$date '/^ "version": / { print $1 FS, q tag q ","; next} { print }' package.json
          gawk -i inplace -F= -v q=\" -v tag=$date '/^__version__ =/ { print $1 FS, q tag q; next} { print }' redash/__init__.py
          gawk -i inplace -F= -v q=\" -v tag=$date '/^version =/ { print $1 FS, q tag q; next} { print }' pyproject.toml
          git config user.name github-actions
          git config user.email github-actions@github.com
          git add package.json redash/__init__.py
          git commit -m "Shapshot: ${date}"
          git push origin
          git add package.json redash/__init__.py pyproject.toml
          git commit -m "Snapshot: ${date}"
          git tag $date
          git push origin $date
          git push --atomic origin master refs/tags/$date
```
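This is the workflow that produces the snapshot tags compared at the top of this page (23.09.0-dev, 24.05.0-dev). A rough sketch of what a single run leaves behind, using May 2024 only as an example date:

```bash
date="$(date +%y.%m).0-dev"                        # e.g. "24.05.0-dev"
# after the three gawk edits, the version fields agree:
grep '"version"' package.json                      # -> "version": "24.05.0-dev",
grep '^__version__' redash/__init__.py             # -> __version__ = "24.05.0-dev"
git push --atomic origin master "refs/tags/$date"  # branch and tag land together or not at all
```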
**.github/workflows/preview-image.yml** (new file, 154 lines)
```yaml
@@ -0,0 +1,154 @@
name: Preview Image
on:
  workflow_run:
    workflows:
      - Tests
    types:
      - completed
    branches:
      - master

env:
  DOCKER_REPO: redash

jobs:
  build-skip-check:
    runs-on: ubuntu-22.04
    if: ${{ github.event.workflow_run.conclusion == 'success' }}
    outputs:
      skip: ${{ steps.skip-check.outputs.skip }}
    steps:
      - name: Skip?
        id: skip-check
        run: |
          if [[ "${{ vars.DOCKER_USER }}" == '' ]]; then
            echo 'Docker user is empty. Skipping build+push'
            echo skip=true >> "$GITHUB_OUTPUT"
          elif [[ "${{ secrets.DOCKER_PASS }}" == '' ]]; then
            echo 'Docker password is empty. Skipping build+push'
            echo skip=true >> "$GITHUB_OUTPUT"
          else
            echo 'Docker user and password are set and branch is `master`.'
            echo 'Building + pushing `preview` image.'
            echo skip=false >> "$GITHUB_OUTPUT"
          fi

  build-docker-image:
    runs-on: ubuntu-22.04
    needs:
      - build-skip-check
    outputs:
      version: ${{ steps.version.outputs.VERSION_TAG }}
      repo: ${{ steps.version.outputs.DOCKER_REPO }}
    if: needs.build-skip-check.outputs.skip == 'false'
    strategy:
      fail-fast: false
      matrix:
        platform:
          - linux/amd64
          - linux/arm64
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 1
          ref: ${{ github.event.push.after }}
      - uses: dawidd6/action-download-artifact@v3
        with:
          name: frontend
          workflow: ci.yml
          github_token: ${{ secrets.GITHUB_TOKEN }}
          run_id: ${{ github.event.workflow_run.id }}
          path: client/dist
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Login to DockerHub
        uses: docker/login-action@v3
        with:
          username: ${{ vars.DOCKER_USER }}
          password: ${{ secrets.DOCKER_PASS }}
      - name: Set version
        id: version
        run: |
          set -x
          VERSION=$(jq -r .version package.json)
          FULL_VERSION=${VERSION}-b${GITHUB_RUN_ID}.${GITHUB_RUN_NUMBER}
          sed -ri "s/^__version__ = ([A-Za-z0-9.-]*)'/__version__ = '${FULL_VERSION}'/" redash/__init__.py
          sed -i "s/dev/${GITHUB_SHA}/" client/app/version.json
          echo "VERSION_TAG=$FULL_VERSION" >> "$GITHUB_OUTPUT"
          platform=${{ matrix.platform }}
          echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
          echo "SCOPE=${platform//\//-}" >> $GITHUB_ENV
          if [[ "${{ vars.DOCKER_REPO }}" != "" ]]; then
            echo "DOCKER_REPO=${{ vars.DOCKER_REPO }}" >> $GITHUB_ENV
            echo "DOCKER_REPO=${{ vars.DOCKER_REPO }}" >> $GITHUB_OUTPUT
          else
            echo "DOCKER_REPO=${DOCKER_REPO}" >> $GITHUB_ENV
            echo "DOCKER_REPO=${DOCKER_REPO}" >> $GITHUB_OUTPUT
          fi
      - name: Build and push preview image to Docker Hub
        uses: docker/build-push-action@v5
        id: build
        with:
          push: true
          context: .
          cache-from: type=gha,scope=${{ env.SCOPE }}
          cache-to: type=gha,mode=max,scope=${{ env.SCOPE }}
          platforms: ${{ matrix.platform }}
          outputs: type=image,name=${{ env.DOCKER_REPO }}/redash,push-by-digest=true,name-canonical=true,push=true
          build-args: |
            FRONTEND_BUILD_MODE=1
        env:
          DOCKER_CONTENT_TRUST: true
      - name: Export digest
        run: |
          mkdir -p /tmp/digests
          digest="${{ steps.build.outputs.digest }}"
          touch "/tmp/digests/${digest#sha256:}"
      - name: Upload digest
        uses: actions/upload-artifact@v4
        with:
          name: digests-${{ env.PLATFORM_PAIR }}
          path: /tmp/digests/*
          if-no-files-found: error
          retention-days: 1

  publish-docker-manifest:
    runs-on: ubuntu-22.04
    needs:
      - build-skip-check
      - build-docker-image
    if: needs.build-skip-check.outputs.skip == 'false'
    steps:
      - name: Download digests
        uses: actions/download-artifact@v4
        with:
          pattern: digests-*
          path: /tmp/digests
          merge-multiple: true
      - name: Setup Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ needs.build-docker-image.outputs.repo }}/redash
          tags: preview
      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ vars.DOCKER_USER }}
          password: ${{ secrets.DOCKER_PASS }}
      - name: Create manifest list and push
        working-directory: /tmp/digests
        run: |
          docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
            $(printf '${{ needs.build-docker-image.outputs.repo }}/redash@sha256:%s ' *)
      - name: Inspect image
        run: |
          REDASH_IMAGE="${{ needs.build-docker-image.outputs.repo }}/redash:${{ steps.meta.outputs.version }}"
          docker buildx imagetools inspect $REDASH_IMAGE
      - name: Push image ${{ needs.build-docker-image.outputs.repo }}/preview image
        run: |
          REDASH_IMAGE="${{ needs.build-docker-image.outputs.repo }}/redash:preview"
          PREVIEW_IMAGE="${{ needs.build-docker-image.outputs.repo }}/preview:${{ needs.build-docker-image.outputs.version }}"
          docker buildx imagetools create --tag $PREVIEW_IMAGE $REDASH_IMAGE
```
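Each matrix leg pushes its image by digest only; the publish job then stitches the digests into one multi-arch `preview` tag. A rough local equivalent, with placeholder digests:

```bash
# <amd64-digest> and <arm64-digest> stand in for the digests exported by the build matrix
docker buildx imagetools create -t redash/redash:preview \
  redash/redash@sha256:<amd64-digest> \
  redash/redash@sha256:<arm64-digest>
docker buildx imagetools inspect redash/redash:preview
```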
@@ -1,19 +1,10 @@
```yaml
repos:
  - repo: https://github.com/PyCQA/isort
    rev: 5.12.0
    hooks:
      - id: isort
  - repo: https://github.com/psf/black
    rev: 23.1.0
    hooks:
      - id: black
        language_version: python3
  - repo: https://github.com/pycqa/flake8
    rev: 6.1.0
  - repo: https://github.com/charliermarsh/ruff-pre-commit
    rev: "v0.0.287"
    hooks:
      - id: flake8
        exclude: "migration/.*|.git|viz-lib|node_modules|migrations|bin/upgrade"
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.4.0
    hooks:
      - id: requirements-txt-fixer
      - id: ruff
```
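With the isort/black/flake8 hooks folded into ruff, local usage stays the same; a short sketch, assuming pre-commit is not yet installed:

```bash
pip install pre-commit
pre-commit install          # register the git hook once per clone
pre-commit run --all-files  # same command `make format` runs
```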
**Dockerfile** (87 changed lines)
```dockerfile
@@ -1,46 +1,46 @@
FROM node:16.20.1 as frontend-builder

RUN npm install --global --force yarn@1.22.19

# Controls whether to build the frontend assets
ARG skip_frontend_build
ARG FRONTEND_BUILD_MODE=0

# MODE 0: create empty files. useful for backend tests
FROM alpine:3.19 as frontend-builder-0
RUN \
  mkdir -p /frontend/client/dist && \
  touch /frontend/client/dist/multi_org.html && \
  touch /frontend/client/dist/index.html

# MODE 1: copy static frontend from host, useful for CI to ignore building static content multiple times
FROM alpine:3.19 as frontend-builder-1
COPY client/dist /frontend/client/dist

# MODE 2: build static content in docker, can be used for a local development
FROM node:18-bookworm as frontend-builder-2
RUN npm install --global --force yarn@1.22.22
ENV CYPRESS_INSTALL_BINARY=0
ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1

RUN useradd -m -d /frontend redash
USER redash

WORKDIR /frontend
COPY --chown=redash package.json yarn.lock .yarnrc /frontend/
COPY --chown=redash viz-lib /frontend/viz-lib
COPY --chown=redash scripts /frontend/scripts

# Controls whether to instrument code for coverage information
ARG code_coverage
ENV BABEL_ENV=${code_coverage:+test}

RUN if [ "x$skip_frontend_build" = "x" ] ; then yarn --frozen-lockfile --network-concurrency 1; fi

RUN yarn --frozen-lockfile --network-concurrency 1;
COPY --chown=redash client /frontend/client
COPY --chown=redash webpack.config.js /frontend/
RUN if [ "x$skip_frontend_build" = "x" ] ; then yarn build; else mkdir -p /frontend/client/dist && touch /frontend/client/dist/multi_org.html && touch /frontend/client/dist/index.html; fi
RUN yarn build

FROM python:3.8-slim-buster
FROM frontend-builder-${FRONTEND_BUILD_MODE} as frontend-builder

FROM python:3.8-slim-bookworm

EXPOSE 5000

# Controls whether to install extra dependencies needed for all data sources.
ARG skip_ds_deps
# Controls whether to install dev dependencies.
ARG skip_dev_deps
# Controls whether to install all dependencies for testing.
ARG test_all_deps

RUN useradd --create-home redash

# Ubuntu packages
RUN apt-get update && \
  apt-get install -y --no-install-recommends \
    pkg-config \
    curl \
    gnupg \
    build-essential \
@@ -48,6 +48,8 @@ RUN apt-get update && \
    libffi-dev \
    sudo \
    git-core \
    # Kerberos, needed for MS SQL Python driver to compile on arm64
    libkrb5-dev \
    # Postgres client
    libpq-dev \
    # ODBC support:
@@ -64,17 +66,18 @@ RUN apt-get update && \
  apt-get clean && \
  rm -rf /var/lib/apt/lists/*

RUN \
  curl https://packages.microsoft.com/config/debian/12/prod.list > /etc/apt/sources.list.d/mssql-release.list && \
  curl https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor -o /usr/share/keyrings/microsoft-prod.gpg && \
  apt update && \
  ACCEPT_EULA=Y apt install -y --no-install-recommends msodbcsql18 && \
  apt clean && \
  rm -rf /var/lib/apt/lists/*

ARG TARGETPLATFORM
ARG databricks_odbc_driver_url=https://databricks-bi-artifacts.s3.us-east-2.amazonaws.com/simbaspark-drivers/odbc/2.6.26/SimbaSparkODBC-2.6.26.1045-Debian-64bit.zip
RUN if [ "$TARGETPLATFORM" = "linux/amd64" ]; then \
  curl https://packages.microsoft.com/keys/microsoft.asc | apt-key add - \
  && curl https://packages.microsoft.com/config/debian/10/prod.list > /etc/apt/sources.list.d/mssql-release.list \
  && apt-get update \
  && ACCEPT_EULA=Y apt-get install -y --no-install-recommends msodbcsql17 \
  && apt-get clean \
  && rm -rf /var/lib/apt/lists/* \
  && curl "$databricks_odbc_driver_url" --location --output /tmp/simba_odbc.zip \
  curl "$databricks_odbc_driver_url" --location --output /tmp/simba_odbc.zip \
  && chmod 600 /tmp/simba_odbc.zip \
  && unzip /tmp/simba_odbc.zip -d /tmp/simba \
  && dpkg -i /tmp/simba/*.deb \
@@ -84,24 +87,18 @@ RUN if [ "$TARGETPLATFORM" = "linux/amd64" ]; then \

WORKDIR /app

# Disable PIP Cache and Version Check
ENV PIP_DISABLE_PIP_VERSION_CHECK=1
ENV PIP_NO_CACHE_DIR=1
ENV POETRY_VERSION=1.6.1
ENV POETRY_HOME=/etc/poetry
ENV POETRY_VIRTUALENVS_CREATE=false
RUN curl -sSL https://install.python-poetry.org | python3 -

RUN pip install pip==23.1.2;
COPY pyproject.toml poetry.lock ./

# We first copy only the requirements file, to avoid rebuilding on every file change.
COPY requirements_all_ds.txt ./
RUN if [ "x$skip_ds_deps" = "x" ] ; then cat requirements_all_ds.txt | sed -e '/^\s*#.*$/d' -e '/^\s*$/d' | xargs -n 1 pip install || true ; else echo "Skipping pip install -r requirements_all_ds.txt" ; fi

COPY requirements_dev.txt ./
RUN if [ "x$skip_dev_deps" = "x" ] ; then pip install -r requirements_dev.txt ; fi

COPY requirements.txt ./
RUN pip install -r requirements.txt

RUN if [ "x$test_all_deps" != "x" ] ; then pip3 install -r requirements.txt -r requirements_dev.txt -r requirements_all_ds.txt ; fi
ARG POETRY_OPTIONS="--no-root --no-interaction --no-ansi"
# for LDAP authentication, install with `ldap3` group
# disabled by default due to GPL license conflict
ARG INSTALL_GROUPS="main,all_ds,dev"
RUN /etc/poetry/bin/poetry install --only $INSTALL_GROUPS $POETRY_OPTIONS

COPY --chown=redash . /app
COPY --from=frontend-builder --chown=redash /frontend/client/dist /app/client/dist
```
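The new FRONTEND_BUILD_MODE argument selects which frontend-builder stage feeds the final image; an illustrative set of builds (the tags here are arbitrary):

```bash
docker build --build-arg FRONTEND_BUILD_MODE=0 -t redash-backend-only .  # empty placeholder assets, for backend tests
docker build --build-arg FRONTEND_BUILD_MODE=1 -t redash-ci .            # copy a prebuilt client/dist from the host
docker build --build-arg FRONTEND_BUILD_MODE=2 -t redash-dev .           # build the frontend inside Docker
docker build --build-arg INSTALL_GROUPS=main -t redash-minimal .         # poetry groups replace the old pip requirements flags
```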
@@ -1,11 +1,11 @@
```dockerfile
FROM cypress/browsers:node16.18.0-chrome90-ff88
FROM cypress/browsers:node18.12.0-chrome106-ff106

ENV APP /usr/src/app
WORKDIR $APP

COPY package.json yarn.lock .yarnrc $APP/
COPY viz-lib $APP/viz-lib
RUN npm install yarn@1.22.19 -g && yarn --frozen-lockfile --network-concurrency 1 > /dev/null
RUN npm install yarn@1.22.22 -g && yarn --frozen-lockfile --network-concurrency 1 > /dev/null

COPY . $APP
```
**Makefile** (67 changed lines)
```makefile
@@ -1,26 +1,50 @@
.PHONY: compose_build up test_db create_database clean down tests lint backend-unit-tests frontend-unit-tests test build watch start redis-cli bash
.PHONY: compose_build up test_db create_database create_db clean clean-all down tests lint backend-unit-tests frontend-unit-tests pydeps test build watch start redis-cli bash

export COMPOSE_DOCKER_CLI_BUILD=1
export DOCKER_BUILDKIT=1
export COMPOSE_PROFILES=local

compose_build: .env
	COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker-compose build
	docker compose build

up:
	COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker-compose up -d --build
	docker compose up -d redis postgres
	docker compose exec -u postgres postgres psql postgres --csv \
		-1tqc "SELECT table_name FROM information_schema.tables WHERE table_name = 'organizations'" 2> /dev/null \
		| grep -q "organizations" || make create_database
	docker compose up -d --build

test_db:
	@for i in `seq 1 5`; do \
		if (docker-compose exec postgres sh -c 'psql -U postgres -c "select 1;"' 2>&1 > /dev/null) then break; \
		if (docker compose exec postgres sh -c 'psql -U postgres -c "select 1;"' 2>&1 > /dev/null) then break; \
		else echo "postgres initializing..."; sleep 5; fi \
	done
	docker-compose exec postgres sh -c 'psql -U postgres -c "drop database if exists tests;" && psql -U postgres -c "create database tests;"'
	docker compose exec postgres sh -c 'psql -U postgres -c "drop database if exists tests;" && psql -U postgres -c "create database tests;"'

create_database: .env
	docker-compose run server create_db
create_db: .env
	docker compose run server create_db

create_database: create_db

clean:
	docker-compose down && docker-compose rm
	docker compose down
	docker compose --project-name cypress down
	docker compose rm --stop --force
	docker compose --project-name cypress rm --stop --force
	docker image rm --force \
		cypress-server:latest cypress-worker:latest cypress-scheduler:latest \
		redash-server:latest redash-worker:latest redash-scheduler:latest
	docker container prune --force
	docker image prune --force
	docker volume prune --force

clean-all: clean
	docker image rm --force \
		redash/redash:10.1.0.b50633 redis:7-alpine maildev/maildev:latest \
		pgautoupgrade/pgautoupgrade:15-alpine3.8 pgautoupgrade/pgautoupgrade:latest

down:
	docker-compose down
	docker compose down

.env:
	printf "REDASH_COOKIE_SECRET=`pwgen -1s 32`\nREDASH_SECRET_KEY=`pwgen -1s 32`\n" >> .env
@@ -30,32 +54,39 @@ env: .env
format:
	pre-commit run --all-files

pydeps:
	pip3 install wheel
	pip3 install --upgrade black ruff launchpadlib pip setuptools
	pip3 install poetry
	poetry install --only main,all_ds,dev

tests:
	docker-compose run server tests
	docker compose run server tests

lint:
	./bin/flake8_tests.sh
	ruff check .
	black --check . --diff

backend-unit-tests: up test_db
	docker-compose run --rm --name tests server tests
	docker compose run --rm --name tests server tests

frontend-unit-tests:
	CYPRESS_INSTALL_BINARY=0 PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1 yarn --frozen-lockfile
	yarn test

test: lint backend-unit-tests frontend-unit-tests
test: backend-unit-tests frontend-unit-tests lint

build:
build:
	yarn build

watch:
watch:
	yarn watch

start:
start:
	yarn start

redis-cli:
	docker-compose run --rm redis redis-cli -h redis
	docker compose run --rm redis redis-cli -h redis

bash:
	docker-compose run --rm server bash
	docker compose run --rm server bash
```
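A typical local flow with the updated targets, assuming Docker Compose v2 and pwgen are available:

```bash
make up                  # writes .env, starts redis/postgres, seeds the database if missing, then builds the rest
make test_db             # (re)create the "tests" database
make backend-unit-tests  # run the backend suite inside the server container
make clean               # tear down both the local and cypress compose projects
```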
@@ -3,7 +3,6 @@
```
</p>

[](https://redash.io/help/)
[](https://datree.io/?src=badge)
[](https://github.com/getredash/redash/actions)

Redash is designed to enable anyone, regardless of the level of technical sophistication, to harness the power of data big and small. SQL users leverage Redash to explore, query, visualize, and share data from any data sources. Their work in turn enables anybody in their organization to use the data. Every day, millions of users at thousands of organizations around the world use Redash to develop insights and make data-driven decisions.
@@ -47,6 +46,7 @@ Redash supports more than 35 SQL and NoSQL [data sources](https://redash.io/help
- Dgraph
- Apache Drill
- Apache Druid
- e6data
- Eccenca Corporate Memory
- Elasticsearch
- Exasol
@@ -61,6 +61,7 @@ Redash supports more than 35 SQL and NoSQL [data sources](https://redash.io/help
- Apache Hive
- Apache Impala
- InfluxDB
- InfluxDBv2
- IBM Netezza Performance Server
- JIRA (JQL)
- JSON
@@ -83,6 +84,7 @@ Redash supports more than 35 SQL and NoSQL [data sources](https://redash.io/help
- Python
- Qubole
- Rockset
- RisingWave
- Salesforce
- ScyllaDB
- Shell Scripts
@@ -90,6 +92,7 @@ Redash supports more than 35 SQL and NoSQL [data sources](https://redash.io/help
- SPARQL
- SQLite
- TiDB
- Tinybird
- TreasureData
- Trino
- Uptycs
```
@@ -1,25 +1,48 @@
```bash
#!/bin/bash
set -e

if [ -z $REDASH_REDIS_URL ]; then
  export REDASH_REDIS_URL=redis://:${REDASH_REDIS_PASSWORD}@${REDASH_REDIS_HOSTNAME}:${REDASH_REDIS_PORT}/${REDASH_REDIS_NAME}
fi

if [ -z $REDASH_DATABASE_URL ]; then
  export REDASH_DATABASE_URL=postgresql://${REDASH_DATABASE_USER}:${REDASH_DATABASE_PASSWORD}@${REDASH_DATABASE_HOSTNAME}:${REDASH_DATABASE_PORT}/${REDASH_DATABASE_NAME}
fi

scheduler() {
  echo "Starting RQ scheduler..."

  exec /app/manage.py rq scheduler
}

dev_scheduler() {
  echo "Starting dev RQ scheduler..."

  exec watchmedo auto-restart --directory=./redash/ --pattern=*.py --recursive -- ./manage.py rq scheduler
  case $REDASH_PRODUCTION in
    true)
      echo "Starting RQ scheduler in production mode"
      exec ./manage.py rq scheduler
      ;;
    *)
      echo "Starting RQ scheduler in dev mode"
      exec watchmedo auto-restart \
        --directory=./redash/ \
        --pattern=*.py \
        --recursive -- ./manage.py rq scheduler $QUEUES
      ;;
  esac
}

worker() {
  echo "Starting RQ worker..."

  export WORKERS_COUNT=${WORKERS_COUNT:-2}
  export QUEUES=${QUEUES:-}

  exec supervisord -c worker.conf
  case $REDASH_PRODUCTION in
    true)
      echo "Starting RQ worker in production mode"
      exec supervisord -c worker.conf
      ;;
    *)
      echo "Starting RQ worker in dev mode"
      exec watchmedo auto-restart \
        --directory=./redash/ \
        --pattern=*.py \
        --recursive -- ./manage.py rq worker $QUEUES
      ;;
  esac
}

workers_healthcheck() {
@@ -35,22 +58,63 @@ workers_healthcheck() {
  fi
}

dev_worker() {
  echo "Starting dev RQ worker..."

  exec watchmedo auto-restart --directory=./redash/ --pattern=*.py --recursive -- ./manage.py rq worker $QUEUES
}

server() {
  # Recycle gunicorn workers every n-th request. See http://docs.gunicorn.org/en/stable/settings.html#max-requests for more details.
  MAX_REQUESTS=${MAX_REQUESTS:-1000}
  MAX_REQUESTS_JITTER=${MAX_REQUESTS_JITTER:-100}
  TIMEOUT=${REDASH_GUNICORN_TIMEOUT:-60}
  exec /usr/local/bin/gunicorn -b 0.0.0.0:5000 --name redash -w${REDASH_WEB_WORKERS:-4} redash.wsgi:app --max-requests $MAX_REQUESTS --max-requests-jitter $MAX_REQUESTS_JITTER --timeout $TIMEOUT
  case $REDASH_PRODUCTION in
    true)
      echo "Starting Redash Server in production mode"
      MAX_REQUESTS=${MAX_REQUESTS:-1000}
      MAX_REQUESTS_JITTER=${MAX_REQUESTS_JITTER:-100}
      TIMEOUT=${REDASH_GUNICORN_TIMEOUT:-60}
      exec /usr/local/bin/gunicorn \
        -b 0.0.0.0:5000 \
        --name redash \
        -w${REDASH_WEB_WORKERS:-4} redash.wsgi:app \
        --max-requests $MAX_REQUESTS \
        --max-requests-jitter $MAX_REQUESTS_JITTER \
        --timeout $TIMEOUT
      ;;
    *)
      echo "Starting Redash Server in a dev mode"
      export FLASK_DEBUG=1
      exec /app/manage.py runserver --debugger --reload -h 0.0.0.0
      ;;
  esac
}

create_db() {
  exec /app/manage.py database create_tables
  REDASH_DATABASE_MIGRATE_TIMEOUT=${REDASH_DATABASE_UPGRADE_TIMEOUT:-600}
  REDASH_DATABASE_MIGRATE_MAX_ATTEMPTS=${REDASH_DATABASE_MIGRATE_MAX_ATTEMPTS:-5}
  REDASH_DATABASE_MIGRATE_RETRY_WAIT=${REDASH_DATABASE_MIGRATE_RETRY_WAIT:-10}
  ATTEMPTS=1
  while ((ATTEMPTS <= REDASH_DATABASE_MIGRATE_MAX_ATTEMPTS)); do
    echo "Creating or updating Redash database, attempt ${ATTEMPTS} of ${REDASH_DATABASE_MIGRATE_MAX_ATTEMPTS}"
    ATTEMPTS=$((ATTEMPTS+1))
    timeout $REDASH_DATABASE_MIGRATE_TIMEOUT /app/manage.py database create_tables
    timeout $REDASH_DATABASE_MIGRATE_TIMEOUT /app/manage.py db upgrade
    STATUS=$(timeout $REDASH_DATABASE_MIGRATE_TIMEOUT /app/manage.py status 2>&1)
    RETCODE=$?
    case "$RETCODE" in
      0)
        exit 0
        ;;
      124)
        echo "Status command timed out after ${REDASH_DATABASE_MIGRATE_TIMEOUT} seconds."
        ;;
    esac
    case "$STATUS" in
      *sqlalchemy.exc.OperationalError*)
        echo "Database not yet functional, waiting."
        ;;
      *sqlalchemy.exc.ProgrammingError*)
        echo "Database does not appear to be installed."
        ;;
    esac
    echo "Waiting ${REDASH_DATABASE_MIGRATE_RETRY_WAIT} seconds before retrying."
    sleep ${REDASH_DATABASE_MIGRATE_RETRY_WAIT}
  done
  echo "Reached ${REDASH_DATABASE_MIGRATE_MAX_ATTEMPTS} attempts, giving up."
  exit 1
}

help() {
@@ -61,21 +125,16 @@ help() {

  echo "server -- start Redash server (with gunicorn)"
  echo "worker -- start a single RQ worker"
  echo "dev_worker -- start a single RQ worker with code reloading"
  echo "scheduler -- start an rq-scheduler instance"
  echo "dev_scheduler -- start an rq-scheduler instance with code reloading"
  echo ""
  echo "shell -- open shell"
  echo "dev_server -- start Flask development server with debugger and auto reload"
  echo "debug -- start Flask development server with remote debugger via ptvsd"
  echo "create_db -- create database tables"
  echo "create_db -- create database tables and run migrations"
  echo "manage -- CLI to manage redash"
  echo "tests -- run tests"
}

tests() {
  export REDASH_DATABASE_URL="postgresql://postgres@postgres/tests"

  if [ $# -eq 0 ]; then
    TEST_ARGS=tests/
  else
@@ -101,22 +160,10 @@ case "$1" in
    shift
    scheduler
    ;;
  dev_scheduler)
    shift
    dev_scheduler
    ;;
  dev_worker)
    shift
    dev_worker
    ;;
  celery_healthcheck)
    shift
    echo "DEPRECATED: Celery has been replaced with RQ and now performs healthchecks autonomously as part of the 'worker' entrypoint."
    ;;
  dev_server)
    export FLASK_DEBUG=1
    exec /app/manage.py runserver --debugger --reload -h 0.0.0.0
    ;;
  debug)
    export FLASK_DEBUG=1
    export REMOTE_DEBUG=1
```
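The dev_* entrypoints go away; mode is now selected with REDASH_PRODUCTION. A sketch using the compose service names from above:

```bash
docker compose run --rm -e REDASH_PRODUCTION=true server server   # gunicorn
docker compose run --rm server server                             # dev mode: Flask debugger + auto reload
docker compose run --rm server create_db                          # create tables and run migrations, with retries
```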
@@ -1,9 +0,0 @@ (file removed)
```bash
#!/bin/sh

set -o errexit # fail the build if any task fails

flake8 --version ; pip --version
# stop the build if there are Python syntax errors or undefined names
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
```
**bin/upgrade** (254 lines, file removed)
```python
@@ -1,254 +0,0 @@
#!/usr/bin/env python3
import argparse
import os
import subprocess
import sys
import urllib
from collections import namedtuple
from fnmatch import fnmatch

import requests

try:
    import semver
except ImportError:
    print("Missing required library: semver.")
    exit(1)

REDASH_HOME = os.environ.get("REDASH_HOME", "/opt/redash")
CURRENT_VERSION_PATH = "{}/current".format(REDASH_HOME)


def run(cmd, cwd=None):
    if not cwd:
        cwd = REDASH_HOME

    return subprocess.check_output(cmd, cwd=cwd, shell=True, stderr=subprocess.STDOUT)


def confirm(question):
    reply = str(input(question + " (y/n): ")).lower().strip()

    if reply[0] == "y":
        return True
    if reply[0] == "n":
        return False
    else:
        return confirm("Please use 'y' or 'n'")


def version_path(version_name):
    return "{}/{}".format(REDASH_HOME, version_name)


END_CODE = "\033[0m"


def colored_string(text, color):
    if sys.stdout.isatty():
        return "{}{}{}".format(color, text, END_CODE)
    else:
        return text


def h1(text):
    print(colored_string(text, "\033[4m\033[1m"))


def green(text):
    print(colored_string(text, "\033[92m"))


def red(text):
    print(colored_string(text, "\033[91m"))


class Release(namedtuple("Release", ("version", "download_url", "filename", "description"))):
    def v1_or_newer(self):
        return semver.compare(self.version, "1.0.0-alpha") >= 0

    def is_newer(self, version):
        return semver.compare(self.version, version) > 0

    @property
    def version_name(self):
        return self.filename.replace(".tar.gz", "")


def get_latest_release_from_ci():
    response = requests.get(
        "https://circleci.com/api/v1.1/project/github/getredash/redash/latest/artifacts?branch=master"
    )

    if response.status_code != 200:
        exit("Failed getting releases (status code: %s)." % response.status_code)

    tarball_asset = filter(lambda asset: asset["url"].endswith(".tar.gz"), response.json())[0]
    filename = urllib.unquote(tarball_asset["pretty_path"].split("/")[-1])
    version = filename.replace("redash.", "").replace(".tar.gz", "")

    release = Release(version, tarball_asset["url"], filename, "")

    return release


def get_release(channel):
    if channel == "ci":
        return get_latest_release_from_ci()

    response = requests.get("https://version.redash.io/api/releases?channel={}".format(channel))
    release = response.json()[0]

    filename = release["download_url"].split("/")[-1]
    release = Release(release["version"], release["download_url"], filename, release["description"])

    return release


def link_to_current(version_name):
    green("Linking to current version...")
    run("ln -nfs {} {}".format(version_path(version_name), CURRENT_VERSION_PATH))


def restart_services():
    # We're doing this instead of simple 'supervisorctl restart all' because
    # otherwise it won't notice that /opt/redash/current pointing at a different
    # directory.
    green("Restarting...")
    try:
        run("sudo /etc/init.d/redash_supervisord restart")
    except subprocess.CalledProcessError as e:
        run("sudo service supervisor restart")


def update_requirements(version_name):
    green("Installing new Python packages (if needed)...")
    new_requirements_file = "{}/requirements.txt".format(version_path(version_name))

    install_requirements = False

    try:
        run("diff {}/requirements.txt {}".format(CURRENT_VERSION_PATH, new_requirements_file)) != 0
    except subprocess.CalledProcessError as e:
        if e.returncode != 0:
            install_requirements = True

    if install_requirements:
        run("sudo pip install -r {}".format(new_requirements_file))


def apply_migrations(release):
    green("Running migrations (if needed)...")
    if not release.v1_or_newer():
        return apply_migrations_pre_v1(release.version_name)

    run("sudo -u redash bin/run ./manage.py db upgrade", cwd=version_path(release.version_name))


def find_migrations(version_name):
    current_migrations = set(
        [f for f in os.listdir("{}/migrations".format(CURRENT_VERSION_PATH)) if fnmatch(f, "*_*.py")]
    )
    new_migrations = sorted(
        [f for f in os.listdir("{}/migrations".format(version_path(version_name))) if fnmatch(f, "*_*.py")]
    )

    return [m for m in new_migrations if m not in current_migrations]


def apply_migrations_pre_v1(version_name):
    new_migrations = find_migrations(version_name)

    if new_migrations:
        green("New migrations to run: ")
        print(", ".join(new_migrations))
    else:
        print("No new migrations in this version.")

    if new_migrations and confirm("Apply new migrations? (make sure you have backup)"):
        for migration in new_migrations:
            print("Applying {}...".format(migration))
            run(
                "sudo sudo -u redash PYTHONPATH=. bin/run python migrations/{}".format(migration),
                cwd=version_path(version_name),
            )


def download_and_unpack(release):
    directory_name = release.version_name

    green("Downloading release tarball...")
    run(
        'sudo wget --header="Accept: application/octet-stream" -O {} {}'.format(release.filename, release.download_url)
    )
    green("Unpacking to: {}...".format(directory_name))
    run("sudo mkdir -p {}".format(directory_name))
    run("sudo tar -C {} -xvf {}".format(directory_name, release.filename))

    green("Changing ownership to redash...")
    run("sudo chown redash {}".format(directory_name))

    green("Linking .env file...")
    run("sudo ln -nfs {}/.env {}/.env".format(REDASH_HOME, version_path(directory_name)))


def current_version():
    real_current_path = os.path.realpath(CURRENT_VERSION_PATH).replace(".b", "+b")
    return real_current_path.replace(REDASH_HOME + "/", "").replace("redash.", "")


def verify_minimum_version():
    green("Current version: " + current_version())
    if semver.compare(current_version(), "0.12.0") < 0:
        red("You need to have Redash v0.12.0 or newer to upgrade to post v1.0.0 releases.")
        green("To upgrade to v0.12.0, run the upgrade script set to the legacy channel (--channel legacy).")
        exit(1)


def show_description_and_confirm(description):
    if description:
        print(description)

        if not confirm("Continue with upgrade?"):
            red("Cancelling upgrade.")
            exit(1)


def verify_newer_version(release):
    if not release.is_newer(current_version()):
        red("The found release is not newer than your current deployed release ({}).".format(current_version()))
        if not confirm("Continue with upgrade?"):
            red("Cancelling upgrade.")
            exit(1)


def deploy_release(channel):
    h1("Starting Redash upgrade:")

    release = get_release(channel)
    green("Found version: {}".format(release.version))

    if release.v1_or_newer():
        verify_minimum_version()

    verify_newer_version(release)
    show_description_and_confirm(release.description)

    try:
        download_and_unpack(release)
        update_requirements(release.version_name)
        apply_migrations(release)
        link_to_current(release.version_name)
        restart_services()
        green("Done! Enjoy.")
    except subprocess.CalledProcessError as e:
        red("Failed running: {}".format(e.cmd))
        red("Exit status: {}\nOutput:\n{}".format(e.returncode, e.output))


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--channel", help="The channel to get release from (default: stable).", default="stable")
    args = parser.parse_args()

    deploy_release(args.channel)
```
New binary files:

| File | Size |
|---|---|
| client/app/assets/images/db-logos/e6data.png | 5.2 KiB |
| client/app/assets/images/db-logos/influxdbv2.png | 18 KiB |
| client/app/assets/images/db-logos/risingwave.png | 9.7 KiB |
| client/app/assets/images/db-logos/tinybird.png | 18 KiB |
| client/app/assets/images/db-logos/yandex_disk.png | 8.5 KiB |
| client/app/assets/images/destinations/datadog.png | 45 KiB |
| client/app/assets/images/destinations/webex.png | 22 KiB |
@@ -223,6 +223,7 @@ body.fixed-layout {
```less
}

.editor__left__schema {
  min-height: 120px;
  flex-grow: 1;
  display: flex;
  flex-direction: column;
```
@@ -1,6 +1,5 @@
```jsx
import React from "react";
import Link from "@/components/Link";
import { clientConfig, currentUser } from "@/services/auth";
import { clientConfig } from "@/services/auth";
import frontendVersion from "@/version.json";

export default function VersionInfo() {
@@ -10,15 +9,6 @@ export default function VersionInfo() {
        Version: {clientConfig.version}
        {frontendVersion !== clientConfig.version && ` (${frontendVersion.substring(0, 8)})`}
      </div>
      {clientConfig.newVersionAvailable && currentUser.hasPermission("super_admin") && (
        <div className="m-t-10">
          {/* eslint-disable react/jsx-no-target-blank */}
          <Link href="https://version.redash.io/" className="update-available" target="_blank" rel="noopener">
            Update Available <i className="fa fa-external-link m-l-5" aria-hidden="true" />
            <span className="sr-only">(opens in a new tab)</span>
          </Link>
        </div>
      )}
    </React.Fragment>
  );
}
```
@@ -1,79 +0,0 @@ (file removed)
```jsx
import React, { useState } from "react";
import Card from "antd/lib/card";
import Button from "antd/lib/button";
import Typography from "antd/lib/typography";
import { clientConfig } from "@/services/auth";
import Link from "@/components/Link";
import HelpTrigger from "@/components/HelpTrigger";
import DynamicComponent from "@/components/DynamicComponent";
import OrgSettings from "@/services/organizationSettings";

const Text = Typography.Text;

function BeaconConsent() {
  const [hide, setHide] = useState(false);

  if (!clientConfig.showBeaconConsentMessage || hide) {
    return null;
  }

  const hideConsentCard = () => {
    clientConfig.showBeaconConsentMessage = false;
    setHide(true);
  };

  const confirmConsent = confirm => {
    let message = "🙏 Thank you.";

    if (!confirm) {
      message = "Settings Saved.";
    }

    OrgSettings.save({ beacon_consent: confirm }, message)
      // .then(() => {
      //   // const settings = get(response, 'settings');
      //   // this.setState({ settings, formValues: { ...settings } });
      // })
      .finally(hideConsentCard);
  };

  return (
    <DynamicComponent name="BeaconConsent">
      <div className="m-t-10 tiled">
        <Card
          title={
            <>
              Would you be ok with sharing anonymous usage data with the Redash team?{" "}
              <HelpTrigger type="USAGE_DATA_SHARING" />
            </>
          }
          bordered={false}>
          <Text>Help Redash improve by automatically sending anonymous usage data:</Text>
          <div className="m-t-5">
            <ul>
              <li> Number of users, queries, dashboards, alerts, widgets and visualizations.</li>
              <li> Types of data sources, alert destinations and visualizations.</li>
            </ul>
          </div>
          <Text>All data is aggregated and will never include any sensitive or private data.</Text>
          <div className="m-t-5">
            <Button type="primary" className="m-r-5" onClick={() => confirmConsent(true)}>
              Yes
            </Button>
            <Button type="default" onClick={() => confirmConsent(false)}>
              No
            </Button>
          </div>
          <div className="m-t-15">
            <Text type="secondary">
              You can change this setting anytime from the{" "}
              <Link href="settings/organization">Organization Settings</Link> page.
            </Text>
          </div>
        </Card>
      </div>
    </DynamicComponent>
  );
}

export default BeaconConsent;
```
@@ -23,7 +23,6 @@ export const TYPES = mapValues(
VALUE_SOURCE_OPTIONS: ["/user-guide/querying/query-parameters#Value-Source-Options", "Guide: Value Source Options"],
SHARE_DASHBOARD: ["/user-guide/dashboards/sharing-dashboards", "Guide: Sharing and Embedding Dashboards"],
AUTHENTICATION_OPTIONS: ["/user-guide/users/authentication-options", "Guide: Authentication Options"],
USAGE_DATA_SHARING: ["/open-source/admin-guide/usage-data", "Help: Anonymous Usage Data Sharing"],
DS_ATHENA: ["/data-sources/amazon-athena-setup", "Guide: Help Setting up Amazon Athena"],
DS_BIGQUERY: ["/data-sources/bigquery-setup", "Guide: Help Setting up BigQuery"],
DS_URL: ["/data-sources/querying-urls", "Guide: Help Setting up URL"],
@@ -1,4 +1,4 @@
import { size, filter, forEach, extend } from "lodash";
import { size, filter, forEach, extend, isEmpty } from "lodash";
import React from "react";
import PropTypes from "prop-types";
import { SortableContainer, SortableElement, DragHandle } from "@redash/viz/lib/components/sortable";

@@ -43,13 +43,23 @@ export default class Parameters extends React.Component {
appendSortableToParent: true,
};

toCamelCase = str => {
if (isEmpty(str)) {
return "";
}
return str.replace(/\s+/g, "").toLowerCase();
};

constructor(props) {
super(props);
const { parameters } = props;
const { parameters, disableUrlUpdate } = props;
this.state = { parameters };
if (!props.disableUrlUpdate) {
if (!disableUrlUpdate) {
updateUrl(parameters);
}
const hideRegex = /hide_filter=([^&]+)/g;
const matches = window.location.search.matchAll(hideRegex);
this.hideValues = Array.from(matches, match => match[1]);
}

componentDidUpdate = prevProps => {

@@ -122,7 +132,13 @@ export default class Parameters extends React.Component {
};

renderParameter(param, index) {
if (this.hideValues.some(value => this.toCamelCase(value) === this.toCamelCase(param.name))) {
return null;
}
const { editable } = this.props;
if (param.hidden) {
return null;
}
return (
<div key={param.name} className="di-block" data-test={`ParameterName-${param.name}`}>
<div className="parameter-heading">

@@ -138,6 +154,7 @@ export default class Parameters extends React.Component {
</PlainButton>
)}
</div>

<ParameterValueInput
type={param.type}
value={param.normalizedValue}

@@ -154,7 +171,6 @@ export default class Parameters extends React.Component {
const { parameters } = this.state;
const { sortable, appendSortableToParent } = this.props;
const dirtyParamCount = size(filter(parameters, "hasPendingValue"));

return (
<SortableContainer
disabled={!sortable}

@@ -169,17 +185,18 @@ export default class Parameters extends React.Component {
className: "parameter-container",
onKeyDown: dirtyParamCount ? this.handleKeyDown : null,
}}>
{parameters.map((param, index) => (
<SortableElement key={param.name} index={index}>
<div
className="parameter-block"
data-editable={sortable || null}
data-test={`ParameterBlock-${param.name}`}>
{sortable && <DragHandle data-test={`DragHandle-${param.name}`} />}
{this.renderParameter(param, index)}
</div>
</SortableElement>
))}
{parameters &&
parameters.map((param, index) => (
<SortableElement key={param.name} index={index}>
<div
className="parameter-block"
data-editable={sortable || null}
data-test={`ParameterBlock-${param.name}`}>
{sortable && <DragHandle data-test={`DragHandle-${param.name}`} />}
{this.renderParameter(param, index)}
</div>
</SortableElement>
))}
<ParameterApplyButton onClick={this.applyChanges} paramCount={dirtyParamCount} />
</SortableContainer>
);
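Illustrative sketch, not part of the changeset: the new hide_filter handling above reads repeated hide_filter= values from the query string and compares them to parameter names after stripping whitespace and lower-casing. The search string below is an invented example.

// Standalone sketch of the hide_filter matching added above; not the component itself.
const toCamelCase = str => (str ? str.replace(/\s+/g, "").toLowerCase() : "");

// e.g. window.location.search === "?p_date=2024-01-01&hide_filter=Region"
function hiddenParameterValues(search) {
  const matches = search.matchAll(/hide_filter=([^&]+)/g);
  return Array.from(matches, match => match[1]);
}

const hideValues = hiddenParameterValues("?hide_filter=Region");
const shouldHide = hideValues.some(value => toCamelCase(value) === toCamelCase("region"));
console.log(shouldHide); // true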
@@ -123,6 +123,7 @@
right: 10px;
bottom: 15px;
height: auto;
overflow: hidden;
padding: 0;
}
}
@@ -19,6 +19,7 @@ import PlainButton from "@/components/PlainButton";
import ExpandedWidgetDialog from "@/components/dashboards/ExpandedWidgetDialog";
import EditParameterMappingsDialog from "@/components/dashboards/EditParameterMappingsDialog";
import VisualizationRenderer from "@/components/visualizations/VisualizationRenderer";
import { ExecutionStatus } from "@/services/query-result";

import Widget from "./Widget";

@@ -278,7 +279,7 @@ class VisualizationWidget extends React.Component {
const widgetQueryResult = widget.getQueryResult();
const widgetStatus = widgetQueryResult && widgetQueryResult.getStatus();
switch (widgetStatus) {
case "failed":
case ExecutionStatus.FAILED:
return (
<div className="body-row-auto scrollbox">
{widgetQueryResult.getError() && (

@@ -288,7 +289,7 @@ class VisualizationWidget extends React.Component {
)}
</div>
);
case "done":
case ExecutionStatus.FINISHED:
return (
<div className="body-row-auto scrollbox">
<VisualizationRenderer
@@ -16,6 +16,7 @@ import LoadingState from "../items-list/components/LoadingState";
const SchemaItemColumnType = PropTypes.shape({
name: PropTypes.string.isRequired,
type: PropTypes.string,
comment: PropTypes.string,
});

export const SchemaItemType = PropTypes.shape({

@@ -47,13 +48,30 @@ function SchemaItem({ item, expanded, onToggle, onSelect, ...props }) {
return (
<div {...props}>
<div className="schema-list-item">
<PlainButton className="table-name" onClick={onToggle}>
<i className="fa fa-table m-r-5" aria-hidden="true" />
<strong>
<span title={item.name}>{tableDisplayName}</span>
{!isNil(item.size) && <span> ({item.size})</span>}
</strong>
</PlainButton>
{item.description ? (
<Tooltip
title={item.description}
mouseEnterDelay={0}
mouseLeaveDelay={0}
placement="right"
arrowPointAtCenter>
<PlainButton className="table-name" onClick={onToggle}>
<i className="fa fa-table m-r-5" aria-hidden="true" />
<strong>
<span title={item.name}>{tableDisplayName}</span>
{!isNil(item.size) && <span> ({item.size})</span>}
</strong>
</PlainButton>
</Tooltip>
) : (
<PlainButton className="table-name" onClick={onToggle}>
<i className="fa fa-table m-r-5" aria-hidden="true" />
<strong>
<span title={item.name}>{tableDisplayName}</span>
{!isNil(item.size) && <span> ({item.size})</span>}
</strong>
</PlainButton>
)}
<Tooltip
title="Insert table name into query text"
mouseEnterDelay={0}

@@ -73,22 +91,34 @@ function SchemaItem({ item, expanded, onToggle, onSelect, ...props }) {
map(item.columns, column => {
const columnName = get(column, "name");
const columnType = get(column, "type");
return (
<Tooltip
title="Insert column name into query text"
mouseEnterDelay={0}
mouseLeaveDelay={0}
placement="rightTop">
<PlainButton key={columnName} className="table-open-item" onClick={e => handleSelect(e, columnName)}>
<div>
{columnName} {columnType && <span className="column-type">{columnType}</span>}
</div>
const columnComment = get(column, "comment");
if (columnComment) {
return (
<Tooltip title={columnComment} mouseEnterDelay={0} mouseLeaveDelay={0} placement="rightTop">
<PlainButton
key={columnName}
className="table-open-item"
onClick={e => handleSelect(e, columnName)}>
<div>
{columnName} {columnType && <span className="column-type">{columnType}</span>}
</div>

<div className="copy-to-editor">
<i className="fa fa-angle-double-right" aria-hidden="true" />
</div>
</PlainButton>
</Tooltip>
<div className="copy-to-editor">
<i className="fa fa-angle-double-right" aria-hidden="true" />
</div>
</PlainButton>
</Tooltip>
);
}
return (
<PlainButton key={columnName} className="table-open-item" onClick={e => handleSelect(e, columnName)}>
<div>
{columnName} {columnType && <span className="column-type">{columnType}</span>}
</div>
<div className="copy-to-editor">
<i className="fa fa-angle-double-right" aria-hidden="true" />
</div>
</PlainButton>
);
})
)}
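Illustrative sketch, not part of the changeset: the shape of a schema item that would exercise the new tooltips above. The field names follow the SchemaItemColumnType prop types; the values are invented.

// Invented example data; field names follow SchemaItemColumnType above.
const exampleItem = {
  name: "orders",
  description: "One row per customer order",
  size: 12345,
  columns: [
    { name: "id", type: "integer" },
    { name: "total", type: "numeric", comment: "Order total in cents" },
  ],
};

// Columns that carry a comment get wrapped in <Tooltip title={column.comment}>;
// the rest keep the plain "Insert column name into query text" tooltip.
const commentedColumns = exampleItem.columns.filter(column => Boolean(column.comment));
console.log(commentedColumns.map(column => column.name)); // ["total"]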
@@ -148,7 +148,9 @@ function EditVisualizationDialog({ dialog, visualization, query, queryResult })

function dismiss() {
const optionsChanged = !isEqual(options, defaultState.originalOptions);
confirmDialogClose(nameChanged || optionsChanged).then(dialog.dismiss);
confirmDialogClose(nameChanged || optionsChanged)
.then(dialog.dismiss)
.catch(() => {});
}

// When editing existing visualization chart type selector is disabled, so add only existing visualization's
@@ -5,7 +5,7 @@
<meta charset="UTF-8" />
<base href="{{base_href}}" />
<title><%= htmlWebpackPlugin.options.title %></title>
<script src="/static/unsupportedRedirect.js" async></script>
<script src="<%= htmlWebpackPlugin.options.staticPath %>unsupportedRedirect.js" async></script>

<link rel="icon" type="image/png" sizes="32x32" href="/static/images/favicon-32x32.png" />
<link rel="icon" type="image/png" sizes="96x96" href="/static/images/favicon-96x96.png" />
@@ -119,6 +119,8 @@ function DashboardMoreOptionsButton({ dashboardConfiguration }) {
managePermissions,
gridDisabled,
isDashboardOwnerOrAdmin,
isDuplicating,
duplicateDashboard,
} = dashboardConfiguration;

const archive = () => {

@@ -142,6 +144,14 @@ function DashboardMoreOptionsButton({ dashboardConfiguration }) {
<Menu.Item className={cx({ hidden: gridDisabled })}>
<PlainButton onClick={() => setEditingLayout(true)}>Edit</PlainButton>
</Menu.Item>
{!isDuplicating && dashboard.canEdit() && (
<Menu.Item>
<PlainButton onClick={duplicateDashboard}>
Fork <i className="fa fa-external-link m-l-5" aria-hidden="true" />
<span className="sr-only">(opens in a new tab)</span>
</PlainButton>
</Menu.Item>
)}
{clientConfig.showPermissionsControl && isDashboardOwnerOrAdmin && (
<Menu.Item>
<PlainButton onClick={managePermissions}>Manage Permissions</PlainButton>
@@ -94,12 +94,12 @@ class ShareDashboardDialog extends React.Component {
};

render() {
const { dialog, dashboard } = this.props;
const { dialog, dashboard, hasOnlySafeQueries } = this.props;
const headerContent = this.constructor.headerContent;
return (
<Modal {...dialog.props} title={this.constructor.headerContent} footer={null}>
<Modal {...dialog.props} title={headerContent} footer={null}>
<Form layout="horizontal">
{!this.props.hasOnlySafeQueries && (
{!hasOnlySafeQueries && (
<Form.Item>
<Alert
message="For your security, sharing is currently not supported for dashboards containing queries with text parameters. Consider changing the text parameters in your query to a different type."

@@ -107,6 +107,7 @@ class ShareDashboardDialog extends React.Component {
/>
</Form.Item>
)}

<Form.Item label="Allow public access" {...this.formItemProps}>
<Switch
checked={dashboard.publicAccessEnabled}
@@ -15,6 +15,7 @@ import ShareDashboardDialog from "../components/ShareDashboardDialog";
import useFullscreenHandler from "../../../lib/hooks/useFullscreenHandler";
import useRefreshRateHandler from "./useRefreshRateHandler";
import useEditModeHandler from "./useEditModeHandler";
import useDuplicateDashboard from "./useDuplicateDashboard";
import { policy } from "@/services/policy";

export { DashboardStatusEnum } from "./useEditModeHandler";

@@ -53,6 +54,8 @@ function useDashboard(dashboardData) {
[dashboard]
);

const [isDuplicating, duplicateDashboard] = useDuplicateDashboard(dashboard);

const managePermissions = useCallback(() => {
const aclUrl = `api/dashboards/${dashboard.id}/acl`;
PermissionsEditorDialog.showModal({

@@ -243,6 +246,8 @@ function useDashboard(dashboardData) {
showAddTextboxDialog,
showAddWidgetDialog,
managePermissions,
isDuplicating,
duplicateDashboard,
};
}
client/app/pages/dashboards/hooks/useDuplicateDashboard.js (new file, 40 lines)
@@ -0,0 +1,40 @@
import { noop, extend, pick } from "lodash";
import { useCallback, useState } from "react";
import url from "url";
import qs from "query-string";
import { Dashboard } from "@/services/dashboard";

function keepCurrentUrlParams(targetUrl) {
const currentUrlParams = qs.parse(window.location.search);
targetUrl = url.parse(targetUrl);
const targetUrlParams = qs.parse(targetUrl.search);
return url.format(
extend(pick(targetUrl, ["protocol", "auth", "host", "pathname"]), {
search: qs.stringify(extend(currentUrlParams, targetUrlParams)),
})
);
}

export default function useDuplicateDashboard(dashboard) {
const [isDuplicating, setIsDuplicating] = useState(false);

const duplicateDashboard = useCallback(() => {
// To prevent opening the same tab, name must be unique for each browser
const tabName = `duplicatedDashboardTab/${Math.random().toString()}`;

// We should open tab here because this moment is a part of user interaction;
// later browser will block such attempts
const tab = window.open("", tabName);

setIsDuplicating(true);
Dashboard.fork({ id: dashboard.id })
.then(newDashboard => {
tab.location = keepCurrentUrlParams(newDashboard.getUrl());
})
.finally(() => {
setIsDuplicating(false);
});
}, [dashboard.id]);

return [isDuplicating, isDuplicating ? noop : duplicateDashboard];
}
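Illustrative sketch, not part of the changeset: the comments in useDuplicateDashboard explain that the tab must be opened synchronously during the click and only have its location assigned once the fork completes, so popup blockers do not intervene. A generic version of that pattern, with forkSomething() as a placeholder for any promise-returning call:

// Generic "open the tab during the user gesture" pattern used above.
function openResultInNewTab(forkSomething) {
  // Must run synchronously inside the click handler; a later window.open
  // is likely to be treated by the browser as an unsolicited popup.
  const tab = window.open("", `tab/${Math.random()}`);
  return forkSomething()
    .then(result => {
      tab.location = result.url;
    })
    .catch(() => {
      tab.close();
    });
}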
@@ -6,7 +6,6 @@ import Link from "@/components/Link";
import routeWithUserSession from "@/components/ApplicationArea/routeWithUserSession";
import EmptyState, { EmptyStateHelpMessage } from "@/components/empty-state/EmptyState";
import DynamicComponent from "@/components/DynamicComponent";
import BeaconConsent from "@/components/BeaconConsent";
import PlainButton from "@/components/PlainButton";

import { axios } from "@/services/axios";

@@ -89,7 +88,6 @@ export default function Home() {
</DynamicComponent>
<DynamicComponent name="HomeExtra" />
<DashboardAndQueryFavoritesList />
<BeaconConsent />
</div>
</div>
);
@@ -380,7 +380,9 @@ function QuerySource(props) {
<QueryVisualizationTabs
queryResult={queryResult}
visualizations={query.visualizations}
showNewVisualizationButton={queryFlags.canEdit && queryResultData.status === ExecutionStatus.DONE}
showNewVisualizationButton={
queryFlags.canEdit && queryResultData.status === ExecutionStatus.FINISHED
}
canDeleteVisualizations={queryFlags.canEdit}
selectedTab={selectedVisualization}
onChangeTab={setSelectedVisualization}
@@ -37,9 +37,10 @@

&.active {
overflow: visible;
max-height: unset !important;
.ant-input {
resize: vertical;
max-height: 150px - 15px * 2;
height: 30vh;
}
}
}
@@ -165,7 +165,7 @@ function QueryView(props) {
<QueryVisualizationTabs
queryResult={queryResult}
visualizations={query.visualizations}
showNewVisualizationButton={queryFlags.canEdit && queryResultData.status === ExecutionStatus.DONE}
showNewVisualizationButton={queryFlags.canEdit && queryResultData.status === ExecutionStatus.FINISHED}
canDeleteVisualizations={queryFlags.canEdit}
selectedTab={selectedVisualization}
onChangeTab={setSelectedVisualization}
@@ -1,37 +1,45 @@
import { includes } from "lodash";
import React from "react";
import PropTypes from "prop-types";
import Alert from "antd/lib/alert";
import Button from "antd/lib/button";
import Timer from "@/components/Timer";
import { ExecutionStatus } from "@/services/query-result";

export default function QueryExecutionStatus({ status, updatedAt, error, isCancelling, onCancel }) {
const alertType = status === "failed" ? "error" : "info";
const showTimer = status !== "failed" && updatedAt;
const isCancelButtonAvailable = includes(["waiting", "processing"], status);
const alertType = status === ExecutionStatus.FAILED ? "error" : "info";
const showTimer = status !== ExecutionStatus.FAILED && updatedAt;
const isCancelButtonAvailable = [
ExecutionStatus.SCHEDULED,
ExecutionStatus.QUEUED,
ExecutionStatus.STARTED,
ExecutionStatus.DEFERRED,
].includes(status);
let message = isCancelling ? <React.Fragment>Cancelling…</React.Fragment> : null;

switch (status) {
case "waiting":
case ExecutionStatus.QUEUED:
if (!isCancelling) {
message = <React.Fragment>Query in queue…</React.Fragment>;
}
break;
case "processing":
case ExecutionStatus.STARTED:
if (!isCancelling) {
message = <React.Fragment>Executing query…</React.Fragment>;
}
break;
case "loading-result":
case ExecutionStatus.LOADING_RESULT:
message = <React.Fragment>Loading results…</React.Fragment>;
break;
case "failed":
case ExecutionStatus.FAILED:
message = (
<React.Fragment>
Error running query: <strong>{error}</strong>
</React.Fragment>
);
break;
case ExecutionStatus.CANCELED:
message = <React.Fragment>Query was canceled</React.Fragment>;
break;
// no default
}

@@ -66,7 +74,7 @@ QueryExecutionStatus.propTypes = {
};

QueryExecutionStatus.defaultProps = {
status: "waiting",
status: ExecutionStatus.QUEUED,
updatedAt: null,
error: null,
isCancelling: true,
@@ -1,38 +0,0 @@
import React from "react";
import Form from "antd/lib/form";
import Checkbox from "antd/lib/checkbox";
import Skeleton from "antd/lib/skeleton";
import HelpTrigger from "@/components/HelpTrigger";
import DynamicComponent from "@/components/DynamicComponent";
import { SettingsEditorPropTypes, SettingsEditorDefaultProps } from "../prop-types";

export default function BeaconConsentSettings(props) {
const { values, onChange, loading } = props;

return (
<DynamicComponent name="OrganizationSettings.BeaconConsentSettings" {...props}>
<Form.Item
label={
<span>
Anonymous Usage Data Sharing
<HelpTrigger className="m-l-5 m-r-5" type="USAGE_DATA_SHARING" />
</span>
}>
{loading ? (
<Skeleton title={{ width: 300 }} paragraph={false} active />
) : (
<Checkbox
name="beacon_consent"
checked={values.beacon_consent}
onChange={e => onChange({ beacon_consent: e.target.checked })}>
Help Redash improve by automatically sending anonymous usage data
</Checkbox>
)}
</Form.Item>
</DynamicComponent>
);
}

BeaconConsentSettings.propTypes = SettingsEditorPropTypes;

BeaconConsentSettings.defaultProps = SettingsEditorDefaultProps;
@@ -4,7 +4,6 @@ import DynamicComponent from "@/components/DynamicComponent";
import FormatSettings from "./FormatSettings";
import PlotlySettings from "./PlotlySettings";
import FeatureFlagsSettings from "./FeatureFlagsSettings";
import BeaconConsentSettings from "./BeaconConsentSettings";

export default function GeneralSettings(props) {
return (

@@ -14,7 +13,6 @@ export default function GeneralSettings(props) {
<FormatSettings {...props} />
<PlotlySettings {...props} />
<FeatureFlagsSettings {...props} />
<BeaconConsentSettings {...props} />
</DynamicComponent>
);
}
@@ -172,6 +172,7 @@ const DashboardService = {
favorites: params => axios.get("api/dashboards/favorites", { params }).then(transformResponse),
favorite: ({ id }) => axios.post(`api/dashboards/${id}/favorite`),
unfavorite: ({ id }) => axios.delete(`api/dashboards/${id}/favorite`),
fork: ({ id }) => axios.post(`api/dashboards/${id}/fork`, { id }).then(transformResponse),
};

_.extend(Dashboard, DashboardService);

@@ -265,3 +266,7 @@ Dashboard.prototype.favorite = function favorite() {
Dashboard.prototype.unfavorite = function unfavorite() {
return Dashboard.unfavorite(this);
};

Dashboard.prototype.getUrl = function getUrl() {
return urlForDashboard(this);
};
@@ -50,18 +50,15 @@ const QueryResultResource = {
};

export const ExecutionStatus = {
WAITING: "waiting",
PROCESSING: "processing",
DONE: "done",
QUEUED: "queued",
STARTED: "started",
FINISHED: "finished",
FAILED: "failed",
LOADING_RESULT: "loading-result",
};

const statuses = {
1: ExecutionStatus.WAITING,
2: ExecutionStatus.PROCESSING,
3: ExecutionStatus.DONE,
4: ExecutionStatus.FAILED,
CANCELED: "canceled",
DEFERRED: "deferred",
SCHEDULED: "scheduled",
STOPPED: "stopped",
};

function handleErrorResponse(queryResult, error) {

@@ -80,7 +77,7 @@ function handleErrorResponse(queryResult, error) {
queryResult.update({
job: {
error: "cached query result unavailable, please execute again.",
status: 4,
status: ExecutionStatus.FAILED,
},
});
return;

@@ -91,7 +88,7 @@ function handleErrorResponse(queryResult, error) {
queryResult.update({
job: {
error: get(error, "response.data.message", "Unknown error occurred. Please try again later."),
status: 4,
status: ExecutionStatus.FAILED,
},
});
}

@@ -102,11 +99,19 @@ function sleep(ms) {

export function fetchDataFromJob(jobId, interval = 1000) {
return axios.get(`api/jobs/${jobId}`).then(data => {
const status = statuses[data.job.status];
if (status === ExecutionStatus.WAITING || status === ExecutionStatus.PROCESSING) {
const status = data.job.status;
if (
[ExecutionStatus.QUEUED, ExecutionStatus.STARTED, ExecutionStatus.SCHEDULED, ExecutionStatus.DEFERRED].includes(
status
)
) {
return sleep(interval).then(() => fetchDataFromJob(data.job.id));
} else if (status === ExecutionStatus.DONE) {
return data.job.result;
} else if (status === ExecutionStatus.FINISHED) {
return data.job.result_id;
} else if (status === ExecutionStatus.CANCELED) {
return Promise.reject("Job was canceled");
} else if (status === ExecutionStatus.STOPPED) {
return Promise.reject("Job was stopped");
} else if (status === ExecutionStatus.FAILED) {
return Promise.reject(data.job.error);
}

@@ -122,7 +127,7 @@ class QueryResult {
this.deferred = defer();
this.job = {};
this.query_result = {};
this.status = "waiting";
this.status = ExecutionStatus.QUEUED;

this.updatedAt = moment();

@@ -138,8 +143,8 @@ class QueryResult {
extend(this, props);

if ("query_result" in props) {
this.status = ExecutionStatus.DONE;
this.deferred.onStatusChange(ExecutionStatus.DONE);
this.status = ExecutionStatus.FINISHED;
this.deferred.onStatusChange(ExecutionStatus.FINISHED);

const columnTypes = {};

@@ -183,11 +188,10 @@ class QueryResult {
});

this.deferred.resolve(this);
} else if (this.job.status === 3 || this.job.status === 2) {
this.deferred.onStatusChange(ExecutionStatus.PROCESSING);
this.status = "processing";
} else if (this.job.status === 4) {
this.status = statuses[this.job.status];
} else if (this.job.status === ExecutionStatus.STARTED || this.job.status === ExecutionStatus.FINISHED) {
this.status = ExecutionStatus.STARTED;
} else if (this.job.status === ExecutionStatus.FAILED) {
this.status = this.job.status;
this.deferred.reject(new QueryResultError(this.job.error));
} else {
this.deferred.onStatusChange(undefined);

@@ -211,7 +215,7 @@ class QueryResult {
if (this.isLoadingResult) {
return ExecutionStatus.LOADING_RESULT;
}
return this.status || statuses[this.job.status];
return this.status || this.job.status;
}

getError() {

@@ -374,7 +378,7 @@ class QueryResult {
this.isLoadingResult = true;
this.deferred.onStatusChange(ExecutionStatus.LOADING_RESULT);

QueryResultResource.get({ id: this.job.query_result_id })
QueryResultResource.get({ id: this.job.result_id })
.then(response => {
this.update(response);
this.isLoadingResult = false;

@@ -389,7 +393,7 @@ class QueryResult {
this.update({
job: {
error: "failed communicating with server. Please check your Internet connection and try again.",
status: 4,
status: ExecutionStatus.FAILED,
},
});
this.isLoadingResult = false;

@@ -413,9 +417,9 @@ class QueryResult {
.then(jobResponse => {
this.update(jobResponse);

if (this.getStatus() === "processing" && this.job.query_result_id && this.job.query_result_id !== "None") {
if (this.getStatus() === ExecutionStatus.STARTED && this.job.result_id && this.job.result_id !== "None") {
loadResult();
} else if (this.getStatus() !== "failed") {
} else if (this.getStatus() !== ExecutionStatus.FAILED) {
const waitTime = tryNumber > 10 ? 3000 : 500;
setTimeout(() => {
this.refreshStatus(query, parameters, tryNumber + 1);

@@ -428,7 +432,7 @@ class QueryResult {
this.update({
job: {
error: "failed communicating with server. Please check your Internet connection and try again.",
status: 4,
status: ExecutionStatus.FAILED,
},
});
});
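Illustrative sketch, not part of the changeset: the hunks above drop the old numeric/legacy statuses in favor of the backend job status strings. Per the substitutions shown ("waiting" becomes QUEUED, "processing" becomes STARTED, "done" becomes FINISHED), code that still holds a legacy string could normalize it along these lines:

// Compatibility map derived from the replacements visible in the diff above.
const LEGACY_TO_EXECUTION_STATUS = {
  waiting: "queued",
  processing: "started",
  done: "finished",
  failed: "failed",
};

function normalizeStatus(status) {
  return LEGACY_TO_EXECUTION_STATUS[status] || status;
}

console.log(normalizeStatus("done")); // "finished"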
@@ -2,6 +2,7 @@ import moment from "moment";
import debug from "debug";
import Mustache from "mustache";
import { axios } from "@/services/axios";
import { ExecutionStatus } from "@/services/query-result";
import {
zipObject,
isEmpty,

@@ -103,7 +104,7 @@ export class Query {
return new QueryResult({
job: {
error: `missing ${valuesWord} for ${missingParams.join(", ")} ${paramsWord}.`,
status: 4,
status: ExecutionStatus.FAILED,
},
});
}

@@ -360,7 +361,7 @@ export class QueryResultError {

// eslint-disable-next-line class-methods-use-this
getStatus() {
return "failed";
return ExecutionStatus.FAILED;
}

// eslint-disable-next-line class-methods-use-this
@@ -1,6 +1,5 @@
/* eslint-disable import/no-extraneous-dependencies, no-console */
const { find } = require("lodash");
const atob = require("atob");
const { execSync } = require("child_process");
const { get, post } = require("request").defaults({ jar: true });
const { seedData } = require("./seed-data");

@@ -44,44 +43,32 @@ function seedDatabase(seedValues) {

function buildServer() {
console.log("Building the server...");
execSync("docker-compose -p cypress build", { stdio: "inherit" });
execSync("docker compose build", { stdio: "inherit" });
}

function startServer() {
console.log("Starting the server...");
execSync("docker-compose -p cypress up -d", { stdio: "inherit" });
execSync("docker-compose -p cypress run server create_db", { stdio: "inherit" });
execSync("docker compose up -d", { stdio: "inherit" });
execSync("docker compose run server create_db", { stdio: "inherit" });
}

function stopServer() {
console.log("Stopping the server...");
execSync("docker-compose -p cypress down", { stdio: "inherit" });
execSync("docker compose down", { stdio: "inherit" });
}

function runCypressCI() {
const {
PERCY_TOKEN_ENCODED,
CYPRESS_PROJECT_ID_ENCODED,
CYPRESS_RECORD_KEY_ENCODED,
GITHUB_REPOSITORY,
CYPRESS_OPTIONS, // eslint-disable-line no-unused-vars
} = process.env;

if (GITHUB_REPOSITORY === "getredash/redash") {
if (PERCY_TOKEN_ENCODED) {
process.env.PERCY_TOKEN = atob(`${PERCY_TOKEN_ENCODED}`);
}
if (CYPRESS_PROJECT_ID_ENCODED) {
process.env.CYPRESS_PROJECT_ID = atob(`${CYPRESS_PROJECT_ID_ENCODED}`);
}
if (CYPRESS_RECORD_KEY_ENCODED) {
process.env.CYPRESS_RECORD_KEY = atob(`${CYPRESS_RECORD_KEY_ENCODED}`);
}
process.env.CYPRESS_OPTIONS = "--record";
}

execSync(
"COMMIT_INFO_MESSAGE=$(git show -s --format=%s) docker-compose run --name cypress cypress ./node_modules/.bin/percy exec -t 300 -- ./node_modules/.bin/cypress run $CYPRESS_OPTIONS",
"docker compose run --name cypress cypress ./node_modules/.bin/percy exec -t 300 -- ./node_modules/.bin/cypress run $CYPRESS_OPTIONS",
{ stdio: "inherit" }
);
}
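Illustrative sketch, not part of the changeset: with the switch from docker-compose to `docker compose`, service selection moves to Compose profiles (see compose.yaml further down and the COMPOSE_PROFILES=local cypress script in package.json). A minimal Node fragment showing how a profile could be set before shelling out; the "e2e" value is just an example taken from the profiles defined in compose.yaml:

// Assumed invocation pattern, not code from the repository.
const { execSync } = require("child_process");

process.env.COMPOSE_PROFILES = process.env.COMPOSE_PROFILES || "e2e";
execSync("docker compose up -d", { stdio: "inherit" });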
@@ -53,11 +53,12 @@ describe("Dashboard Sharing", () => {
};

const dashboardUrl = this.dashboardUrl;
cy.createQuery({ options }).then(({ id: queryId }) => {
cy.createQuery({ options }).then(({ id: queryId, name: queryName }) => {
cy.visit(dashboardUrl);
editDashboard();
cy.getByTestId("AddWidgetButton").click();
cy.getByTestId("AddWidgetDialog").within(() => {
cy.get("input").type(queryName);
cy.get(`.query-selector-result[data-test="QueryId${queryId}"]`).click();
});
cy.contains("button", "Add to Dashboard").click();

@@ -178,11 +179,12 @@ describe("Dashboard Sharing", () => {
};

const dashboardUrl = this.dashboardUrl;
cy.createQuery({ options }).then(({ id: queryId }) => {
cy.createQuery({ options }).then(({ id: queryId, name: queryName }) => {
cy.visit(dashboardUrl);
editDashboard();
cy.getByTestId("AddWidgetButton").click();
cy.getByTestId("AddWidgetDialog").within(() => {
cy.get("input").type(queryName);
cy.get(`.query-selector-result[data-test="QueryId${queryId}"]`).click();
});
cy.contains("button", "Add to Dashboard").click();

@@ -18,11 +18,12 @@ describe("Widget", () => {
};

it("adds widget", function() {
cy.createQuery().then(({ id: queryId }) => {
cy.createQuery().then(({ id: queryId, name: queryName }) => {
cy.visit(this.dashboardUrl);
editDashboard();
cy.getByTestId("AddWidgetButton").click();
cy.getByTestId("AddWidgetDialog").within(() => {
cy.get("input").type(queryName);
cy.get(`.query-selector-result[data-test="QueryId${queryId}"]`).click();
});
cy.contains("button", "Add to Dashboard").click();
compose.base.yaml (new file, 24 lines)
@@ -0,0 +1,24 @@
services:
.redash:
build:
context: .
args:
FRONTEND_BUILD_MODE: ${FRONTEND_BUILD_MODE:-2}
INSTALL_GROUPS: ${INSTALL_GROUPS:-main,all_ds,dev}
volumes:
- $PWD:${SERVER_MOUNT:-/ignore}
command: manage version
environment:
REDASH_LOG_LEVEL: INFO
REDASH_REDIS_URL: redis://redis:6379/0
REDASH_DATABASE_URL: postgresql://postgres@postgres/postgres
REDASH_RATELIMIT_ENABLED: false
REDASH_MAIL_DEFAULT_SENDER: redash@example.com
REDASH_MAIL_SERVER: email
REDASH_MAIL_PORT: 1025
REDASH_ENFORCE_CSRF: true
REDASH_COOKIE_SECRET: ${REDASH_COOKIE_SECRET}
REDASH_SECRET_KEY: ${REDASH_SECRET_KEY}
REDASH_PRODUCTION: ${REDASH_PRODUCTION:-true}
env_file:
- .env
compose.yaml (new file, 81 lines)
@@ -0,0 +1,81 @@
services:
server:
extends:
file: compose.base.yaml
service: .redash
command: server
depends_on:
- postgres
- redis
ports:
- "${REDASH_PORT:-5001}:5000"
- "5678:5678"
environment:
PYTHONUNBUFFERED: 0
scheduler:
extends:
file: compose.base.yaml
service: .redash
profiles:
- e2e
- local
command: scheduler
depends_on:
- server
worker:
extends:
file: compose.base.yaml
service: .redash
profiles:
- e2e
- local
command: worker
depends_on:
- server
environment:
PYTHONUNBUFFERED: 0
redis:
image: redis:7-alpine
restart: unless-stopped
postgres:
image: postgres:16-alpine
ports:
- "${POSTGRES_PORT:-15432}:5432"
# The following turns the DB into less durable, but gains significant performance improvements for the tests run (x3
# improvement on my personal machine). We should consider moving this into a dedicated Docker Compose configuration for
# tests.
command: postgres -c fsync=off -c full_page_writes=off -c synchronous_commit=OFF
restart: unless-stopped
environment:
POSTGRES_HOST_AUTH_METHOD: trust
email:
image: maildev/maildev
ports:
- "1080:1080"
- "1025:1025"
restart: unless-stopped
cypress:
ipc: host
build:
context: .
dockerfile: Dockerfile.cypress
profiles:
- e2e
depends_on:
- server
- worker
- scheduler
environment:
CYPRESS_baseUrl: http://server:5000
PERCY_TOKEN: ${PERCY_TOKEN:-""}
PERCY_BRANCH: ${PERCY_BRANCH:-""}
PERCY_COMMIT: ${PERCY_COMMIT:-""}
PERCY_PULL_REQUEST: ${PERCY_PULL_REQUEST:-}
COMMIT_INFO_BRANCH: ${COMMIT_INFO_BRANCH:-""}
COMMIT_INFO_MESSAGE: ${COMMIT_INFO_MESSAGE:-""}
COMMIT_INFO_AUTHOR: ${COMMIT_INFO_AUTHOR:-""}
COMMIT_INFO_SHA: ${COMMIT_INFO_SHA:-""}
COMMIT_INFO_REMOTE: ${COMMIT_INFO_REMOTE:-""}
CYPRESS_PROJECT_ID: ${CYPRESS_PROJECT_ID:-""}
CYPRESS_RECORD_KEY: ${CYPRESS_RECORD_KEY:-""}
CYPRESS_COVERAGE: ${CYPRESS_COVERAGE:-true}
@@ -1,71 +0,0 @@
# This configuration file is for the **development** setup.
# For a production example please refer to getredash/setup repository on GitHub.
version: "2.2"
x-redash-service: &redash-service
build:
context: .
args:
skip_frontend_build: "true" # set to empty string to build
volumes:
- .:/app
env_file:
- .env
x-redash-environment: &redash-environment
REDASH_LOG_LEVEL: "INFO"
REDASH_REDIS_URL: "redis://redis:6379/0"
REDASH_DATABASE_URL: "postgresql://postgres@postgres/postgres"
REDASH_RATELIMIT_ENABLED: "false"
REDASH_MAIL_DEFAULT_SENDER: "redash@example.com"
REDASH_MAIL_SERVER: "email"
REDASH_MAIL_PORT: 1025
REDASH_ENFORCE_CSRF: "true"
REDASH_GUNICORN_TIMEOUT: 60
# Set secret keys in the .env file
services:
server:
<<: *redash-service
command: dev_server
depends_on:
- postgres
- redis
ports:
- "5001:5000"
- "5678:5678"
environment:
<<: *redash-environment
PYTHONUNBUFFERED: 0
scheduler:
<<: *redash-service
command: dev_scheduler
depends_on:
- server
environment:
<<: *redash-environment
worker:
<<: *redash-service
command: dev_worker
depends_on:
- server
environment:
<<: *redash-environment
PYTHONUNBUFFERED: 0
redis:
image: redis:7-alpine
restart: unless-stopped
postgres:
image: pgautoupgrade/pgautoupgrade:15-alpine3.8
ports:
- "15432:5432"
# The following turns the DB into less durable, but gains significant performance improvements for the tests run (x3
# improvement on my personal machine). We should consider moving this into a dedicated Docker Compose configuration for
# tests.
command: "postgres -c fsync=off -c full_page_writes=off -c synchronous_commit=OFF"
restart: unless-stopped
environment:
POSTGRES_HOST_AUTH_METHOD: "trust"
email:
image: maildev/maildev
ports:
- "1080:1080"
- "1025:1025"
restart: unless-stopped
@@ -7,7 +7,7 @@ Create Date: 2020-12-23 21:35:32.766354
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import JSON

# revision identifiers, used by Alembic.
revision = '0ec979123ba4'

@@ -18,7 +18,7 @@ depends_on = None

def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('dashboards', sa.Column('options', postgresql.JSON(astext_type=sa.Text()), server_default='{}', nullable=False))
op.add_column('dashboards', sa.Column('options', JSON(astext_type=sa.Text()), server_default='{}', nullable=False))
# ### end Alembic commands ###
@@ -10,8 +10,7 @@ import json
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table

from redash.models import MutableDict, PseudoJSON
from redash.models import MutableDict

# revision identifiers, used by Alembic.

@@ -41,7 +40,7 @@ def upgrade():
"queries",
sa.Column(
"schedule",
MutableDict.as_mutable(PseudoJSON),
sa.Text(),
nullable=False,
server_default=json.dumps({}),
),

@@ -51,7 +50,7 @@ def upgrade():
queries = table(
"queries",
sa.Column("id", sa.Integer, primary_key=True),
sa.Column("schedule", MutableDict.as_mutable(PseudoJSON)),
sa.Column("schedule", sa.Text()),
sa.Column("old_schedule", sa.String(length=10)),
)

@@ -85,7 +84,7 @@ def downgrade():
"queries",
sa.Column(
"old_schedule",
MutableDict.as_mutable(PseudoJSON),
sa.Text(),
nullable=False,
server_default=json.dumps({}),
),

@@ -93,8 +92,8 @@ def downgrade():

queries = table(
"queries",
sa.Column("schedule", MutableDict.as_mutable(PseudoJSON)),
sa.Column("old_schedule", MutableDict.as_mutable(PseudoJSON)),
sa.Column("schedule", sa.Text()),
sa.Column("old_schedule", sa.Text()),
)

op.execute(queries.update().values({"old_schedule": queries.c.schedule}))

@@ -106,7 +105,7 @@ def downgrade():
"queries",
sa.Column("id", sa.Integer, primary_key=True),
sa.Column("schedule", sa.String(length=10)),
sa.Column("old_schedule", MutableDict.as_mutable(PseudoJSON)),
sa.Column("old_schedule", sa.Text()),
)

conn = op.get_bind()
@@ -0,0 +1,135 @@
"""change type of json fields from varchar to json

Revision ID: 7205816877ec
Revises: 7ce5925f832b
Create Date: 2024-01-03 13:55:18.885021

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import JSONB, JSON


# revision identifiers, used by Alembic.
revision = '7205816877ec'
down_revision = '7ce5925f832b'
branch_labels = None
depends_on = None


def upgrade():
connection = op.get_bind()
op.alter_column('queries', 'options',
existing_type=sa.Text(),
type_=JSONB(astext_type=sa.Text()),
nullable=True,
postgresql_using='options::jsonb',
server_default=sa.text("'{}'::jsonb"))
op.alter_column('queries', 'schedule',
existing_type=sa.Text(),
type_=JSONB(astext_type=sa.Text()),
nullable=True,
postgresql_using='schedule::jsonb',
server_default=sa.text("'{}'::jsonb"))
op.alter_column('events', 'additional_properties',
existing_type=sa.Text(),
type_=JSONB(astext_type=sa.Text()),
nullable=True,
postgresql_using='additional_properties::jsonb',
server_default=sa.text("'{}'::jsonb"))
op.alter_column('organizations', 'settings',
existing_type=sa.Text(),
type_=JSONB(astext_type=sa.Text()),
nullable=True,
postgresql_using='settings::jsonb',
server_default=sa.text("'{}'::jsonb"))
op.alter_column('alerts', 'options',
existing_type=JSON(astext_type=sa.Text()),
type_=JSONB(astext_type=sa.Text()),
nullable=True,
postgresql_using='options::jsonb',
server_default=sa.text("'{}'::jsonb"))
op.alter_column('dashboards', 'options',
existing_type=JSON(astext_type=sa.Text()),
type_=JSONB(astext_type=sa.Text()),
postgresql_using='options::jsonb',
server_default=sa.text("'{}'::jsonb"))
op.alter_column('dashboards', 'layout',
existing_type=sa.Text(),
type_=JSONB(astext_type=sa.Text()),
postgresql_using='layout::jsonb',
server_default=sa.text("'{}'::jsonb"))
op.alter_column('changes', 'change',
existing_type=JSON(astext_type=sa.Text()),
type_=JSONB(astext_type=sa.Text()),
postgresql_using='change::jsonb',
server_default=sa.text("'{}'::jsonb"))
op.alter_column('visualizations', 'options',
existing_type=sa.Text(),
type_=JSONB(astext_type=sa.Text()),
postgresql_using='options::jsonb',
server_default=sa.text("'{}'::jsonb"))
op.alter_column('widgets', 'options',
existing_type=sa.Text(),
type_=JSONB(astext_type=sa.Text()),
postgresql_using='options::jsonb',
server_default=sa.text("'{}'::jsonb"))


def downgrade():
connection = op.get_bind()
op.alter_column('queries', 'options',
existing_type=JSONB(astext_type=sa.Text()),
type_=sa.Text(),
postgresql_using='options::text',
existing_nullable=True,
server_default=sa.text("'{}'::text"))
op.alter_column('queries', 'schedule',
existing_type=JSONB(astext_type=sa.Text()),
type_=sa.Text(),
postgresql_using='schedule::text',
existing_nullable=True,
server_default=sa.text("'{}'::text"))
op.alter_column('events', 'additional_properties',
existing_type=JSONB(astext_type=sa.Text()),
type_=sa.Text(),
postgresql_using='additional_properties::text',
existing_nullable=True,
server_default=sa.text("'{}'::text"))
op.alter_column('organizations', 'settings',
existing_type=JSONB(astext_type=sa.Text()),
type_=sa.Text(),
postgresql_using='settings::text',
existing_nullable=True,
server_default=sa.text("'{}'::text"))
op.alter_column('alerts', 'options',
existing_type=JSONB(astext_type=sa.Text()),
type_=JSON(astext_type=sa.Text()),
postgresql_using='options::json',
existing_nullable=True,
server_default=sa.text("'{}'::json"))
op.alter_column('dashboards', 'options',
existing_type=JSONB(astext_type=sa.Text()),
type_=JSON(astext_type=sa.Text()),
postgresql_using='options::json',
server_default=sa.text("'{}'::json"))
op.alter_column('dashboards', 'layout',
existing_type=JSONB(astext_type=sa.Text()),
type_=sa.Text(),
postgresql_using='layout::text',
server_default=sa.text("'{}'::text"))
op.alter_column('changes', 'change',
existing_type=JSONB(astext_type=sa.Text()),
type_=JSON(astext_type=sa.Text()),
postgresql_using='change::json',
server_default=sa.text("'{}'::json"))
op.alter_column('visualizations', 'options',
type_=sa.Text(),
existing_type=JSONB(astext_type=sa.Text()),
postgresql_using='options::text',
server_default=sa.text("'{}'::text"))
op.alter_column('widgets', 'options',
type_=sa.Text(),
existing_type=JSONB(astext_type=sa.Text()),
postgresql_using='options::text',
server_default=sa.text("'{}'::text"))
@@ -7,10 +7,9 @@ Create Date: 2019-01-17 13:22:21.729334
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from sqlalchemy.sql import table

from redash.models import MutableDict, PseudoJSON
from redash.models import MutableDict

# revision identifiers, used by Alembic.
revision = "73beceabb948"

@@ -43,7 +42,7 @@ def upgrade():
queries = table(
"queries",
sa.Column("id", sa.Integer, primary_key=True),
sa.Column("schedule", MutableDict.as_mutable(PseudoJSON)),
sa.Column("schedule", sa.Text()),
)

conn = op.get_bind()
@@ -0,0 +1,25 @@
"""create sqlalchemy_searchable expressions

Revision ID: 7ce5925f832b
Revises: 1038c2174f5d
Create Date: 2023-09-29 16:48:29.517762

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy_searchable import sql_expressions


# revision identifiers, used by Alembic.
revision = '7ce5925f832b'
down_revision = '1038c2174f5d'
branch_labels = None
depends_on = None


def upgrade():
op.execute(sql_expressions)


def downgrade():
pass
@@ -6,7 +6,7 @@ Create Date: 2018-01-31 15:20:30.396533

"""

import simplejson
import json
from alembic import op
import sqlalchemy as sa

@@ -27,7 +27,7 @@ def upgrade():
dashboard_result = db.session.execute("SELECT id, layout FROM dashboards")
for dashboard in dashboard_result:
print("  Updating dashboard: {}".format(dashboard["id"]))
layout = simplejson.loads(dashboard["layout"])
layout = json.loads(dashboard["layout"])

print("  Building widgets map:")
widgets = {}

@@ -53,7 +53,7 @@ def upgrade():
if widget is None:
continue

options = simplejson.loads(widget["options"]) or {}
options = json.loads(widget["options"]) or {}
options["position"] = {
"row": row_index,
"col": column_index * column_size,

@@ -62,7 +62,7 @@ def upgrade():

db.session.execute(
"UPDATE widgets SET options=:options WHERE id=:id",
{"options": simplejson.dumps(options), "id": widget_id},
{"options": json.dumps(options), "id": widget_id},
)

dashboard_result.close()
@@ -7,7 +7,7 @@ Create Date: 2019-01-31 09:21:31.517265
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import BYTEA
from sqlalchemy.sql import table
from sqlalchemy_utils.types.encrypted.encrypted_type import FernetEngine

@@ -15,10 +15,8 @@ from redash import settings
from redash.utils.configuration import ConfigurationContainer
from redash.models.types import (
EncryptedConfiguration,
Configuration,
MutableDict,
MutableList,
PseudoJSON,
)

# revision identifiers, used by Alembic.

@@ -31,7 +29,7 @@ depends_on = None
def upgrade():
op.add_column(
"data_sources",
sa.Column("encrypted_options", postgresql.BYTEA(), nullable=True),
sa.Column("encrypted_options", BYTEA(), nullable=True),
)

# copy values

@@ -46,7 +44,14 @@ def upgrade():
)
),
),
sa.Column("options", ConfigurationContainer.as_mutable(Configuration)),
sa.Column(
"options",
ConfigurationContainer.as_mutable(
EncryptedConfiguration(
sa.Text, settings.DATASOURCE_SECRET_KEY, FernetEngine
)
),
),
)

conn = op.get_bind()
@@ -9,7 +9,7 @@ import re
from funcy import flatten, compact
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import ARRAY
from redash import models

# revision identifiers, used by Alembic.

@@ -21,10 +21,10 @@ depends_on = None

def upgrade():
op.add_column(
"dashboards", sa.Column("tags", postgresql.ARRAY(sa.Unicode()), nullable=True)
"dashboards", sa.Column("tags", ARRAY(sa.Unicode()), nullable=True)
)
op.add_column(
"queries", sa.Column("tags", postgresql.ARRAY(sa.Unicode()), nullable=True)
"queries", sa.Column("tags", ARRAY(sa.Unicode()), nullable=True)
)
@@ -7,17 +7,14 @@ Create Date: 2020-12-14 21:42:48.661684
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import BYTEA
from sqlalchemy.sql import table
from sqlalchemy_utils.types.encrypted.encrypted_type import FernetEngine

from redash import settings
from redash.utils.configuration import ConfigurationContainer
from redash.models.base import key_type
from redash.models.types import (
EncryptedConfiguration,
Configuration,
)
from redash.models.types import EncryptedConfiguration


# revision identifiers, used by Alembic.

@@ -30,7 +27,7 @@ depends_on = None
def upgrade():
op.add_column(
"notification_destinations",
sa.Column("encrypted_options", postgresql.BYTEA(), nullable=True)
sa.Column("encrypted_options", BYTEA(), nullable=True)
)

# copy values

@@ -45,7 +42,14 @@ def upgrade():
)
),
),
sa.Column("options", ConfigurationContainer.as_mutable(Configuration)),
sa.Column(
"options",
ConfigurationContainer.as_mutable(
EncryptedConfiguration(
sa.Text, settings.DATASOURCE_SECRET_KEY, FernetEngine
)
),
),
)

conn = op.get_bind()
@@ -7,7 +7,7 @@ Create Date: 2018-11-08 16:12:17.023569
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import JSON

# revision identifiers, used by Alembic.
revision = "e7f8a917aa8e"

@@ -21,7 +21,7 @@ def upgrade():
"users",
sa.Column(
"details",
postgresql.JSON(astext_type=sa.Text()),
JSON(astext_type=sa.Text()),
server_default="{}",
nullable=True,
),
@@ -7,7 +7,7 @@ Create Date: 2022-01-31 15:24:16.507888
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import JSON, JSONB

from redash.models import db

@@ -23,8 +23,8 @@ def upgrade():

### commands auto generated by Alembic - please adjust! ###
op.alter_column('users', 'details',
existing_type=postgresql.JSON(astext_type=sa.Text()),
type_=postgresql.JSONB(astext_type=sa.Text()),
existing_type=JSON(astext_type=sa.Text()),
type_=JSONB(astext_type=sa.Text()),
existing_nullable=True,
existing_server_default=sa.text("'{}'::jsonb"))
### end Alembic commands ###

@@ -52,8 +52,8 @@ def downgrade():
connection.execute(update_query)
db.session.commit()
op.alter_column('users', 'details',
existing_type=postgresql.JSONB(astext_type=sa.Text()),
type_=postgresql.JSON(astext_type=sa.Text()),
existing_type=JSONB(astext_type=sa.Text()),
type_=JSON(astext_type=sa.Text()),
existing_nullable=True,
existing_server_default=sa.text("'{}'::json"))
@@ -6,7 +6,7 @@
command = "cd ../ && yarn cache clean && yarn --frozen-lockfile --network-concurrency 1 && yarn build && cd ./client"

[build.environment]
NODE_VERSION = "16.20.1"
NODE_VERSION = "18"
NETLIFY_USE_YARN = "true"
YARN_VERSION = "1.22.19"
CYPRESS_INSTALL_BINARY = "0"
package.json (24 lines changed)
@@ -1,20 +1,19 @@
{
"name": "redash-client",
"version": "23.09.0-dev",
"version": "24.05.0-dev",
"description": "The frontend part of Redash.",
"main": "index.js",
"scripts": {
"start": "npm-run-all --parallel watch:viz webpack-dev-server",
"clean": "rm -rf ./client/dist/",
"build:viz": "(cd viz-lib && yarn build:babel)",
"build": "yarn clean && yarn build:viz && NODE_ENV=production webpack",
"build:old-node-version": "yarn clean && NODE_ENV=production node --max-old-space-size=4096 node_modules/.bin/webpack",
"watch:app": "webpack watch --progress",
"build": "yarn clean && yarn build:viz && NODE_OPTIONS=--openssl-legacy-provider NODE_ENV=production webpack",
"watch:app": "NODE_OPTIONS=--openssl-legacy-provider webpack watch --progress",
"watch:viz": "(cd viz-lib && yarn watch:babel)",
"watch": "npm-run-all --parallel watch:*",
"webpack-dev-server": "webpack-dev-server",
"analyze": "yarn clean && BUNDLE_ANALYZER=on webpack",
"analyze:build": "yarn clean && NODE_ENV=production BUNDLE_ANALYZER=on webpack",
"analyze": "yarn clean && BUNDLE_ANALYZER=on NODE_OPTIONS=--openssl-legacy-provider webpack",
"analyze:build": "yarn clean && NODE_ENV=production BUNDLE_ANALYZER=on NODE_OPTIONS=--openssl-legacy-provider webpack",
"lint": "yarn lint:base --ext .js --ext .jsx --ext .ts --ext .tsx ./client",
"lint:fix": "yarn lint:base --fix --ext .js --ext .jsx --ext .ts --ext .tsx ./client",
"lint:base": "eslint --config ./client/.eslintrc.js --ignore-path ./client/.eslintignore",

@@ -25,7 +24,7 @@
"jest": "TZ=Africa/Khartoum jest",
"test": "run-s type-check jest",
"test:watch": "jest --watch",
"cypress": "node client/cypress/cypress.js",
"cypress": "COMPOSE_PROFILES=local node client/cypress/cypress.js",
"preinstall": "cd viz-lib && yarn link --link-folder ../.yarn",
"postinstall": "(cd viz-lib && yarn --frozen-lockfile && yarn build:babel) && yarn link --link-folder ./.yarn @redash/viz"
},

@@ -34,7 +33,8 @@
"url": "git+https://github.com/getredash/redash.git"
},
"engines": {
"node": ">14.16.0 <17.0.0",
"node": ">16.0 <21.0",
"npm": "please-use-yarn",
"yarn": "^1.22.10"
},
"author": "Redash Contributors",

@@ -53,7 +53,7 @@
"bootstrap": "^3.3.7",
"classnames": "^2.2.6",
"d3": "^3.5.17",
"debug": "^3.1.0",
"debug": "^3.2.7",
"dompurify": "^2.0.17",
"font-awesome": "^4.7.0",
"history": "^4.10.1",

@@ -62,7 +62,7 @@
"material-design-iconic-font": "^2.2.0",
"mousetrap": "^1.6.1",
"mustache": "^2.3.0",
"numbro": "^2.3.6",
"numeral": "^2.0.6",
"path-to-regexp": "^3.1.0",
"prop-types": "^15.6.1",
"query-string": "^6.9.0",

@@ -178,6 +178,10 @@
"viz-lib/**"
]
},
"browser": {
"fs": false,
"path": false
},
"//": "browserslist set to 'Async functions' compatibility",
"browserslist": [
"Edge >= 15",
poetry.lock (5303 lines, generated, new normal file)
pyproject.toml (170 changed lines)
@@ -10,9 +10,167 @@ force-exclude = '''
|
||||
)/
|
||||
'''
|
||||
|
||||
[tool.isort]
|
||||
py_version = 38
|
||||
multi_line_output = 3
|
||||
include_trailing_comma = true
|
||||
use_parentheses = true
|
||||
skip = "migrations"
|
||||
[tool.poetry]
|
||||
name = "redash"
|
||||
version = "24.05.0-dev"
|
||||
description = "Make Your Company Data Driven. Connect to any data source, easily visualize, dashboard and share your data."
|
||||
authors = ["Arik Fraimovich <arik@redash.io>"]
|
||||
# to be added to/removed from the mailing list, please reach out to Arik via the above email or Discord
|
||||
maintainers = [
|
||||
"Redash maintainers and contributors <maintainers@redash.io>",
|
||||
]
|
||||
readme = "README.md"
|
||||
|
||||
[tool.poetry.dependencies]
|
||||
python = ">=3.8,<3.11"
|
||||
advocate = "1.0.0"
|
||||
aniso8601 = "8.0.0"
|
||||
authlib = "0.15.5"
|
||||
backoff = "2.2.1"
|
||||
blinker = "1.6.2"
|
||||
click = "8.1.3"
|
||||
cryptography = "41.0.6"
|
||||
disposable-email-domains = ">=0.0.52"
|
||||
flask = "2.3.2"
|
||||
flask-limiter = "3.3.1"
|
||||
flask-login = "0.6.0"
|
||||
flask-mail = "0.9.1"
|
||||
flask-migrate = "2.5.2"
|
||||
flask-restful = "0.3.10"
|
||||
flask-sqlalchemy = "2.5.1"
|
||||
flask-talisman = "0.7.0"
|
||||
flask-wtf = "1.1.1"
|
||||
funcy = "1.13"
|
||||
gevent = "23.9.1"
|
||||
greenlet = "2.0.2"
|
||||
gunicorn = "22.0.0"
|
||||
httplib2 = "0.19.0"
|
||||
itsdangerous = "2.1.2"
|
||||
jinja2 = "3.1.3"
|
||||
jsonschema = "3.1.1"
|
||||
markupsafe = "2.1.1"
|
||||
maxminddb-geolite2 = "2018.703"
|
||||
parsedatetime = "2.4"
|
||||
passlib = "1.7.3"
|
||||
psycopg2-binary = "2.9.6"
|
||||
pyjwt = "2.4.0"
|
||||
pyopenssl = "23.2.0"
|
||||
pypd = "1.1.0"
|
||||
pysaml2 = "7.3.1"
|
||||
pystache = "0.6.0"
|
||||
python-dateutil = "2.8.0"
|
||||
python-dotenv = "0.19.2"
|
||||
pytz = ">=2019.3"
|
||||
pyyaml = "6.0.1"
|
||||
redis = "4.6.0"
|
||||
regex = "2023.8.8"
|
||||
requests = "2.31.0"
|
||||
restrictedpython = "6.2"
|
||||
rq = "1.16.1"
|
||||
rq-scheduler = "0.13.1"
|
||||
semver = "2.8.1"
|
||||
sentry-sdk = "1.28.1"
|
||||
sqlalchemy = "1.3.24"
|
||||
sqlalchemy-searchable = "1.2.0"
|
||||
sqlalchemy-utils = "0.34.2"
|
||||
sqlparse = "0.5.0"
|
||||
sshtunnel = "0.1.5"
|
||||
statsd = "3.3.0"
|
||||
supervisor = "4.1.0"
|
||||
supervisor-checks = "0.8.1"
|
||||
ua-parser = "0.18.0"
|
||||
urllib3 = "1.26.18"
|
||||
user-agents = "2.0"
|
||||
werkzeug = "2.3.8"
|
||||
wtforms = "2.2.1"
|
||||
xlsxwriter = "1.2.2"
|
||||
tzlocal = "4.3.1"
|
||||
|
||||
[tool.poetry.group.all_ds]
|
||||
optional = true
|
||||
|
||||
[tool.poetry.group.all_ds.dependencies]
|
||||
atsd-client = "3.0.5"
|
||||
azure-kusto-data = "0.0.35"
|
||||
boto3 = "1.28.8"
|
||||
botocore = "1.31.8"
|
||||
cassandra-driver = "3.21.0"
|
||||
certifi = ">=2019.9.11"
|
||||
cmem-cmempy = "21.2.3"
|
||||
databend-py = "0.4.6"
|
||||
databend-sqlalchemy = "0.2.4"
|
||||
google-api-python-client = "1.7.11"
|
||||
gspread = "5.11.2"
|
||||
impyla = "0.16.0"
|
||||
influxdb = "5.2.3"
|
||||
influxdb-client = "1.38.0"
|
||||
memsql = "3.2.0"
|
||||
mysqlclient = "2.1.1"
|
||||
nzalchemy = "^11.0.2"
|
||||
nzpy = ">=1.15"
|
||||
oauth2client = "4.1.3"
|
||||
openpyxl = "3.0.7"
|
||||
oracledb = "2.1.2"
|
||||
pandas = "1.3.4"
|
||||
phoenixdb = "0.7"
|
||||
pinotdb = ">=0.4.5"
|
||||
protobuf = "3.20.2"
|
||||
pyathena = ">=1.5.0,<=1.11.5"
|
||||
pydgraph = "2.0.2"
|
||||
pydruid = "0.5.7"
|
||||
pyexasol = "0.12.0"
|
||||
pyhive = "0.6.1"
|
||||
pyignite = "0.6.1"
|
||||
pymongo = { version = "4.6.3", extras = ["srv", "tls"] }
|
||||
pymssql = "2.2.8"
|
||||
pyodbc = "4.0.28"
|
||||
python-arango = "6.1.0"
|
||||
python-rapidjson = "1.1.0"
|
||||
requests-aws-sign = "0.1.5"
|
||||
sasl = ">=0.1.3"
|
||||
simple-salesforce = "0.74.3"
|
||||
snowflake-connector-python = "3.4.0"
|
||||
td-client = "1.0.0"
|
||||
thrift = ">=0.8.0"
|
||||
thrift-sasl = ">=0.1.0"
|
||||
trino = ">=0.305,<1.0"
|
||||
vertica-python = "1.1.1"
|
||||
xlrd = "2.0.1"
|
||||
e6data-python-connector = "1.1.9"
|
||||
|
||||
[tool.poetry.group.ldap3]
|
||||
optional = true
|
||||
|
||||
[tool.poetry.group.ldap3.dependencies]
|
||||
ldap3 = "2.9.1"
|
||||
|
||||
[tool.poetry.group.dev]
|
||||
optional = true
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
pytest = "7.4.0"
|
||||
coverage = "7.2.7"
|
||||
freezegun = "1.2.1"
|
||||
jwcrypto = "1.5.6"
|
||||
mock = "5.0.2"
|
||||
pre-commit = "3.3.3"
|
||||
ptpython = "3.0.23"
|
||||
ptvsd = "4.3.2"
|
||||
pytest-cov = "4.1.0"
|
||||
watchdog = "3.0.0"
|
||||
ruff = "0.0.289"
|
||||
|
||||
[build-system]
|
||||
requires = ["poetry-core"]
|
||||
build-backend = "poetry.core.masonry.api"
|
||||
|
||||
[tool.ruff]
|
||||
exclude = [".git", "viz-lib", "node_modules", "migrations"]
|
||||
ignore = ["E501"]
|
||||
select = ["C9", "E", "F", "W", "I001", "UP004"]
|
||||
|
||||
[tool.ruff.mccabe]
|
||||
max-complexity = 15
|
||||
|
||||
[tool.ruff.per-file-ignores]
|
||||
"__init__.py" = ["F401"]
|
||||
|
||||
@@ -14,7 +14,7 @@ from redash.app import create_app # noqa
|
||||
from redash.destinations import import_destinations
|
||||
from redash.query_runner import import_query_runners
|
||||
|
||||
__version__ = "23.09.0-dev"
|
||||
__version__ = "24.05.0-dev"
|
||||
|
||||
|
||||
if os.environ.get("REMOTE_DEBUG"):
|
||||
|
||||
@@ -36,14 +36,10 @@ def create_app():
|
||||
from .metrics import request as request_metrics
|
||||
from .models import db, users
|
||||
from .utils import sentry
|
||||
from .version_check import reset_new_version_status
|
||||
|
||||
sentry.init()
|
||||
app = Redash()
|
||||
|
||||
# Check and update the cached version for use by the client
|
||||
reset_new_version_status()
|
||||
|
||||
security.init_app(app)
|
||||
request_metrics.init_app(app)
|
||||
db.init_app(app)
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
import json
|
||||
import logging
|
||||
|
||||
import jwt
|
||||
import requests
|
||||
import simplejson
|
||||
|
||||
logger = logging.getLogger("jwt_auth")
|
||||
|
||||
@@ -25,7 +25,7 @@ def get_public_key_from_net(url):
|
||||
if "keys" in data:
|
||||
public_keys = []
|
||||
for key_dict in data["keys"]:
|
||||
public_key = jwt.algorithms.RSAAlgorithm.from_jwk(simplejson.dumps(key_dict))
|
||||
public_key = jwt.algorithms.RSAAlgorithm.from_jwk(json.dumps(key_dict))
|
||||
public_keys.append(public_key)
|
||||
|
||||
get_public_keys.key_cache[url] = public_keys
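The change above feeds PyJWT stdlib-json strings instead of simplejson ones when loading keys from a JWKS document. A hedged sketch of that parsing step in isolation (the function name and jwks_text argument are illustrative, not from the commit):

import json

import jwt


def public_keys_from_jwks(jwks_text):
    # Parse a JWKS document and build one RSA public key per entry,
    # re-serializing each key dict with the stdlib json module.
    data = json.loads(jwks_text)
    return [
        jwt.algorithms.RSAAlgorithm.from_jwk(json.dumps(key_dict))
        for key_dict in data.get("keys", [])
    ]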
|
||||
|
||||
@@ -11,7 +11,7 @@ try:
|
||||
except ImportError:
|
||||
if settings.LDAP_LOGIN_ENABLED:
|
||||
sys.exit(
|
||||
"The ldap3 library was not found. This is required to use LDAP authentication (see requirements.txt)."
|
||||
"The ldap3 library was not found. This is required to use LDAP authentication. Rebuild the Docker image installing the `ldap3` poetry dependency group."
|
||||
)
|
||||
|
||||
from redash.authentication import (
|
||||
|
||||
@@ -90,8 +90,8 @@ def get_saml_client(org):
|
||||
|
||||
saml_settings["metadata"] = {"inline": [metadata_inline]}
|
||||
|
||||
if acs_url is not None and acs_url != "":
|
||||
saml_settings["entityid"] = acs_url
|
||||
if entity_id is not None and entity_id != "":
|
||||
saml_settings["entityid"] = entity_id
|
||||
|
||||
if sp_settings:
|
||||
import json
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import json
|
||||
|
||||
import click
|
||||
import simplejson
|
||||
from flask import current_app
|
||||
from flask.cli import FlaskGroup, run_command, with_appcontext
|
||||
from rq import Connection
|
||||
@@ -53,7 +54,7 @@ def version():
|
||||
@manager.command()
|
||||
def status():
|
||||
with Connection(rq_redis_connection):
|
||||
print(simplejson.dumps(get_status(), indent=2))
|
||||
print(json.dumps(get_status(), indent=2))
|
||||
|
||||
|
||||
@manager.command()
|
||||
|
||||
@@ -5,7 +5,7 @@ logger = logging.getLogger(__name__)
|
||||
__all__ = ["BaseDestination", "register", "get_destination", "import_destinations"]
|
||||
|
||||
|
||||
class BaseDestination(object):
|
||||
class BaseDestination:
|
||||
deprecated = False
|
||||
|
||||
def __init__(self, configuration):
|
||||
|
||||
redash/destinations/datadog.py (93 lines, new normal file)
@@ -0,0 +1,93 @@
|
||||
import logging
|
||||
import os
|
||||
|
||||
import requests
|
||||
|
||||
from redash.destinations import BaseDestination, register
|
||||
from redash.utils import json_dumps
|
||||
|
||||
|
||||
class Datadog(BaseDestination):
|
||||
@classmethod
|
||||
def configuration_schema(cls):
|
||||
return {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"api_key": {"type": "string", "title": "API Key"},
|
||||
"tags": {"type": "string", "title": "Tags"},
|
||||
"priority": {"type": "string", "default": "normal", "title": "Priority"},
|
||||
# https://docs.datadoghq.com/integrations/faq/list-of-api-source-attribute-value/
|
||||
"source_type_name": {"type": "string", "default": "my_apps", "title": "Source Type Name"},
|
||||
},
|
||||
"secret": ["api_key"],
|
||||
"required": ["api_key"],
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def icon(cls):
|
||||
return "fa-datadog"
|
||||
|
||||
def notify(self, alert, query, user, new_state, app, host, metadata, options):
|
||||
# Documentation: https://docs.datadoghq.com/api/latest/events/#post-an-event
|
||||
if new_state == "triggered":
|
||||
alert_type = "error"
|
||||
if alert.custom_subject:
|
||||
title = alert.custom_subject
|
||||
else:
|
||||
title = f"{alert.name} just triggered"
|
||||
else:
|
||||
alert_type = "success"
|
||||
if alert.custom_subject:
|
||||
title = alert.custom_subject
|
||||
else:
|
||||
title = f"{alert.name} went back to normal"
|
||||
|
||||
if alert.custom_body:
|
||||
text = alert.custom_body
|
||||
else:
|
||||
text = f"{alert.name} changed state to {new_state}."
|
||||
|
||||
query_url = f"{host}/queries/{query.id}"
|
||||
alert_url = f"{host}/alerts/{alert.id}"
|
||||
text += f"\nQuery: {query_url}\nAlert: {alert_url}"
|
||||
|
||||
headers = {
|
||||
"Accept": "application/json",
|
||||
"Content-Type": "application/json",
|
||||
"DD-API-KEY": options.get("api_key"),
|
||||
}
|
||||
|
||||
body = {
|
||||
"title": title,
|
||||
"text": text,
|
||||
"alert_type": alert_type,
|
||||
"priority": options.get("priority"),
|
||||
"source_type_name": options.get("source_type_name"),
|
||||
"aggregation_key": f"redash:{alert_url}",
|
||||
"tags": [],
|
||||
}
|
||||
|
||||
tags = options.get("tags")
|
||||
if tags:
|
||||
body["tags"] = tags.split(",")
|
||||
body["tags"].extend(
|
||||
[
|
||||
"redash",
|
||||
f"query_id:{query.id}",
|
||||
f"alert_id:{alert.id}",
|
||||
]
|
||||
)
|
||||
|
||||
dd_host = os.getenv("DATADOG_HOST", "api.datadoghq.com")
|
||||
url = f"https://{dd_host}/api/v1/events"
|
||||
|
||||
try:
|
||||
resp = requests.post(url, headers=headers, data=json_dumps(body), timeout=5.0)
|
||||
logging.warning(resp.text)
|
||||
if resp.status_code != 202:
|
||||
logging.error(f"Datadog send ERROR. status_code => {resp.status_code}")
|
||||
except Exception as e:
|
||||
logging.exception("Datadog send ERROR: %s", e)
|
||||
|
||||
|
||||
register(Datadog)
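For reference, a stripped-down version of the request the new Datadog destination sends. The endpoint, headers, and the 202 success code come from the file above; the function name and argument values are placeholders:

import json
import os

import requests


def post_datadog_event(api_key, title, text, tags=None):
    # Events API endpoint, overridable through DATADOG_HOST just like the destination
    dd_host = os.getenv("DATADOG_HOST", "api.datadoghq.com")
    body = {"title": title, "text": text, "alert_type": "error", "tags": tags or []}
    resp = requests.post(
        f"https://{dd_host}/api/v1/events",
        headers={
            "Accept": "application/json",
            "Content-Type": "application/json",
            "DD-API-KEY": api_key,
        },
        data=json.dumps(body),
        timeout=5.0,
    )
    # The destination logs an error for anything other than HTTP 202
    return resp.status_code == 202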
|
||||
@@ -42,8 +42,8 @@ class Discord(BaseDestination):
|
||||
"inline": True,
|
||||
},
|
||||
]
|
||||
if alert.options.get("custom_body"):
|
||||
fields.append({"name": "Description", "value": alert.options["custom_body"]})
|
||||
if alert.custom_body:
|
||||
fields.append({"name": "Description", "value": alert.custom_body})
|
||||
if new_state == Alert.TRIGGERED_STATE:
|
||||
if alert.options.get("custom_subject"):
|
||||
text = alert.options["custom_subject"]
|
||||
|
||||
@@ -26,13 +26,13 @@ class Slack(BaseDestination):
|
||||
fields = [
|
||||
{
|
||||
"title": "Query",
|
||||
"type": "mrkdwn",
|
||||
"value": "{host}/queries/{query_id}".format(host=host, query_id=query.id),
|
||||
"short": True,
|
||||
},
|
||||
{
|
||||
"title": "Alert",
|
||||
"type": "mrkdwn",
|
||||
"value": "{host}/alerts/{alert_id}".format(host=host, alert_id=alert.id),
|
||||
"short": True,
|
||||
},
|
||||
]
|
||||
if alert.custom_body:
|
||||
@@ -50,7 +50,7 @@ class Slack(BaseDestination):
|
||||
payload = {"attachments": [{"text": text, "color": color, "fields": fields}]}
|
||||
|
||||
try:
|
||||
resp = requests.post(options.get("url"), data=json_dumps(payload), timeout=5.0)
|
||||
resp = requests.post(options.get("url"), data=json_dumps(payload).encode("utf-8"), timeout=5.0)
|
||||
logging.warning(resp.text)
|
||||
if resp.status_code != 200:
|
||||
logging.error("Slack send ERROR. status_code => {status}".format(status=resp.status_code))
|
||||
|
||||
redash/destinations/webex.py (138 lines, new normal file)
@@ -0,0 +1,138 @@
|
||||
import logging
|
||||
from copy import deepcopy
|
||||
|
||||
import requests
|
||||
|
||||
from redash.destinations import BaseDestination, register
|
||||
from redash.models import Alert
|
||||
|
||||
|
||||
class Webex(BaseDestination):
|
||||
@classmethod
|
||||
def configuration_schema(cls):
|
||||
return {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"webex_bot_token": {"type": "string", "title": "Webex Bot Token"},
|
||||
"to_person_emails": {
|
||||
"type": "string",
|
||||
"title": "People (comma-separated)",
|
||||
},
|
||||
"to_room_ids": {
|
||||
"type": "string",
|
||||
"title": "Rooms (comma-separated)",
|
||||
},
|
||||
},
|
||||
"secret": ["webex_bot_token"],
|
||||
"required": ["webex_bot_token"],
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def icon(cls):
|
||||
return "fa-webex"
|
||||
|
||||
@property
|
||||
def api_base_url(self):
|
||||
return "https://webexapis.com/v1/messages"
|
||||
|
||||
@staticmethod
|
||||
def formatted_attachments_template(subject, description, query_link, alert_link):
|
||||
return [
|
||||
{
|
||||
"contentType": "application/vnd.microsoft.card.adaptive",
|
||||
"content": {
|
||||
"$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
|
||||
"type": "AdaptiveCard",
|
||||
"version": "1.0",
|
||||
"body": [
|
||||
{
|
||||
"type": "ColumnSet",
|
||||
"columns": [
|
||||
{
|
||||
"type": "Column",
|
||||
"width": 4,
|
||||
"items": [
|
||||
{
|
||||
"type": "TextBlock",
|
||||
"text": {subject},
|
||||
"weight": "bolder",
|
||||
"size": "medium",
|
||||
"wrap": True,
|
||||
},
|
||||
{
|
||||
"type": "TextBlock",
|
||||
"text": {description},
|
||||
"isSubtle": True,
|
||||
"wrap": True,
|
||||
},
|
||||
{
|
||||
"type": "TextBlock",
|
||||
"text": f"Click [here]({query_link}) to check your query!",
|
||||
"wrap": True,
|
||||
"isSubtle": True,
|
||||
},
|
||||
{
|
||||
"type": "TextBlock",
|
||||
"text": f"Click [here]({alert_link}) to check your alert!",
|
||||
"wrap": True,
|
||||
"isSubtle": True,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
}
|
||||
],
|
||||
},
|
||||
}
|
||||
]
|
||||
|
||||
def notify(self, alert, query, user, new_state, app, host, metadata, options):
|
||||
# Documentation: https://developer.webex.com/docs/api/guides/cards
|
||||
|
||||
query_link = f"{host}/queries/{query.id}"
|
||||
alert_link = f"{host}/alerts/{alert.id}"
|
||||
|
||||
if new_state == Alert.TRIGGERED_STATE:
|
||||
subject = alert.custom_subject or f"{alert.name} just triggered"
|
||||
else:
|
||||
subject = f"{alert.name} went back to normal"
|
||||
|
||||
attachments = self.formatted_attachments_template(
|
||||
subject=subject, description=alert.custom_body, query_link=query_link, alert_link=alert_link
|
||||
)
|
||||
|
||||
template_payload = {"markdown": subject + "\n" + alert.custom_body, "attachments": attachments}
|
||||
|
||||
headers = {"Authorization": f"Bearer {options['webex_bot_token']}"}
|
||||
|
||||
api_destinations = {
|
||||
"toPersonEmail": options.get("to_person_emails"),
|
||||
"roomId": options.get("to_room_ids"),
|
||||
}
|
||||
|
||||
for payload_tag, destinations in api_destinations.items():
|
||||
if destinations is None:
|
||||
continue
|
||||
|
||||
# destinations is guaranteed to be a comma-separated string
|
||||
for destination_id in destinations.split(","):
|
||||
payload = deepcopy(template_payload)
|
||||
payload[payload_tag] = destination_id
|
||||
self.post_message(payload, headers)
|
||||
|
||||
def post_message(self, payload, headers):
|
||||
try:
|
||||
resp = requests.post(
|
||||
self.api_base_url,
|
||||
json=payload,
|
||||
headers=headers,
|
||||
timeout=5.0,
|
||||
)
|
||||
logging.warning(resp.text)
|
||||
if resp.status_code != 200:
|
||||
logging.error("Webex send ERROR. status_code => {status}".format(status=resp.status_code))
|
||||
except Exception as e:
|
||||
logging.exception(f"Webex send ERROR: {e}")
|
||||
|
||||
|
||||
register(Webex)
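notify() above fans a single alert out to every configured person and room, one API call each. A small sketch of just that fan-out step, using the same comma-separated inputs the configuration schema collects (the function name is illustrative):

from copy import deepcopy


def expand_destinations(template_payload, to_person_emails=None, to_room_ids=None):
    # Map each configured recipient kind to its Webex payload field and emit
    # one payload copy per recipient id or email.
    api_destinations = {"toPersonEmail": to_person_emails, "roomId": to_room_ids}
    payloads = []
    for payload_tag, destinations in api_destinations.items():
        if destinations is None:
            continue
        for destination_id in destinations.split(","):
            payload = deepcopy(template_payload)
            payload[payload_tag] = destination_id
            payloads.append(payload)
    return payloads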
|
||||
@@ -12,6 +12,7 @@ from redash.handlers.alerts import (
|
||||
from redash.handlers.base import org_scoped_rule
|
||||
from redash.handlers.dashboards import (
|
||||
DashboardFavoriteListResource,
|
||||
DashboardForkResource,
|
||||
DashboardListResource,
|
||||
DashboardResource,
|
||||
DashboardShareResource,
|
||||
@@ -190,6 +191,7 @@ api.add_org_resource(
|
||||
"/api/dashboards/<object_id>/favorite",
|
||||
endpoint="dashboard_favorite",
|
||||
)
|
||||
api.add_org_resource(DashboardForkResource, "/api/dashboards/<dashboard_id>/fork", endpoint="dashboard_fork")
|
||||
|
||||
api.add_org_resource(MyDashboardsResource, "/api/dashboards/my", endpoint="my_dashboards")
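The hunk above registers the new fork endpoint backed by DashboardForkResource. An illustrative client call follows; the host, API key, and dashboard id are placeholders, and the Key authorization scheme is Redash's usual API-key header:

import requests

resp = requests.post(
    "https://redash.example.com/api/dashboards/42/fork",
    headers={"Authorization": "Key YOUR_API_KEY"},
    timeout=10,
)
forked_dashboard = resp.json()  # serialized fork, including the copied widgets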
|
||||
|
||||
@@ -234,11 +236,11 @@ api.add_org_resource(
|
||||
)
|
||||
api.add_org_resource(
|
||||
QueryResultResource,
|
||||
"/api/query_results/<query_result_id>.<filetype>",
|
||||
"/api/query_results/<query_result_id>",
|
||||
"/api/query_results/<result_id>.<filetype>",
|
||||
"/api/query_results/<result_id>",
|
||||
"/api/queries/<query_id>/results",
|
||||
"/api/queries/<query_id>/results.<filetype>",
|
||||
"/api/queries/<query_id>/results/<query_result_id>.<filetype>",
|
||||
"/api/queries/<query_id>/results/<result_id>.<filetype>",
|
||||
endpoint="query_result",
|
||||
)
|
||||
api.add_org_resource(
|
||||
|
||||
@@ -15,7 +15,6 @@ from redash.authentication.account import (
|
||||
)
|
||||
from redash.handlers import routes
|
||||
from redash.handlers.base import json_response, org_scoped_rule
|
||||
from redash.version_check import get_latest_version
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -256,15 +255,11 @@ def number_format_config():
|
||||
|
||||
def client_config():
|
||||
if not current_user.is_api_user() and current_user.is_authenticated:
|
||||
client_config = {
|
||||
"newVersionAvailable": bool(get_latest_version()),
|
||||
client_config_inner = {
|
||||
"version": __version__,
|
||||
}
|
||||
else:
|
||||
client_config = {}
|
||||
|
||||
if current_user.has_permission("admin") and current_org.get_setting("beacon_consent") is None:
|
||||
client_config["showBeaconConsentMessage"] = True
|
||||
client_config_inner = {}
|
||||
|
||||
defaults = {
|
||||
"allowScriptsInUserInput": settings.ALLOW_SCRIPTS_IN_USER_INPUT,
|
||||
@@ -284,12 +279,12 @@ def client_config():
|
||||
"tableCellMaxJSONSize": settings.TABLE_CELL_MAX_JSON_SIZE,
|
||||
}
|
||||
|
||||
client_config.update(defaults)
|
||||
client_config.update({"basePath": base_href()})
|
||||
client_config.update(date_time_format_config())
|
||||
client_config.update(number_format_config())
|
||||
client_config_inner.update(defaults)
|
||||
client_config_inner.update({"basePath": base_href()})
|
||||
client_config_inner.update(date_time_format_config())
|
||||
client_config_inner.update(number_format_config())
|
||||
|
||||
return client_config
|
||||
return client_config_inner
|
||||
|
||||
|
||||
def messages():
|
||||
|
||||
@@ -5,7 +5,7 @@ from flask import Blueprint, current_app, request
|
||||
from flask_login import current_user, login_required
|
||||
from flask_restful import Resource, abort
|
||||
from sqlalchemy import cast
|
||||
from sqlalchemy.dialects import postgresql
|
||||
from sqlalchemy.dialects.postgresql import ARRAY
|
||||
from sqlalchemy.orm.exc import NoResultFound
|
||||
from sqlalchemy_utils.functions import sort_query
|
||||
|
||||
@@ -114,7 +114,7 @@ def json_response(response):
|
||||
def filter_by_tags(result_set, column):
|
||||
if request.args.getlist("tags"):
|
||||
tags = request.args.getlist("tags")
|
||||
result_set = result_set.filter(cast(column, postgresql.ARRAY(db.Text)).contains(tags))
|
||||
result_set = result_set.filter(cast(column, ARRAY(db.Text)).contains(tags))
|
||||
return result_set
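The hunk above only changes how the ARRAY type is referenced; the tag filter itself is unchanged. A hedged standalone version of the same pattern, with an illustrative function name:

from sqlalchemy import cast
from sqlalchemy.dialects.postgresql import ARRAY

from redash import models
from redash.models import db


def dashboards_with_tags(tags):
    # Keep only dashboards whose tags array contains every requested tag
    return models.Dashboard.query.filter(
        cast(models.Dashboard.tags, ARRAY(db.Text)).contains(tags)
    )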
|
||||
|
||||
|
||||
|
||||
@@ -8,9 +8,9 @@ from redash.handlers.base import (
|
||||
BaseResource,
|
||||
filter_by_tags,
|
||||
get_object_or_404,
|
||||
paginate,
|
||||
)
|
||||
from redash.handlers.base import order_results as _order_results
|
||||
from redash.handlers.base import paginate
|
||||
from redash.permissions import (
|
||||
can_modify,
|
||||
require_admin_or_owner,
|
||||
@@ -96,7 +96,7 @@ class DashboardListResource(BaseResource):
|
||||
org=self.current_org,
|
||||
user=self.current_user,
|
||||
is_draft=True,
|
||||
layout="[]",
|
||||
layout=[],
|
||||
)
|
||||
models.db.session.add(dashboard)
|
||||
models.db.session.commit()
|
||||
@@ -398,3 +398,16 @@ class DashboardFavoriteListResource(BaseResource):
|
||||
)
|
||||
|
||||
return response
|
||||
|
||||
|
||||
class DashboardForkResource(BaseResource):
|
||||
@require_permission("edit_dashboard")
|
||||
def post(self, dashboard_id):
|
||||
dashboard = models.Dashboard.get_by_id_and_org(dashboard_id, self.current_org)
|
||||
|
||||
fork_dashboard = dashboard.fork(self.current_user)
|
||||
models.db.session.commit()
|
||||
|
||||
self.record_event({"action": "fork", "object_id": dashboard_id, "object_type": "dashboard"})
|
||||
|
||||
return DashboardSerializer(fork_dashboard, with_widgets=True).serialize()
|
||||
|
||||
@@ -15,7 +15,7 @@ def organization_status(org_slug=None):
|
||||
"data_sources": models.DataSource.all(current_org, group_ids=current_user.group_ids).count(),
|
||||
"queries": models.Query.all_queries(current_user.group_ids, current_user.id, include_drafts=True).count(),
|
||||
"dashboards": models.Dashboard.query.filter(
|
||||
models.Dashboard.org == current_org, models.Dashboard.is_archived is False
|
||||
models.Dashboard.org == current_org, models.Dashboard.is_archived.is_(False)
|
||||
).count(),
|
||||
}
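The one-line change above matters because `is False` is a Python identity test: it yields a plain boolean instead of a SQL expression, so the archived filter never reached the database. `is_(False)` builds the intended IS false clause. A minimal illustration (the query below is illustrative, not from the commit):

from redash import models

# Python identity test: evaluates to the plain value False, never becomes SQL
broken = models.Dashboard.is_archived is False

# SQLAlchemy operator: renders as "dashboards.is_archived IS false"
active = models.Dashboard.query.filter(models.Dashboard.is_archived.is_(False))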
|
||||
|
||||
|
||||
@@ -11,9 +11,11 @@ from redash.handlers.base import (
|
||||
BaseResource,
|
||||
filter_by_tags,
|
||||
get_object_or_404,
|
||||
org_scoped_rule,
|
||||
paginate,
|
||||
routes,
|
||||
)
|
||||
from redash.handlers.base import order_results as _order_results
|
||||
from redash.handlers.base import org_scoped_rule, paginate, routes
|
||||
from redash.handlers.query_results import run_query
|
||||
from redash.models.parameterized_query import ParameterizedQuery
|
||||
from redash.permissions import (
|
||||
|
||||
@@ -5,6 +5,7 @@ import regex
|
||||
from flask import make_response, request
|
||||
from flask_login import current_user
|
||||
from flask_restful import abort
|
||||
from rq.job import JobStatus
|
||||
|
||||
from redash import models, settings
|
||||
from redash.handlers.base import BaseResource, get_object_or_404, record_event
|
||||
@@ -38,7 +39,7 @@ from redash.utils import (
|
||||
|
||||
|
||||
def error_response(message, http_status=400):
|
||||
return {"job": {"status": 4, "error": message}}, http_status
|
||||
return {"job": {"status": JobStatus.FAILED, "error": message}}, http_status
|
||||
|
||||
|
||||
error_messages = {
|
||||
@@ -225,7 +226,7 @@ class QueryResultResource(BaseResource):
|
||||
headers["Access-Control-Allow-Credentials"] = str(settings.ACCESS_CONTROL_ALLOW_CREDENTIALS).lower()
|
||||
|
||||
@require_any_of_permission(("view_query", "execute_query"))
|
||||
def options(self, query_id=None, query_result_id=None, filetype="json"):
|
||||
def options(self, query_id=None, result_id=None, filetype="json"):
|
||||
headers = {}
|
||||
self.add_cors_headers(headers)
|
||||
|
||||
@@ -285,12 +286,12 @@ class QueryResultResource(BaseResource):
|
||||
return error_messages["no_permission"]
|
||||
|
||||
@require_any_of_permission(("view_query", "execute_query"))
|
||||
def get(self, query_id=None, query_result_id=None, filetype="json"):
|
||||
def get(self, query_id=None, result_id=None, filetype="json"):
|
||||
"""
|
||||
Retrieve query results.
|
||||
|
||||
:param number query_id: The ID of the query whose results should be fetched
|
||||
:param number query_result_id: the ID of the query result to fetch
|
||||
:param number result_id: the ID of the query result to fetch
|
||||
:param string filetype: Format to return. One of 'json', 'xlsx', or 'csv'. Defaults to 'json'.
|
||||
|
||||
:<json number id: Query result ID
|
||||
@@ -305,13 +306,13 @@ class QueryResultResource(BaseResource):
|
||||
# This method handles two cases: retrieving result by id & retrieving result by query id.
|
||||
# They need to be split, as they have different logic (for example, retrieving by query id
|
||||
# should check for query parameters and shouldn't cache the result).
|
||||
should_cache = query_result_id is not None
|
||||
should_cache = result_id is not None
|
||||
|
||||
query_result = None
|
||||
query = None
|
||||
|
||||
if query_result_id:
|
||||
query_result = get_object_or_404(models.QueryResult.get_by_id_and_org, query_result_id, self.current_org)
|
||||
if result_id:
|
||||
query_result = get_object_or_404(models.QueryResult.get_by_id_and_org, result_id, self.current_org)
|
||||
|
||||
if query_id is not None:
|
||||
query = get_object_or_404(models.Query.get_by_id_and_org, query_id, self.current_org)
|
||||
@@ -346,7 +347,7 @@ class QueryResultResource(BaseResource):
|
||||
event["object_id"] = query_id
|
||||
else:
|
||||
event["object_type"] = "query_result"
|
||||
event["object_id"] = query_result_id
|
||||
event["object_id"] = result_id
|
||||
|
||||
self.record_event(event)
|
||||
|
||||
|
||||
@@ -1,13 +1,12 @@
|
||||
from flask import g, redirect, render_template, request, url_for
|
||||
from flask_login import login_user
|
||||
from wtforms import BooleanField, Form, PasswordField, StringField, validators
|
||||
from wtforms import Form, PasswordField, StringField, validators
|
||||
from wtforms.fields.html5 import EmailField
|
||||
|
||||
from redash import settings
|
||||
from redash.authentication.org_resolving import current_org
|
||||
from redash.handlers.base import routes
|
||||
from redash.models import Group, Organization, User, db
|
||||
from redash.tasks.general import subscribe
|
||||
|
||||
|
||||
class SetupForm(Form):
|
||||
@@ -15,8 +14,6 @@ class SetupForm(Form):
|
||||
email = EmailField("Email Address", validators=[validators.Email()])
|
||||
password = PasswordField("Password", validators=[validators.Length(6)])
|
||||
org_name = StringField("Organization Name", validators=[validators.InputRequired()])
|
||||
security_notifications = BooleanField()
|
||||
newsletter = BooleanField()
|
||||
|
||||
|
||||
def create_org(org_name, user_name, email, password):
|
||||
@@ -57,8 +54,6 @@ def setup():
|
||||
return redirect("/")
|
||||
|
||||
form = SetupForm(request.form)
|
||||
form.newsletter.data = True
|
||||
form.security_notifications.data = True
|
||||
|
||||
if request.method == "POST" and form.validate():
|
||||
default_org, user = create_org(form.org_name.data, form.name.data, form.email.data, form.password.data)
|
||||
@@ -66,10 +61,6 @@ def setup():
|
||||
g.org = default_org
|
||||
login_user(user)
|
||||
|
||||
# signup to newsletter if needed
|
||||
if form.newsletter.data or form.security_notifications:
|
||||
subscribe.delay(form.data)
|
||||
|
||||
return redirect(url_for("redash.index", org_slug=None))
|
||||
|
||||
return render_template("setup.html", form=form)
|
||||
|
||||
@@ -13,9 +13,13 @@ from redash.authentication.account import (
|
||||
send_password_reset_email,
|
||||
send_verify_email,
|
||||
)
|
||||
from redash.handlers.base import BaseResource, get_object_or_404
|
||||
from redash.handlers.base import (
|
||||
BaseResource,
|
||||
get_object_or_404,
|
||||
paginate,
|
||||
require_fields,
|
||||
)
|
||||
from redash.handlers.base import order_results as _order_results
|
||||
from redash.handlers.base import paginate, require_fields
|
||||
from redash.permissions import (
|
||||
is_admin_or_owner,
|
||||
require_admin,
|
||||
|
||||
@@ -7,7 +7,6 @@ from redash.permissions import (
|
||||
require_permission,
|
||||
)
|
||||
from redash.serializers import serialize_visualization
|
||||
from redash.utils import json_dumps
|
||||
|
||||
|
||||
class VisualizationListResource(BaseResource):
|
||||
@@ -18,7 +17,6 @@ class VisualizationListResource(BaseResource):
|
||||
query = get_object_or_404(models.Query.get_by_id_and_org, kwargs.pop("query_id"), self.current_org)
|
||||
require_object_modify_permission(query, self.current_user)
|
||||
|
||||
kwargs["options"] = json_dumps(kwargs["options"])
|
||||
kwargs["query_rel"] = query
|
||||
|
||||
vis = models.Visualization(**kwargs)
|
||||
@@ -34,8 +32,6 @@ class VisualizationResource(BaseResource):
|
||||
require_object_modify_permission(vis.query_rel, self.current_user)
|
||||
|
||||
kwargs = request.get_json(force=True)
|
||||
if "options" in kwargs:
|
||||
kwargs["options"] = json_dumps(kwargs["options"])
|
||||
|
||||
kwargs.pop("id", None)
|
||||
kwargs.pop("query_id", None)
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import json
|
||||
import os
|
||||
|
||||
import simplejson
|
||||
from flask import url_for
|
||||
|
||||
WEBPACK_MANIFEST_PATH = os.path.join(os.path.dirname(__file__), "../../client/dist/", "asset-manifest.json")
|
||||
@@ -15,7 +15,7 @@ def configure_webpack(app):
|
||||
if assets is None or app.debug:
|
||||
try:
|
||||
with open(WEBPACK_MANIFEST_PATH) as fp:
|
||||
assets = simplejson.load(fp)
|
||||
assets = json.load(fp)
|
||||
except IOError:
|
||||
app.logger.exception("Unable to load webpack manifest")
|
||||
assets = {}
|
||||
|
||||
@@ -9,7 +9,6 @@ from redash.permissions import (
|
||||
view_only,
|
||||
)
|
||||
from redash.serializers import serialize_widget
|
||||
from redash.utils import json_dumps
|
||||
|
||||
|
||||
class WidgetListResource(BaseResource):
|
||||
@@ -30,7 +29,6 @@ class WidgetListResource(BaseResource):
|
||||
dashboard = models.Dashboard.get_by_id_and_org(widget_properties.get("dashboard_id"), self.current_org)
|
||||
require_object_modify_permission(dashboard, self.current_user)
|
||||
|
||||
widget_properties["options"] = json_dumps(widget_properties["options"])
|
||||
widget_properties.pop("id", None)
|
||||
|
||||
visualization_id = widget_properties.pop("visualization_id")
|
||||
@@ -44,7 +42,6 @@ class WidgetListResource(BaseResource):
|
||||
|
||||
widget = models.Widget(**widget_properties)
|
||||
models.db.session.add(widget)
|
||||
models.db.session.commit()
|
||||
|
||||
models.db.session.commit()
|
||||
return serialize_widget(widget)
|
||||
@@ -65,7 +62,7 @@ class WidgetResource(BaseResource):
|
||||
require_object_modify_permission(widget.dashboard, self.current_user)
|
||||
widget_properties = request.get_json(force=True)
|
||||
widget.text = widget_properties["text"]
|
||||
widget.options = json_dumps(widget_properties["options"])
|
||||
widget.options = widget_properties["options"]
|
||||
models.db.session.commit()
|
||||
return serialize_widget(widget)
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ import time
|
||||
|
||||
import pytz
|
||||
from sqlalchemy import UniqueConstraint, and_, cast, distinct, func, or_
|
||||
from sqlalchemy.dialects import postgresql
|
||||
from sqlalchemy.dialects.postgresql import ARRAY, DOUBLE_PRECISION, JSONB
|
||||
from sqlalchemy.event import listens_for
|
||||
from sqlalchemy.ext.hybrid import hybrid_property
|
||||
from sqlalchemy.orm import (
|
||||
@@ -40,14 +40,17 @@ from redash.models.base import (
|
||||
from redash.models.changes import Change, ChangeTrackingMixin # noqa
|
||||
from redash.models.mixins import BelongsToOrgMixin, TimestampMixin
|
||||
from redash.models.organizations import Organization
|
||||
from redash.models.parameterized_query import ParameterizedQuery
|
||||
from redash.models.parameterized_query import (
|
||||
InvalidParameterError,
|
||||
ParameterizedQuery,
|
||||
QueryDetachedFromDataSourceError,
|
||||
)
|
||||
from redash.models.types import (
|
||||
Configuration,
|
||||
EncryptedConfiguration,
|
||||
JSONText,
|
||||
MutableDict,
|
||||
MutableList,
|
||||
PseudoJSON,
|
||||
pseudo_json_cast_property,
|
||||
json_cast_property,
|
||||
)
|
||||
from redash.models.users import ( # noqa
|
||||
AccessPermission,
|
||||
@@ -80,7 +83,7 @@ from redash.utils.configuration import ConfigurationContainer
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ScheduledQueriesExecutions(object):
|
||||
class ScheduledQueriesExecutions:
|
||||
KEY_NAME = "sq:executed_at"
|
||||
|
||||
def __init__(self):
|
||||
@@ -123,7 +126,10 @@ class DataSource(BelongsToOrgMixin, db.Model):
|
||||
|
||||
data_source_groups = db.relationship("DataSourceGroup", back_populates="data_source", cascade="all")
|
||||
__tablename__ = "data_sources"
|
||||
__table_args__ = (db.Index("data_sources_org_id_name", "org_id", "name"),)
|
||||
__table_args__ = (
|
||||
db.Index("data_sources_org_id_name", "org_id", "name"),
|
||||
{"extend_existing": True},
|
||||
)
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.id == other.id
|
||||
@@ -221,7 +227,16 @@ class DataSource(BelongsToOrgMixin, db.Model):
|
||||
|
||||
def _sort_schema(self, schema):
|
||||
return [
|
||||
{"name": i["name"], "columns": sorted(i["columns"], key=lambda x: x["name"] if isinstance(x, dict) else x)}
|
||||
{
|
||||
"name": i["name"],
|
||||
"description": i.get("description"),
|
||||
"columns": sorted(
|
||||
i["columns"],
|
||||
key=lambda col: (
|
||||
("partition" in col["type"], col.get("idx", 0), col["name"]) if isinstance(col, dict) else col
|
||||
),
|
||||
),
|
||||
}
|
||||
for i in sorted(schema, key=lambda x: x["name"])
|
||||
]
|
||||
|
||||
@@ -264,7 +279,7 @@ class DataSource(BelongsToOrgMixin, db.Model):
|
||||
|
||||
@property
|
||||
def uses_ssh_tunnel(self):
|
||||
return "ssh_tunnel" in self.options
|
||||
return self.options and "ssh_tunnel" in self.options
|
||||
|
||||
@property
|
||||
def query_runner(self):
|
||||
@@ -297,34 +312,11 @@ class DataSourceGroup(db.Model):
|
||||
view_only = Column(db.Boolean, default=False)
|
||||
|
||||
__tablename__ = "data_source_groups"
|
||||
|
||||
|
||||
DESERIALIZED_DATA_ATTR = "_deserialized_data"
|
||||
|
||||
|
||||
class DBPersistence(object):
|
||||
@property
|
||||
def data(self):
|
||||
if self._data is None:
|
||||
return None
|
||||
|
||||
if not hasattr(self, DESERIALIZED_DATA_ATTR):
|
||||
setattr(self, DESERIALIZED_DATA_ATTR, json_loads(self._data))
|
||||
|
||||
return self._deserialized_data
|
||||
|
||||
@data.setter
|
||||
def data(self, data):
|
||||
if hasattr(self, DESERIALIZED_DATA_ATTR):
|
||||
delattr(self, DESERIALIZED_DATA_ATTR)
|
||||
self._data = data
|
||||
|
||||
|
||||
QueryResultPersistence = settings.dynamic_settings.QueryResultPersistence or DBPersistence
|
||||
__table_args__ = ({"extend_existing": True},)
|
||||
|
||||
|
||||
@generic_repr("id", "org_id", "data_source_id", "query_hash", "runtime", "retrieved_at")
|
||||
class QueryResult(db.Model, QueryResultPersistence, BelongsToOrgMixin):
|
||||
class QueryResult(db.Model, BelongsToOrgMixin):
|
||||
id = primary_key("QueryResult")
|
||||
org_id = Column(key_type("Organization"), db.ForeignKey("organizations.id"))
|
||||
org = db.relationship(Organization)
|
||||
@@ -332,8 +324,8 @@ class QueryResult(db.Model, QueryResultPersistence, BelongsToOrgMixin):
|
||||
data_source = db.relationship(DataSource, backref=backref("query_results"))
|
||||
query_hash = Column(db.String(32), index=True)
|
||||
query_text = Column("query", db.Text)
|
||||
_data = Column("data", db.Text)
|
||||
runtime = Column(postgresql.DOUBLE_PRECISION)
|
||||
data = Column(JSONText, nullable=True)
|
||||
runtime = Column(DOUBLE_PRECISION)
|
||||
retrieved_at = Column(db.DateTime(True))
|
||||
|
||||
__tablename__ = "query_results"
|
||||
@@ -474,11 +466,11 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
|
||||
last_modified_by = db.relationship(User, backref="modified_queries", foreign_keys=[last_modified_by_id])
|
||||
is_archived = Column(db.Boolean, default=False, index=True)
|
||||
is_draft = Column(db.Boolean, default=True, index=True)
|
||||
schedule = Column(MutableDict.as_mutable(PseudoJSON), nullable=True)
|
||||
interval = pseudo_json_cast_property(db.Integer, "schedule", "interval", default=0)
|
||||
schedule = Column(MutableDict.as_mutable(JSONB), nullable=True)
|
||||
interval = json_cast_property(db.Integer, "schedule", "interval", default=0)
|
||||
schedule_failures = Column(db.Integer, default=0)
|
||||
visualizations = db.relationship("Visualization", cascade="all, delete-orphan")
|
||||
options = Column(MutableDict.as_mutable(PseudoJSON), default={})
|
||||
options = Column(MutableDict.as_mutable(JSONB), default={})
|
||||
search_vector = Column(
|
||||
TSVectorType(
|
||||
"id",
|
||||
@@ -489,7 +481,7 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
|
||||
),
|
||||
nullable=True,
|
||||
)
|
||||
tags = Column("tags", MutableList.as_mutable(postgresql.ARRAY(db.Unicode)), nullable=True)
|
||||
tags = Column("tags", MutableList.as_mutable(ARRAY(db.Unicode)), nullable=True)
|
||||
|
||||
query_class = SearchBaseQuery
|
||||
__tablename__ = "queries"
|
||||
@@ -525,7 +517,7 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
|
||||
name="Table",
|
||||
description="",
|
||||
type="TABLE",
|
||||
options="{}",
|
||||
options={},
|
||||
)
|
||||
)
|
||||
return query
|
||||
@@ -591,11 +583,12 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
|
||||
@classmethod
|
||||
def past_scheduled_queries(cls):
|
||||
now = utils.utcnow()
|
||||
queries = Query.query.filter(Query.schedule.isnot(None)).order_by(Query.id)
|
||||
queries = Query.query.filter(func.jsonb_typeof(Query.schedule) != "null").order_by(Query.id)
|
||||
return [
|
||||
query
|
||||
for query in queries
|
||||
if query.schedule["until"] is not None
|
||||
if "until" in query.schedule
|
||||
and query.schedule["until"] is not None
|
||||
and pytz.utc.localize(datetime.datetime.strptime(query.schedule["until"], "%Y-%m-%d")) <= now
|
||||
]
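With schedule stored as JSONB, an unset schedule can be either a SQL NULL or a JSON null, which is why the filter above switches to jsonb_typeof and the comprehension additionally checks that the "until" key exists. The filter in isolation, mirroring the hunk:

from sqlalchemy import func

from redash.models import Query

# Rows whose schedule holds the JSON value null are excluded; SQL NULLs are
# excluded as well, since jsonb_typeof(NULL) IS NULL and NULL != 'null' is not true.
scheduled_queries = Query.query.filter(func.jsonb_typeof(Query.schedule) != "null").order_by(Query.id)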
|
||||
|
||||
@@ -603,7 +596,7 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
|
||||
def outdated_queries(cls):
|
||||
queries = (
|
||||
Query.query.options(joinedload(Query.latest_query_data).load_only("retrieved_at"))
|
||||
.filter(Query.schedule.isnot(None))
|
||||
.filter(func.jsonb_typeof(Query.schedule) != "null")
|
||||
.order_by(Query.id)
|
||||
.all()
|
||||
)
|
||||
@@ -738,6 +731,7 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
|
||||
queries = Query.query.filter(
|
||||
Query.query_hash == query_result.query_hash,
|
||||
Query.data_source == query_result.data_source,
|
||||
Query.is_archived.is_(False),
|
||||
)
|
||||
|
||||
for q in queries:
|
||||
@@ -830,7 +824,20 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
|
||||
def update_query_hash(self):
|
||||
should_apply_auto_limit = self.options.get("apply_auto_limit", False) if self.options else False
|
||||
query_runner = self.data_source.query_runner if self.data_source else BaseQueryRunner({})
|
||||
self.query_hash = query_runner.gen_query_hash(self.query_text, should_apply_auto_limit)
|
||||
query_text = self.query_text
|
||||
|
||||
parameters_dict = {p["name"]: p.get("value") for p in self.parameters} if self.options else {}
|
||||
if any(parameters_dict):
|
||||
try:
|
||||
query_text = self.parameterized.apply(parameters_dict).query
|
||||
except InvalidParameterError as e:
|
||||
logging.info(f"Unable to update hash for query {self.id} because of invalid parameters: {str(e)}")
|
||||
except QueryDetachedFromDataSourceError as e:
|
||||
logging.info(
|
||||
f"Unable to update hash for query {self.id} because of dropdown query {e.query_id} is unattached from datasource"
|
||||
)
|
||||
|
||||
self.query_hash = query_runner.gen_query_hash(query_text, should_apply_auto_limit)
|
||||
|
||||
|
||||
@listens_for(Query, "before_insert")
|
||||
@@ -935,7 +942,7 @@ class Alert(TimestampMixin, BelongsToOrgMixin, db.Model):
|
||||
query_rel = db.relationship(Query, backref=backref("alerts", cascade="all"))
|
||||
user_id = Column(key_type("User"), db.ForeignKey("users.id"))
|
||||
user = db.relationship(User, backref="alerts")
|
||||
options = Column(MutableDict.as_mutable(PseudoJSON))
|
||||
options = Column(MutableDict.as_mutable(JSONB), nullable=True)
|
||||
state = Column(db.String(255), default=UNKNOWN_STATE)
|
||||
subscriptions = db.relationship("AlertSubscription", cascade="all, delete-orphan")
|
||||
last_triggered_at = Column(db.DateTime(True), nullable=True)
|
||||
@@ -1046,13 +1053,13 @@ class Dashboard(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model
|
||||
user_id = Column(key_type("User"), db.ForeignKey("users.id"))
|
||||
user = db.relationship(User)
|
||||
# layout is no longer used, but kept so we know how to render old dashboards.
|
||||
layout = Column(db.Text)
|
||||
layout = Column(MutableList.as_mutable(JSONB), default=[])
|
||||
dashboard_filters_enabled = Column(db.Boolean, default=False)
|
||||
is_archived = Column(db.Boolean, default=False, index=True)
|
||||
is_draft = Column(db.Boolean, default=True, index=True)
|
||||
widgets = db.relationship("Widget", backref="dashboard", lazy="dynamic")
|
||||
tags = Column("tags", MutableList.as_mutable(postgresql.ARRAY(db.Unicode)), nullable=True)
|
||||
options = Column(MutableDict.as_mutable(postgresql.JSON), server_default="{}", default={})
|
||||
tags = Column("tags", MutableList.as_mutable(ARRAY(db.Unicode)), nullable=True)
|
||||
options = Column(MutableDict.as_mutable(JSONB), default={})
|
||||
|
||||
__tablename__ = "dashboards"
|
||||
__mapper_args__ = {"version_id_col": version}
|
||||
@@ -1130,6 +1137,21 @@ class Dashboard(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model
|
||||
def get_by_slug_and_org(cls, slug, org):
|
||||
return cls.query.filter(cls.slug == slug, cls.org == org).one()
|
||||
|
||||
def fork(self, user):
|
||||
forked_list = ["org", "layout", "dashboard_filters_enabled", "tags"]
|
||||
|
||||
kwargs = {a: getattr(self, a) for a in forked_list}
|
||||
forked_dashboard = Dashboard(name="Copy of (#{}) {}".format(self.id, self.name), user=user, **kwargs)
|
||||
|
||||
for w in self.widgets:
|
||||
forked_w = w.copy(forked_dashboard.id)
|
||||
fw = Widget(**forked_w)
|
||||
db.session.add(fw)
|
||||
|
||||
forked_dashboard.slug = forked_dashboard.id
|
||||
db.session.add(forked_dashboard)
|
||||
return forked_dashboard
|
||||
|
||||
@hybrid_property
|
||||
def lowercase_name(self):
|
||||
"Optional property useful for sorting purposes."
|
||||
@@ -1150,7 +1172,7 @@ class Visualization(TimestampMixin, BelongsToOrgMixin, db.Model):
|
||||
query_rel = db.relationship(Query, back_populates="visualizations")
|
||||
name = Column(db.String(255))
|
||||
description = Column(db.String(4096), nullable=True)
|
||||
options = Column(db.Text)
|
||||
options = Column(MutableDict.as_mutable(JSONB), nullable=True)
|
||||
|
||||
__tablename__ = "visualizations"
|
||||
|
||||
@@ -1177,7 +1199,7 @@ class Widget(TimestampMixin, BelongsToOrgMixin, db.Model):
|
||||
visualization = db.relationship(Visualization, backref=backref("widgets", cascade="delete"))
|
||||
text = Column(db.Text, nullable=True)
|
||||
width = Column(db.Integer)
|
||||
options = Column(db.Text)
|
||||
options = Column(MutableDict.as_mutable(JSONB), default={})
|
||||
dashboard_id = Column(key_type("Dashboard"), db.ForeignKey("dashboards.id"), index=True)
|
||||
|
||||
__tablename__ = "widgets"
|
||||
@@ -1189,6 +1211,15 @@ class Widget(TimestampMixin, BelongsToOrgMixin, db.Model):
|
||||
def get_by_id_and_org(cls, object_id, org):
|
||||
return super(Widget, cls).get_by_id_and_org(object_id, org, Dashboard)
|
||||
|
||||
def copy(self, dashboard_id):
|
||||
return {
|
||||
"options": self.options,
|
||||
"width": self.width,
|
||||
"text": self.text,
|
||||
"visualization_id": self.visualization_id,
|
||||
"dashboard_id": dashboard_id,
|
||||
}
|
||||
|
||||
|
||||
@generic_repr("id", "object_type", "object_id", "action", "user_id", "org_id", "created_at")
|
||||
class Event(db.Model):
|
||||
@@ -1200,7 +1231,7 @@ class Event(db.Model):
|
||||
action = Column(db.String(255))
|
||||
object_type = Column(db.String(255))
|
||||
object_id = Column(db.String(255), nullable=True)
|
||||
additional_properties = Column(MutableDict.as_mutable(PseudoJSON), nullable=True, default={})
|
||||
additional_properties = Column(MutableDict.as_mutable(JSONB), nullable=True, default={})
|
||||
created_at = Column(db.DateTime(True), default=db.func.now())
|
||||
|
||||
__tablename__ = "events"
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
import functools
|
||||
|
||||
from flask_sqlalchemy import BaseQuery, SQLAlchemy
|
||||
from sqlalchemy.dialects import postgresql
|
||||
from sqlalchemy.dialects.postgresql import UUID
|
||||
from sqlalchemy.orm import object_session
|
||||
from sqlalchemy.pool import NullPool
|
||||
from sqlalchemy_searchable import SearchQueryMixin, make_searchable, vectorizer
|
||||
|
||||
from redash import settings
|
||||
from redash.utils import json_dumps
|
||||
from redash.utils import json_dumps, json_loads
|
||||
|
||||
|
||||
class RedashSQLAlchemy(SQLAlchemy):
|
||||
@@ -28,7 +28,10 @@ class RedashSQLAlchemy(SQLAlchemy):
|
||||
return options
|
||||
|
||||
|
||||
db = RedashSQLAlchemy(session_options={"expire_on_commit": False})
|
||||
db = RedashSQLAlchemy(
|
||||
session_options={"expire_on_commit": False},
|
||||
engine_options={"json_serializer": json_dumps, "json_deserializer": json_loads},
|
||||
)
|
||||
# Make sure the SQLAlchemy mappers are all properly configured first.
|
||||
# This is required by SQLAlchemy-Searchable as it adds DDL listeners
|
||||
# on the configuration phase of models.
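The engine_options added above route every JSON/JSONB column value through Redash's json_dumps/json_loads instead of the stdlib defaults. The same hooks on a bare engine, as a hedged sketch (the connection string is a placeholder):

import sqlalchemy as sa

from redash.utils import json_dumps, json_loads

engine = sa.create_engine(
    "postgresql:///redash",        # illustrative connection string
    json_serializer=json_dumps,    # used when writing JSON/JSONB column values
    json_deserializer=json_loads,  # used when reading them back
)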
|
||||
@@ -50,7 +53,7 @@ def integer_vectorizer(column):
|
||||
return db.func.cast(column, db.Text)
|
||||
|
||||
|
||||
@vectorizer(postgresql.UUID)
|
||||
@vectorizer(UUID)
|
||||
def uuid_vectorizer(column):
|
||||
return db.func.cast(column, db.Text)
|
||||
|
||||
@@ -68,7 +71,7 @@ def gfk_type(cls):
|
||||
return cls
|
||||
|
||||
|
||||
class GFKBase(object):
|
||||
class GFKBase:
|
||||
"""
|
||||
Compatibility with 'generic foreign key' approach Peewee used.
|
||||
"""
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
from sqlalchemy.dialects.postgresql import JSONB
|
||||
from sqlalchemy.inspection import inspect
|
||||
from sqlalchemy_utils.models import generic_repr
|
||||
|
||||
from .base import Column, GFKBase, db, key_type, primary_key
|
||||
from .types import PseudoJSON
|
||||
|
||||
|
||||
@generic_repr("id", "object_type", "object_id", "created_at")
|
||||
@@ -13,7 +13,7 @@ class Change(GFKBase, db.Model):
|
||||
object_version = Column(db.Integer, default=0)
|
||||
user_id = Column(key_type("User"), db.ForeignKey("users.id"))
|
||||
user = db.relationship("User", backref="changes")
|
||||
change = Column(PseudoJSON)
|
||||
change = Column(JSONB)
|
||||
created_at = Column(db.DateTime(True), default=db.func.now())
|
||||
|
||||
__tablename__ = "changes"
|
||||
@@ -45,7 +45,7 @@ class Change(GFKBase, db.Model):
|
||||
)
|
||||
|
||||
|
||||
class ChangeTrackingMixin(object):
|
||||
class ChangeTrackingMixin:
|
||||
skipped_fields = ("id", "created_at", "updated_at", "version")
|
||||
_clean_values = None
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ from sqlalchemy.event import listens_for
|
||||
from .base import Column, db
|
||||
|
||||
|
||||
class TimestampMixin(object):
|
||||
class TimestampMixin:
|
||||
updated_at = Column(db.DateTime(True), default=db.func.now(), nullable=False)
|
||||
created_at = Column(db.DateTime(True), default=db.func.now(), nullable=False)
|
||||
|
||||
@@ -17,7 +17,7 @@ def timestamp_before_update(mapper, connection, target):
|
||||
target.updated_at = db.func.now()
|
||||
|
||||
|
||||
class BelongsToOrgMixin(object):
|
||||
class BelongsToOrgMixin:
|
||||
@classmethod
|
||||
def get_by_id_and_org(cls, object_id, org, org_cls=None):
|
||||
query = cls.query.filter(cls.id == object_id)
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
from sqlalchemy.dialects.postgresql import JSONB
|
||||
from sqlalchemy.orm.attributes import flag_modified
|
||||
from sqlalchemy_utils.models import generic_repr
|
||||
|
||||
@@ -5,7 +6,7 @@ from redash.settings.organization import settings as org_settings
|
||||
|
||||
from .base import Column, db, primary_key
|
||||
from .mixins import TimestampMixin
|
||||
from .types import MutableDict, PseudoJSON
|
||||
from .types import MutableDict
|
||||
from .users import Group, User
|
||||
|
||||
|
||||
@@ -17,7 +18,7 @@ class Organization(TimestampMixin, db.Model):
|
||||
id = primary_key("Organization")
|
||||
name = Column(db.String(255))
|
||||
slug = Column(db.String(255), unique=True)
|
||||
settings = Column(MutableDict.as_mutable(PseudoJSON))
|
||||
settings = Column(MutableDict.as_mutable(JSONB), default={})
|
||||
groups = db.relationship("Group", lazy="dynamic")
|
||||
events = db.relationship("Event", lazy="dynamic", order_by="desc(Event.created_at)")
|
||||
|
||||
|
||||
@@ -84,26 +84,17 @@ def _is_number(string):
|
||||
if isinstance(string, Number):
|
||||
return True
|
||||
else:
|
||||
try:
|
||||
float(string)
|
||||
return True
|
||||
except ValueError:
|
||||
return False
|
||||
float(string)
|
||||
return True
|
||||
|
||||
|
||||
def _is_date(string):
|
||||
try:
|
||||
parse(string)
|
||||
return True
|
||||
except (ValueError, TypeError):
|
||||
return False
|
||||
parse(string)
|
||||
return True
|
||||
|
||||
|
||||
def _is_date_range(obj):
|
||||
try:
|
||||
return _is_date(obj["start"]) and _is_date(obj["end"])
|
||||
except (KeyError, TypeError):
|
||||
return False
|
||||
return _is_date(obj["start"]) and _is_date(obj["end"])
|
||||
|
||||
|
||||
def _is_value_within_options(value, dropdown_options, allow_list=False):
|
||||
@@ -112,7 +103,7 @@ def _is_value_within_options(value, dropdown_options, allow_list=False):
|
||||
return str(value) in dropdown_options
|
||||
|
||||
|
||||
class ParameterizedQuery(object):
|
||||
class ParameterizedQuery:
|
||||
def __init__(self, template, schema=None, org=None):
|
||||
self.schema = schema or []
|
||||
self.org = org
|
||||
@@ -168,7 +159,14 @@ class ParameterizedQuery(object):
|
||||
|
||||
validate = validators.get(definition["type"], lambda x: False)
|
||||
|
||||
return validate(value)
|
||||
try:
|
||||
# multiple error types can be raised here; but we want to convert
|
||||
# all except QueryDetached to InvalidParameterError in `apply`
|
||||
return validate(value)
|
||||
except QueryDetachedFromDataSourceError:
|
||||
raise
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
@property
|
||||
def is_safe(self):
|
||||
|
||||