Compare commits
324 Commits
release/10 ... 23.09.0-dev
| Author | SHA1 | Date |
|---|---|---|
|  | ab71bded7d |  |
|  | 7b722a1067 |  |
|  | c70d397c72 |  |
|  | abe70ab3ca |  |
|  | fcbe726eb2 |  |
|  | 528807f336 |  |
|  | 28db934698 |  |
|  | 182d84226b |  |
|  | 5b110b61f0 |  |
|  | d8b10a0f16 |  |
|  | 972a49bb9d |  |
|  | 45e791b675 |  |
|  | b73d68f056 |  |
|  | 0258dca82a |  |
|  | 2d6f5b091c |  |
|  | 1ef63fc3f4 |  |
|  | fdd1d29693 |  |
|  | e18cd8f248 |  |
|  | 5eeeb5c62e |  |
|  | 1af49e9ddb |  |
|  | 7eae598546 |  |
|  | 0ad43de229 |  |
|  | 63140260eb |  |
|  | caf8097c9d |  |
|  | 4107265feb |  |
|  | 5d8364437a |  |
|  | fcf847eaaf |  |
|  | 9751678c44 |  |
|  | f49075bada |  |
|  | 772680bbd2 |  |
|  | 0586b43b75 |  |
|  | 0f88a23835 |  |
|  | 4a5c9c2630 |  |
|  | f8934b8312 |  |
|  | d333660473 |  |
|  | f4a930ddeb |  |
|  | 113146e4b8 |  |
|  | 126fe9310f |  |
|  | 0d1ce4d98c |  |
|  | acf77f85ff |  |
|  | 71bf65b496 |  |
|  | 204e5c1fb9 |  |
|  | 37fa1ec057 |  |
|  | f4ee891d68 |  |
|  | 196bfece30 |  |
|  | 1726aef0fc |  |
|  | afc6d878c2 |  |
|  | 9a7d2cdc02 |  |
|  | 2b8aa5cb32 |  |
|  | 39477c73ad |  |
|  | 353ea868ff |  |
|  | 7dfacfc531 |  |
|  | ad39059558 |  |
|  | a9a348cd64 |  |
|  | 4155507695 |  |
|  | f6ba9501da |  |
|  | ae29eb3dfb |  |
|  | ea3d825a78 |  |
|  | 0d699328b8 |  |
|  | 8f71e14887 |  |
|  | 5561b5fc55 |  |
|  | ea4ee7ce9b |  |
|  | b43cb1797e |  |
|  | 875973bfcd |  |
|  | 1b064da901 |  |
|  | 55690db1d8 |  |
|  | a5a9be352d |  |
|  | 72db9757f8 |  |
|  | ffbf0fbe45 |  |
|  | d0f5215cd8 |  |
|  | 6d495bc83d |  |
|  | e012d25585 |  |
|  | 7a421cf6d0 |  |
|  | 58b505536f |  |
|  | a7c15b078a |  |
|  | 03d54d3313 |  |
|  | fbe2f4d808 |  |
|  | 5cd38b14b0 |  |
|  | a8e1077bb7 |  |
|  | f572d88d8e |  |
|  | ba1b496f51 |  |
|  | 050b9e8716 |  |
|  | 239f8abf70 |  |
|  | 2795e1b7a0 |  |
|  | 4a847388fe |  |
|  | 2eed83bd7c |  |
|  | 34d380d427 |  |
|  | 93a2901f6d |  |
|  | 2946713a15 |  |
|  | 1b8f0ac2e9 |  |
|  | 0701ee9b28 |  |
|  | ef7e38de49 |  |
|  | ff1531bee4 |  |
|  | f172f15c7f |  |
|  | ae10477e50 |  |
|  | 0a4d250268 |  |
|  | 177f33a460 |  |
|  | 6e4f96405d |  |
|  | 3446e7e569 |  |
|  | ef06dff433 |  |
|  | afa723c435 |  |
|  | 4fb78387aa |  |
|  | f8e9887feb |  |
|  | f5c53efb3e |  |
|  | 37fd7f74dd |  |
|  | 3bddbcb025 |  |
|  | f1477c825e |  |
|  | 416e6cb864 |  |
|  | d4c69beef9 |  |
|  | 868453077a |  |
|  | 8973772548 |  |
|  | e2c824e1d5 |  |
|  | ed0075d495 |  |
|  | de1e6ba018 |  |
|  | 2d6928469a |  |
|  | 3370a34b6e |  |
|  | 5f2aad2009 |  |
|  | 58fc8f4aee |  |
|  | a32c0dfb58 |  |
|  | 34c20afdd8 |  |
|  | 7871e80abf |  |
|  | 00eab75127 |  |
|  | f3a768edb8 |  |
|  | 518fb33c7e |  |
|  | 9829a0957a |  |
|  | 4113bb532c |  |
|  | 416126abd3 |  |
|  | 02c8f71710 |  |
|  | 7248be14bb |  |
|  | 448bb99b7e |  |
|  | f2e31a602e |  |
|  | 79ef3e4eb0 |  |
|  | b30622e531 |  |
|  | d2322c9904 |  |
|  | bac15db21f |  |
|  | b284dfe40d |  |
|  | 81da13b461 |  |
|  | 7f4ade5f1f |  |
|  | 8376e41684 |  |
|  | c8eb445ce9 |  |
|  | e4302d9163 |  |
|  | be306e9284 |  |
|  | 91eee2b49e |  |
|  | bee833a6c1 |  |
|  | 37f008cccb |  |
|  | 77a2c24d47 |  |
|  | 05c9b35e42 |  |
|  | f41eab7054 |  |
|  | cd0bbc2621 |  |
|  | 20dbb461e9 |  |
|  | 0dd8614d5d |  |
|  | 281b552346 |  |
|  | 3e8222de17 |  |
|  | 4869a652c0 |  |
|  | 0c223b6af7 |  |
|  | ff6377b6e2 |  |
|  | f3ba10ff32 |  |
|  | 9736bc76f7 |  |
|  | 87adad9afc |  |
|  | a63d7d9ad8 |  |
|  | d3f118a74b |  |
|  | 17a03628e4 |  |
|  | 255f2221c6 |  |
|  | 698498d896 |  |
|  | 4092d418f6 |  |
|  | af243be0b3 |  |
|  | 897e3dbd3b |  |
|  | cbe3093a5d |  |
|  | f5fd10bb6c |  |
|  | a6447b46be |  |
|  | 7567a8a76a |  |
|  | 6237d54347 |  |
|  | 0bdd3bd826 |  |
|  | 1e33eee479 |  |
|  | f51b5ad1bb |  |
|  | 1ab9036325 |  |
|  | c8516d38a7 |  |
|  | 095ac2ecf0 |  |
|  | 02d128e7ae |  |
|  | d5b821e30a |  |
|  | 05526b557e |  |
|  | 60531a739d |  |
|  | 1b3215f79f |  |
|  | 39f4530562 |  |
|  | 6dd6a4c28b |  |
|  | fc39e36771 |  |
|  | 3e3cca4023 |  |
|  | c707cccfbf |  |
|  | 32b3e56c97 |  |
|  | 9d5754793f |  |
|  | d1e533264d |  |
|  | 591b607dd5 |  |
|  | ad6b12c5ad |  |
|  | a1a00c6819 |  |
|  | 9b2f635692 |  |
|  | 7f40837d3f |  |
|  | a944658265 |  |
|  | a7681a688e |  |
|  | 1b97d9ce04 |  |
|  | b4801dd2b8 |  |
|  | c922521dbd |  |
|  | 89e7669ec1 |  |
|  | 99be51ebc5 |  |
|  | 29c21db813 |  |
|  | e639a789e7 |  |
|  | bc9460b04c |  |
|  | 11c50567c3 |  |
|  | 90cd27fa25 |  |
|  | 26010f793e |  |
|  | a45a95af68 |  |
|  | 24fe1dd121 |  |
|  | 5af8764c10 |  |
|  | 5b3e47dc0f |  |
|  | c775eedec1 |  |
|  | 537d153986 |  |
|  | afef3dc6d4 |  |
|  | cdd4849f96 |  |
|  | 6b13d0ad96 |  |
|  | 73f49cbf0c |  |
|  | d92fc98b13 |  |
|  | 350ddd0483 |  |
|  | 4d0ce10d97 |  |
|  | a34deb25d6 |  |
|  | 79b01406fc |  |
|  | 2881599aa3 |  |
|  | 0f3452f00f |  |
|  | e8621dba1a |  |
|  | d6432482bf |  |
|  | b9bdfe83cc |  |
|  | 66da3bb7cd |  |
|  | a1e27ae1ed |  |
|  | bbd0a21831 |  |
|  | ee601ec206 |  |
|  | 241dcfacd9 |  |
|  | 112b9ed1ba |  |
|  | 24b6ef7ae7 |  |
|  | 4c3fd833df |  |
|  | c2c7f44d5c |  |
|  | 46f67fd44b |  |
|  | 675838619e |  |
|  | b33bd1b02e |  |
|  | 28e63e3d76 |  |
|  | 4d11c94be0 |  |
|  | f3892e00a5 |  |
|  | f45bd27e68 |  |
|  | bc909a13a3 |  |
|  | 962f13eed0 |  |
|  | e8071dcb12 |  |
|  | 3444f2b06c |  |
|  | c46d66afec |  |
|  | 64c24b77f9 |  |
|  | ad7d30f91d |  |
|  | 5b9fd40dc7 |  |
|  | 0b86c76552 |  |
|  | 35b2430ff9 |  |
|  | 65d0eb72f5 |  |
|  | 8487876e7f |  |
|  | c08ef9b502 |  |
|  | 28b0a2379d |  |
|  | 0dfe726ec8 |  |
|  | a1e3369ba3 |  |
|  | 7ec443c800 |  |
|  | d6dbc64cfe |  |
|  | 82361e7054 |  |
|  | 5cf13afafe |  |
|  | 328099137d |  |
|  | a863c8c08c |  |
|  | 71458e5697 |  |
|  | 75cb59f4be |  |
|  | 2935844e88 |  |
|  | 4186f8303e |  |
|  | 0712abb359 |  |
|  | 9abc4f5f1e |  |
|  | f0a390b11a |  |
|  | 3624f8f2be |  |
|  | 65f7b6c5af |  |
|  | 412c82940a |  |
|  | e2bad61e5b |  |
|  | 173cbdb2d6 |  |
|  | fc37c1ecfc |  |
|  | c4bfd4f3e1 |  |
|  | bdd1244604 |  |
|  | 6806ebd244 |  |
|  | b2cc42e383 |  |
|  | cabe33394b |  |
|  | 46ea3b1f0b |  |
|  | e6ebef1e5a |  |
|  | b713f6b240 |  |
|  | 5de85543a5 |  |
|  | 175a4da49b |  |
|  | 49fe29579a |  |
|  | 4164a42aab |  |
|  | 6797f32ea6 |  |
|  | ea07e7e19b |  |
|  | 26ac8ab1cd |  |
|  | 12c4750684 |  |
|  | 2b5d1c03c1 |  |
|  | f77f1b5ca1 |  |
|  | e28e4227bf |  |
|  | 4fddff104a |  |
|  | 8ef9a1d398 |  |
|  | 965db26cab |  |
|  | 64586500a7 |  |
|  | df472eb1d4 |  |
|  | 7487550ad7 |  |
|  | 61bbb5aa7a |  |
|  | ce60d20c4e |  |
|  | da696ff7f8 |  |
|  | ed654a7b78 |  |
|  | 3d032b69e5 |  |
|  | 86514207a3 |  |
|  | 2e67227f1b |  |
|  | 86b2c4d06e |  |
|  | 3c248acf21 |  |
|  | 39ca71c356 |  |
|  | 143d22db04 |  |
|  | 7cac149cef |  |
|  | a0a28b09b4 |  |
|  | e9bcc3c924 |  |
|  | 380345bb08 |  |
|  | 0f41f25720 |  |
|  | 7445080d1a |  |
|  | b9cb8191f5 |  |
|  | ff7c5e8367 |  |
.ci/Dockerfile.cypress (new file)
@@ -0,0 +1,12 @@
FROM cypress/browsers:node16.18.0-chrome90-ff88

ENV APP /usr/src/app
WORKDIR $APP

COPY package.json yarn.lock .yarnrc $APP/
COPY viz-lib $APP/viz-lib
RUN npm install yarn@1.22.19 -g && yarn --frozen-lockfile --network-concurrency 1 > /dev/null

COPY . $APP

RUN ./node_modules/.bin/cypress verify
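For local debugging it can help to build this image the same way CI does. A minimal sketch, assuming the repository root as build context (which matches the `context: ../` plus `dockerfile: .ci/Dockerfile.cypress` settings in the compose hunks below); the tag name is hypothetical:

```bash
# Build the E2E test image by hand from the repository root.
# "redash-cypress-test" is an illustrative tag, not one used by CI.
DOCKER_BUILDKIT=1 docker build -f .ci/Dockerfile.cypress -t redash-cypress-test .
```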
.ci/docker-compose.ci.yml
@@ -12,12 +12,15 @@ services:
       PYTHONUNBUFFERED: 0
       REDASH_LOG_LEVEL: "INFO"
       REDASH_REDIS_URL: "redis://redis:6379/0"
-      REDASH_DATABASE_URL: "postgresql://postgres@postgres/postgres"
+      POSTGRES_PASSWORD: "FmTKs5vX52ufKR1rd8tn4MoSP7zvCJwb"
+      REDASH_DATABASE_URL: "postgresql://postgres:FmTKs5vX52ufKR1rd8tn4MoSP7zvCJwb@postgres/postgres"
       REDASH_COOKIE_SECRET: "2H9gNG9obnAQ9qnR9BDTQUph6CbXKCzF"
   redis:
-    image: redis:3.0-alpine
+    image: redis:7-alpine
     restart: unless-stopped
   postgres:
-    image: postgres:9.5.6-alpine
+    image: pgautoupgrade/pgautoupgrade:15-alpine3.8
+    command: "postgres -c fsync=off -c full_page_writes=off -c synchronous_commit=OFF"
     restart: unless-stopped
     environment:
+      POSTGRES_HOST_AUTH_METHOD: "trust"
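The new `command:` line trades durability for speed, which is acceptable for throwaway CI databases. A hedged way to confirm the settings took effect in a running stack (service and user names are taken from this compose file; each `SHOW` runs as its own `-c` command):

```bash
# Expect "off" for all three settings in the CI postgres container.
docker-compose exec postgres psql -U postgres \
  -c "SHOW fsync" -c "SHOW full_page_writes" -c "SHOW synchronous_commit"
```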
.ci/docker-compose.cypress.yml
@@ -9,7 +9,8 @@ x-redash-service: &redash-service
 x-redash-environment: &redash-environment
   REDASH_LOG_LEVEL: "INFO"
   REDASH_REDIS_URL: "redis://redis:6379/0"
-  REDASH_DATABASE_URL: "postgresql://postgres@postgres/postgres"
+  POSTGRES_PASSWORD: "FmTKs5vX52ufKR1rd8tn4MoSP7zvCJwb"
+  REDASH_DATABASE_URL: "postgresql://postgres:FmTKs5vX52ufKR1rd8tn4MoSP7zvCJwb@postgres/postgres"
   REDASH_RATELIMIT_ENABLED: "false"
   REDASH_ENFORCE_CSRF: "true"
   REDASH_COOKIE_SECRET: "2H9gNG9obnAQ9qnR9BDTQUph6CbXKCzF"
@@ -44,7 +45,7 @@ services:
     ipc: host
     build:
       context: ../
-      dockerfile: .circleci/Dockerfile.cypress
+      dockerfile: .ci/Dockerfile.cypress
     depends_on:
       - server
       - worker
@@ -64,9 +65,11 @@ services:
       CYPRESS_PROJECT_ID: ${CYPRESS_PROJECT_ID}
       CYPRESS_RECORD_KEY: ${CYPRESS_RECORD_KEY}
   redis:
-    image: redis:3.0-alpine
+    image: redis:7-alpine
     restart: unless-stopped
   postgres:
-    image: postgres:9.5.6-alpine
+    image: pgautoupgrade/pgautoupgrade:15-alpine3.8
+    command: "postgres -c fsync=off -c full_page_writes=off -c synchronous_commit=OFF"
     restart: unless-stopped
     environment:
+      POSTGRES_HOST_AUTH_METHOD: "trust"
@@ -1,7 +1,11 @@
 #!/bin/bash
+set -e
 VERSION=$(jq -r .version package.json)
 VERSION_TAG=$VERSION.b$CIRCLE_BUILD_NUM
 
+export DOCKER_BUILDKIT=1
+export COMPOSE_DOCKER_CLI_BUILD=1
+
 docker login -u $DOCKER_USER -p $DOCKER_PASS
 
 if [ $CIRCLE_BRANCH = master ] || [ $CIRCLE_BRANCH = preview-image ]
@@ -14,4 +18,4 @@ else
   docker push redash/redash:$VERSION_TAG
 fi
 
-echo "Built: $VERSION_TAG"
+echo "Built: $VERSION_TAG"
.circleci/Dockerfile.cypress (deleted file)
@@ -1,12 +0,0 @@
FROM cypress/browsers:node14.0.0-chrome84

ENV APP /usr/src/app
WORKDIR $APP

COPY package.json package-lock.json $APP/
COPY viz-lib $APP/viz-lib
RUN npm ci > /dev/null

COPY . $APP

RUN ./node_modules/.bin/cypress verify
.circleci/config.yml (deleted file)
@@ -1,177 +0,0 @@
version: 2.0

build-docker-image-job: &build-docker-image-job
  docker:
    - image: circleci/node:12
  steps:
    - setup_remote_docker
    - checkout
    - run: sudo apt update
    - run: sudo apt install python3-pip
    - run: sudo pip3 install -r requirements_bundles.txt
    - run: .circleci/update_version
    - run: npm run bundle
    - run: .circleci/docker_build
jobs:
  backend-lint:
    docker:
      - image: circleci/python:3.7.0
    steps:
      - checkout
      - run: sudo pip install flake8
      - run: ./bin/flake8_tests.sh
  backend-unit-tests:
    environment:
      COMPOSE_FILE: .circleci/docker-compose.circle.yml
      COMPOSE_PROJECT_NAME: redash
    docker:
      - image: circleci/buildpack-deps:xenial
    steps:
      - setup_remote_docker
      - checkout
      - run:
          name: Build Docker Images
          command: |
            set -x
            docker-compose build --build-arg skip_ds_deps=true --build-arg skip_frontend_build=true
            docker-compose up -d
            sleep 10
      - run:
          name: Create Test Database
          command: docker-compose run --rm postgres psql -h postgres -U postgres -c "create database tests;"
      - run:
          name: List Enabled Query Runners
          command: docker-compose run --rm redash manage ds list_types
      - run:
          name: Run Tests
          command: docker-compose run --name tests redash tests --junitxml=junit.xml --cov-report xml --cov=redash --cov-config .coveragerc tests/
      - run:
          name: Copy Test Results
          command: |
            mkdir -p /tmp/test-results/unit-tests
            docker cp tests:/app/coverage.xml ./coverage.xml
            docker cp tests:/app/junit.xml /tmp/test-results/unit-tests/results.xml
          when: always
      - store_test_results:
          path: /tmp/test-results
      - store_artifacts:
          path: coverage.xml
  frontend-lint:
    environment:
      CYPRESS_INSTALL_BINARY: 0
      PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: 1
    docker:
      - image: circleci/node:12
    steps:
      - checkout
      - run: mkdir -p /tmp/test-results/eslint
      - run: npm ci
      - run: npm run lint:ci
      - store_test_results:
          path: /tmp/test-results
  frontend-unit-tests:
    environment:
      CYPRESS_INSTALL_BINARY: 0
      PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: 1
    docker:
      - image: circleci/node:12
    steps:
      - checkout
      - run: sudo apt update
      - run: sudo apt install python3-pip
      - run: sudo pip3 install -r requirements_bundles.txt
      - run: npm ci
      - run: npm run bundle
      - run:
          name: Run App Tests
          command: npm test
      - run:
          name: Run Visualizations Tests
          command: (cd viz-lib && npm test)
      - run: npm run lint
  frontend-e2e-tests:
    environment:
      COMPOSE_FILE: .circleci/docker-compose.cypress.yml
      COMPOSE_PROJECT_NAME: cypress
      PERCY_TOKEN_ENCODED: ZGRiY2ZmZDQ0OTdjMzM5ZWE0ZGQzNTZiOWNkMDRjOTk4Zjg0ZjMxMWRmMDZiM2RjOTYxNDZhOGExMjI4ZDE3MA==
      CYPRESS_PROJECT_ID_ENCODED: OTI0Y2th
      CYPRESS_RECORD_KEY_ENCODED: YzA1OTIxMTUtYTA1Yy00NzQ2LWEyMDMtZmZjMDgwZGI2ODgx
      CYPRESS_INSTALL_BINARY: 0
      PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: 1
    docker:
      - image: circleci/node:12
    steps:
      - setup_remote_docker
      - checkout
      - run:
          name: Enable Code Coverage report for master branch
          command: |
            if [ "$CIRCLE_BRANCH" = "master" ]; then
              echo 'export CODE_COVERAGE=true' >> $BASH_ENV
              source $BASH_ENV
            fi
      - run:
          name: Install npm dependencies
          command: |
            npm ci
      - run:
          name: Setup Redash server
          command: |
            npm run cypress build
            npm run cypress start -- --skip-db-seed
            docker-compose run cypress npm run cypress db-seed
      - run:
          name: Execute Cypress tests
          command: npm run cypress run-ci
      - run:
          name: "Failure: output container logs to console"
          command: |
            docker-compose logs
          when: on_fail
      - run:
          name: Copy Code Coverage results
          command: |
            docker cp cypress:/usr/src/app/coverage ./coverage || true
          when: always
      - store_artifacts:
          path: coverage
  build-docker-image: *build-docker-image-job
  build-preview-docker-image: *build-docker-image-job
workflows:
  version: 2
  build:
    jobs:
      - backend-lint
      - backend-unit-tests:
          requires:
            - backend-lint
      - frontend-lint
      - frontend-unit-tests:
          requires:
            - backend-lint
            - frontend-lint
      - frontend-e2e-tests:
          requires:
            - frontend-lint
      - build-preview-docker-image:
          requires:
            - backend-unit-tests
            - frontend-unit-tests
            - frontend-e2e-tests
          filters:
            branches:
              only:
                - master
      - hold:
          type: approval
          requires:
            - backend-unit-tests
            - frontend-unit-tests
            - frontend-e2e-tests
          filters:
            branches:
              only:
                - /release\/.*/
      - build-docker-image:
          requires:
            - hold
.github/ISSUE_TEMPLATE/---bug_report.md (vendored)
@@ -7,10 +7,10 @@ about: Report reproducible software issues so we can improve
 
 We use GitHub only for bug reports 🐛
 
-Anything else should be posted to https://discuss.redash.io 👫
+Anything else should be a discussion: https://github.com/getredash/redash/discussions/ 👫
 
-🚨For support, help & questions use https://discuss.redash.io/c/support
-💡For feature requests & ideas use https://discuss.redash.io/c/feature-requests
+🚨For support, help & questions use https://github.com/getredash/redash/discussions/categories/q-a
+💡For feature requests & ideas use https://github.com/getredash/redash/discussions/categories/ideas
 
 **Found a security vulnerability?** Please email security@redash.io to report any security vulnerabilities. We will acknowledge receipt of your vulnerability and strive to send you regular updates about our progress. If you're curious about the status of your disclosure please feel free to email us again. If you want to encrypt your disclosure email, you can use this PGP key.
 
 
.github/ISSUE_TEMPLATE/--anything_else.md (vendored)
@@ -1,17 +1,17 @@
 ---
 name: "\U0001F4A1Anything else"
-about: "For help, support, features & ideas - please use https://discuss.redash.io \U0001F46B "
+about: "For help, support, features & ideas - please use Discussions \U0001F46B "
 labels: "Support Question"
 ---
 
 We use GitHub only for bug reports 🐛
 
-Anything else should be posted to https://discuss.redash.io 👫
+Anything else should be a discussion: https://github.com/getredash/redash/discussions/ 👫
 
-🚨For support, help & questions use https://discuss.redash.io/c/support
-💡For feature requests & ideas use https://discuss.redash.io/c/feature-requests
+🚨For support, help & questions use https://github.com/getredash/redash/discussions/categories/q-a
+💡For feature requests & ideas use https://github.com/getredash/redash/discussions/categories/ideas
 
 Alternatively, check out these resources below. Thanks! 😁.
 
-- [Forum](https://disucss.redash.io)
+- [Discussions](https://github.com/getredash/redash/discussions/)
 - [Knowledge Base](https://redash.io/help)
 
.github/PULL_REQUEST_TEMPLATE.md (vendored)
@@ -1,15 +1,26 @@
-## What type of PR is this? (check all applicable)
-<!-- Please leave only what's applicable -->
+## What type of PR is this?
+<!-- Check all that apply, delete what doesn't apply. -->
 
 - [ ] Refactor
 - [ ] Feature
 - [ ] Bug Fix
-- [ ] New Query Runner (Data Source)
+- [ ] New Query Runner (Data Source)
 - [ ] New Alert Destination
 - [ ] Other
 
 ## Description
+<!-- In case of adding / modifying a query runner, please specify which version(s) you expect are compatible. -->
+
+## How is this tested?
+
+- [ ] Unit tests (pytest, jest)
+- [ ] E2E Tests (Cypress)
+- [ ] Manually
+- [ ] N/A
+
+<!-- If Manually, please describe. -->
 
 ## Related Tickets & Documents
 <!-- If applicable, please include a link to your documentation PR against getredash/website -->
 
 ## Mobile & Desktop Screenshots/Recordings (if there are UI changes)
 
.github/support.yml (vendored, deleted file)
@@ -1,23 +0,0 @@
# Configuration for Support Requests - https://github.com/dessant/support-requests

# Label used to mark issues as support requests
supportLabel: Support Question

# Comment to post on issues marked as support requests, `{issue-author}` is an
# optional placeholder. Set to `false` to disable
supportComment: >
  :wave: @{issue-author}, we use the issue tracker exclusively for bug reports
  and planned work. However, this issue appears to be a support request.
  Please use [our forum](https://discuss.redash.io) to get help.

# Close issues marked as support requests
close: true

# Lock issues marked as support requests
lock: false

# Assign `off-topic` as the reason for locking. Set to `false` to disable
setLockReason: true

# Repository to extend settings from
# _extends: repo
.github/workflows/ci.yml (vendored, new file)
@@ -0,0 +1,153 @@
name: Tests
on:
  push:
    branches:
      - master
  pull_request:
env:
  NODE_VERSION: 16.20.1
jobs:
  backend-lint:
    runs-on: ubuntu-22.04
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 1
      - uses: actions/setup-python@v4
        with:
          python-version: '3.8'
      - run: sudo pip install flake8==6.1.0 black==23.1.0 isort==5.12.0
      - run: flake8 .
      - run: black --check .
      - run: isort --check-only --diff .

  backend-unit-tests:
    runs-on: ubuntu-22.04
    needs: backend-lint
    env:
      COMPOSE_FILE: .ci/docker-compose.ci.yml
      COMPOSE_PROJECT_NAME: redash
      COMPOSE_DOCKER_CLI_BUILD: 1
      DOCKER_BUILDKIT: 1
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 1
      - name: Build Docker Images
        run: |
          set -x
          docker-compose build --build-arg test_all_deps=true --build-arg skip_frontend_build=true
          docker-compose up -d
          sleep 10
      - name: Create Test Database
        run: docker-compose -p redash run --rm postgres psql -h postgres -U postgres -c "create database tests;"
      - name: List Enabled Query Runners
        run: docker-compose -p redash run --rm redash manage ds list_types
      - name: Run Tests
        run: docker-compose -p redash run --name tests redash tests --junitxml=junit.xml --cov-report=xml --cov=redash --cov-config=.coveragerc tests/
      - name: Copy Test Results
        run: |
          mkdir -p /tmp/test-results/unit-tests
          docker cp tests:/app/coverage.xml ./coverage.xml
          docker cp tests:/app/junit.xml /tmp/test-results/unit-tests/results.xml
      - name: Upload coverage reports to Codecov
        uses: codecov/codecov-action@v3
      - name: Store Test Results
        uses: actions/upload-artifact@v3
        with:
          name: test-results
          path: /tmp/test-results
      - name: Store Coverage Results
        uses: actions/upload-artifact@v3
        with:
          name: coverage
          path: coverage.xml

  frontend-lint:
    runs-on: ubuntu-22.04
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 1
      - uses: actions/setup-node@v3
        with:
          node-version: ${{ env.NODE_VERSION }}
          cache: 'yarn'
      - name: Install Dependencies
        run: |
          npm install --global --force yarn@1.22.19
          yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
      - name: Run Lint
        run: yarn lint:ci
      - name: Store Test Results
        uses: actions/upload-artifact@v3
        with:
          name: test-results
          path: /tmp/test-results

  frontend-unit-tests:
    runs-on: ubuntu-22.04
    needs: frontend-lint
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 1
      - uses: actions/setup-node@v3
        with:
          node-version: ${{ env.NODE_VERSION }}
          cache: 'yarn'
      - name: Install Dependencies
        run: |
          npm install --global --force yarn@1.22.19
          yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
      - name: Run App Tests
        run: yarn test
      - name: Run Visualizations Tests
        run: cd viz-lib && yarn test
      - run: yarn lint

  frontend-e2e-tests:
    runs-on: ubuntu-22.04
    needs: frontend-lint
    env:
      COMPOSE_FILE: .ci/docker-compose.cypress.yml
      COMPOSE_PROJECT_NAME: cypress
      PERCY_TOKEN_ENCODED: ZGRiY2ZmZDQ0OTdjMzM5ZWE0ZGQzNTZiOWNkMDRjOTk4Zjg0ZjMxMWRmMDZiM2RjOTYxNDZhOGExMjI4ZDE3MA==
      CYPRESS_PROJECT_ID_ENCODED: OTI0Y2th
      CYPRESS_RECORD_KEY_ENCODED: YzA1OTIxMTUtYTA1Yy00NzQ2LWEyMDMtZmZjMDgwZGI2ODgx
      CYPRESS_INSTALL_BINARY: 0
      PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: 1
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 1
      - uses: actions/setup-node@v3
        with:
          node-version: ${{ env.NODE_VERSION }}
          cache: 'yarn'
      - name: Enable Code Coverage Report For Master Branch
        if: endsWith(github.ref, '/master')
        run: |
          echo "CODE_COVERAGE=true" >> $GITHUB_ENV
      - name: Install Dependencies
        run: |
          npm install --global --force yarn@1.22.19
          yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
      - name: Setup Redash Server
        run: |
          set -x
          yarn cypress build
          yarn cypress start -- --skip-db-seed
          docker-compose run cypress yarn cypress db-seed
      - name: Execute Cypress Tests
        run: yarn cypress run-ci
      - name: "Failure: output container logs to console"
        if: failure()
        run: docker-compose logs
      - name: Copy Code Coverage Results
        run: docker cp cypress:/usr/src/app/coverage ./coverage || true
      - name: Store Coverage Results
        uses: actions/upload-artifact@v3
        with:
          name: coverage
          path: coverage
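Because every step in the backend job is plain `docker-compose`, it can be reproduced outside GitHub Actions. A sketch that mirrors the steps above, assuming Docker is available on the host:

```bash
# Mirror the backend-unit-tests job locally, using the same compose file.
export COMPOSE_FILE=.ci/docker-compose.ci.yml COMPOSE_PROJECT_NAME=redash
export COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1
docker-compose build --build-arg test_all_deps=true --build-arg skip_frontend_build=true
docker-compose up -d && sleep 10
docker-compose -p redash run --rm postgres psql -h postgres -U postgres -c "create database tests;"
docker-compose -p redash run --name tests redash tests
```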
.github/workflows/periodic-snapshot.yml (vendored, new file)
@@ -0,0 +1,26 @@
name: Periodic Snapshot

# 10 minutes after midnight on the first of every month
on:
  schedule:
    - cron: "10 0 1 * *"

permissions:
  contents: write

jobs:
  bump-version-and-tag:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - run: |
          date="$(date +%y.%m).0-dev"
          gawk -i inplace -F: -v q=\" -v tag=$date '/^  "version": / { print $1 FS, q tag q ","; next} { print }' package.json
          gawk -i inplace -F= -v q=\" -v tag=$date '/^__version__ =/ { print $1 FS, q tag q; next} { print }' redash/__init__.py
          git config user.name github-actions
          git config user.email github-actions@github.com
          git add package.json redash/__init__.py
          git commit -m "Shapshot: ${date}"
          git push origin
          git tag $date
          git push origin $date
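The two gawk one-liners are the entire version-bump mechanism: they rewrite the `"version"` line in package.json and the `__version__` line in redash/__init__.py in place. A dry-run sketch of the first one (dropping `-i inplace` so nothing is modified) for checking the substitution locally:

```bash
# Print what the workflow would write into package.json; no files change.
date="$(date +%y.%m).0-dev"
gawk -F: -v q=\" -v tag=$date \
  '/^  "version": / { print $1 FS, q tag q ","; next } { print }' \
  package.json | grep '"version"'
```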
.pre-commit-config.yaml (new file)
@@ -0,0 +1,19 @@
repos:
  - repo: https://github.com/PyCQA/isort
    rev: 5.12.0
    hooks:
      - id: isort
  - repo: https://github.com/psf/black
    rev: 23.1.0
    hooks:
      - id: black
        language_version: python3
  - repo: https://github.com/pycqa/flake8
    rev: 6.1.0
    hooks:
      - id: flake8
        exclude: "migration/.*|.git|viz-lib|node_modules|migrations|bin/upgrade"
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.4.0
    hooks:
      - id: requirements-txt-fixer
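These hooks are what the new `make format` target (see the Makefile changes further down) runs; installing them locally keeps commits clean before CI sees them. A minimal sketch using the standard pre-commit CLI:

```bash
pip install pre-commit       # one-time setup
pre-commit install           # register the git hook in this clone
pre-commit run --all-files   # same invocation as `make format`
```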
.restyled.yaml
@@ -57,6 +57,9 @@ restylers:
       - migrations/versions
   - name: prettier
     image: restyled/restyler-prettier:v1.19.1-2
+    command:
+      - prettier
+      - --write
     include:
       - client/app/**/*.js
       - client/app/**/*.jsx
.yarn/.gitignore (vendored, new file)
@@ -0,0 +1,2 @@
*
!.gitignore
CONTRIBUTING.md
@@ -4,19 +4,7 @@ Thank you for taking the time to contribute! :tada::+1:
 
 The following is a set of guidelines for contributing to Redash. These are guidelines, not rules, please use your best judgement and feel free to propose changes to this document in a pull request.
 
-## Quick Links:
-
-- [Feature Requests](https://discuss.redash.io/c/feature-requests)
-- [Documentation](https://redash.io/help/)
-- [Blog](https://blog.redash.io/)
-- [Twitter](https://twitter.com/getredash)
-
----
-:star: If you already here and love the project, please make sure to press the Star button. :star:
-
----
-
-
+:star: If you're already here and love the project, please make sure to press the Star button. :star:
 ## Table of Contents
@@ -32,6 +20,13 @@ The following is a set of guidelines for contributing to Redash. These are guide
 - [Release Method](#release-method)
 - [Code of Conduct](#code-of-conduct)
 
+## Quick Links:
+
+- [User Forum](https://github.com/getredash/redash/discussions)
+- [Documentation](https://redash.io/help/)
+
+
+---
 ## How can I contribute?
 
 ### Reporting Bugs
@@ -39,25 +34,54 @@ The following is a set of guidelines for contributing to Redash. These are guide
 When creating a new bug report, please make sure to:
 
 - Search for existing issues first. If you find a previous report of your issue, please update the existing issue with additional information instead of creating a new one.
-- If you are not sure if your issue is really a bug or just some configuration/setup problem, please start a discussion in [the support forum](https://discuss.redash.io/c/support) first. Unless you can provide clear steps to reproduce, it's probably better to start with a thread in the forum and later to open an issue.
+- If you are not sure if your issue is really a bug or just some configuration/setup problem, please start a [Q&A discussion](https://github.com/getredash/redash/discussions/new?category=q-a) first. Unless you can provide clear steps to reproduce, it's probably better to start with a discussion and later to open an issue.
 - If you still decide to open an issue, please review the template and guidelines and include as much details as possible.
 
 ### Suggesting Enhancements / Feature Requests
 
 If you would like to suggest an enhancement or ask for a new feature:
 
-- Please check [the forum](https://discuss.redash.io/c/feature-requests/5) for existing threads about what you want to suggest/ask. If there is, feel free to upvote it to signal interest or add your comments.
+- Please check [the Ideas discussions](https://github.com/getredash/redash/discussions/categories/ideas) for existing threads about what you want to suggest/ask. If there is, feel free to upvote it to signal interest or add your comments.
 - If there is no open thread, you're welcome to start one to have a discussion about what you want to suggest. Try to provide as much details and context as possible and include information about *the problem you want to solve* rather only *your proposed solution*.
 
 ### Pull Requests
 
-- **Code contributions are welcomed**. For big changes or significant features, it's usually better to reach out first and discuss what you want to implement and how (we recommend reading: [Pull Request First](https://medium.com/practical-blend/pull-request-first-f6bb667a9b6#.ozlqxvj36)). This to make sure that what you want to implement is aligned with our goals for the project and that no one else is already working on it.
-- Include screenshots and animated GIFs in your pull request whenever possible.
+**Code contributions are welcomed**. For big changes or significant features, it's usually better to reach out first and discuss what you want to implement and how (we recommend reading: [Pull Request First](https://medium.com/practical-blend/pull-request-first-f6bb667a9b6#.ozlqxvj36)). This is to make sure that what you want to implement is aligned with our goals for the project and that no one else is already working on it.
+
+#### Criteria for Review / Merging
+
+When you open your pull request, please follow this repository’s PR template carefully:
+
+- Indicate the type of change
+  - If you implement multiple unrelated features, bug fixes, or refactors please split them into individual pull requests.
+- Describe the change
+  - If fixing a bug, please describe the bug or link to an existing github issue / forum discussion
+- Include UI screenshots / GIFs whenever possible
+- Please add [documentation](#documentation) for new features or changes in functionality along with the code.
+- Please follow existing code style:
+  - Python: we use [Black](https://github.com/psf/black) to auto format the code.
+  - Javascript: we use [Prettier](https://github.com/prettier/prettier) to auto-format the code.
+
+
+#### Initial Review (1 week)
+
+During this phase, a team member will apply the “Team Review” label if a pull request meets our criteria or a “Needs More Information” label if not. If more information is required, the team member will comment which criteria have not been met.
+
+If your pull request receives the “Needs More Information” label, please make the requested changes and then remove the label. This resets the 1 week timer for an initial review.
+
+Stale pull requests that remain untouched in “Needs More Information” for more than 4 weeks will be closed.
+
+If a team member closes your pull request, you may reopen it after you have made the changes requested during initial review. After you make these changes, remove the “Needs More Information” label. This again resets the timer for another initial review.
+
+#### Full Review (2 weeks)
+
+After the “Team Review” label is applied, a member of the core team will review the PR within 2 weeks.
+
+Reviews will approve, request changes, or ask questions to discuss areas of uncertainty. After you’ve responded, a member of the team will re-review within one week.
+
+#### Merging (1 week)
+
+After your pull request has been approved, a member of the core team will merge the pull request within a week.
 
 ### Documentation
 
 The project's documentation can be found at [https://redash.io/help/](https://redash.io/help/). The [documentation sources](https://github.com/getredash/website/tree/master/src/pages/kb) are hosted on GitHub. To contribute edits / new pages, you can use GitHub's interface. Click the "Edit on GitHub" link on the documentation page to quickly open the edit interface.
 
Dockerfile
@@ -1,4 +1,6 @@
-FROM node:12 as frontend-builder
+FROM node:16.20.1 as frontend-builder
+
+RUN npm install --global --force yarn@1.22.19
 
 # Controls whether to build the frontend assets
 ARG skip_frontend_build
@@ -10,20 +12,20 @@ RUN useradd -m -d /frontend redash
 USER redash
 
 WORKDIR /frontend
-COPY --chown=redash package.json package-lock.json /frontend/
+COPY --chown=redash package.json yarn.lock .yarnrc /frontend/
 COPY --chown=redash viz-lib /frontend/viz-lib
 
 # Controls whether to instrument code for coverage information
 ARG code_coverage
 ENV BABEL_ENV=${code_coverage:+test}
 
-RUN if [ "x$skip_frontend_build" = "x" ] ; then npm ci --unsafe-perm; fi
+RUN if [ "x$skip_frontend_build" = "x" ] ; then yarn --frozen-lockfile --network-concurrency 1; fi
 
 COPY --chown=redash client /frontend/client
 COPY --chown=redash webpack.config.js /frontend/
-RUN if [ "x$skip_frontend_build" = "x" ] ; then npm run build; else mkdir -p /frontend/client/dist && touch /frontend/client/dist/multi_org.html && touch /frontend/client/dist/index.html; fi
+RUN if [ "x$skip_frontend_build" = "x" ] ; then yarn build; else mkdir -p /frontend/client/dist && touch /frontend/client/dist/multi_org.html && touch /frontend/client/dist/index.html; fi
 
-FROM python:3.7-slim-buster
+FROM python:3.8-slim-buster
 
 EXPOSE 5000
 
@@ -31,72 +33,79 @@ EXPOSE 5000
 ARG skip_ds_deps
 # Controls whether to install dev dependencies.
 ARG skip_dev_deps
+# Controls whether to install all dependencies for testing.
+ARG test_all_deps
 
 RUN useradd --create-home redash
 
 # Ubuntu packages
 RUN apt-get update && \
-  apt-get install -y \
-    curl \
-    gnupg \
-    build-essential \
-    pwgen \
-    libffi-dev \
-    sudo \
-    git-core \
-    wget \
-    # Postgres client
-    libpq-dev \
-    # ODBC support:
-    g++ unixodbc-dev \
-    # for SAML
-    xmlsec1 \
-    # Additional packages required for data sources:
-    libssl-dev \
-    default-libmysqlclient-dev \
-    freetds-dev \
-    libsasl2-dev \
-    unzip \
-    libsasl2-modules-gssapi-mit && \
-  # MSSQL ODBC Driver:
-  curl https://packages.microsoft.com/keys/microsoft.asc | apt-key add - && \
-  curl https://packages.microsoft.com/config/debian/10/prod.list > /etc/apt/sources.list.d/mssql-release.list && \
-  apt-get update && \
-  ACCEPT_EULA=Y apt-get install -y msodbcsql17 && \
+  apt-get install -y --no-install-recommends \
+    curl \
+    gnupg \
+    build-essential \
+    pwgen \
+    libffi-dev \
+    sudo \
+    git-core \
+    # Postgres client
+    libpq-dev \
+    # ODBC support:
+    g++ unixodbc-dev \
+    # for SAML
+    xmlsec1 \
+    # Additional packages required for data sources:
+    libssl-dev \
+    default-libmysqlclient-dev \
+    freetds-dev \
+    libsasl2-dev \
+    unzip \
+    libsasl2-modules-gssapi-mit && \
   apt-get clean && \
   rm -rf /var/lib/apt/lists/*
 
-ARG databricks_odbc_driver_url=https://databricks.com/wp-content/uploads/2.6.10.1010-2/SimbaSparkODBC-2.6.10.1010-2-Debian-64bit.zip
-RUN wget --quiet $databricks_odbc_driver_url -O /tmp/simba_odbc.zip \
+ARG TARGETPLATFORM
+ARG databricks_odbc_driver_url=https://databricks-bi-artifacts.s3.us-east-2.amazonaws.com/simbaspark-drivers/odbc/2.6.26/SimbaSparkODBC-2.6.26.1045-Debian-64bit.zip
+RUN if [ "$TARGETPLATFORM" = "linux/amd64" ]; then \
+  curl https://packages.microsoft.com/keys/microsoft.asc | apt-key add - \
+  && curl https://packages.microsoft.com/config/debian/10/prod.list > /etc/apt/sources.list.d/mssql-release.list \
+  && apt-get update \
+  && ACCEPT_EULA=Y apt-get install -y --no-install-recommends msodbcsql17 \
+  && apt-get clean \
+  && rm -rf /var/lib/apt/lists/* \
+  && curl "$databricks_odbc_driver_url" --location --output /tmp/simba_odbc.zip \
+  && chmod 600 /tmp/simba_odbc.zip \
-  && unzip /tmp/simba_odbc.zip -d /tmp/ \
-  && dpkg -i /tmp/SimbaSparkODBC-*/*.deb \
-  && echo "[Simba]\nDriver = /opt/simba/spark/lib/64/libsparkodbc_sb64.so" >> /etc/odbcinst.ini \
+  && unzip /tmp/simba_odbc.zip -d /tmp/simba \
+  && dpkg -i /tmp/simba/*.deb \
+  && printf "[Simba]\nDriver = /opt/simba/spark/lib/64/libsparkodbc_sb64.so" >> /etc/odbcinst.ini \
   && rm /tmp/simba_odbc.zip \
-  && rm -rf /tmp/SimbaSparkODBC*
+  && rm -rf /tmp/simba; fi
 
 WORKDIR /app
 
-# Disalbe PIP Cache and Version Check
+# Disable PIP Cache and Version Check
 ENV PIP_DISABLE_PIP_VERSION_CHECK=1
 ENV PIP_NO_CACHE_DIR=1
 
 # rollback pip version to avoid legacy resolver problem
-RUN pip install pip==20.2.4;
+RUN pip install pip==23.1.2;
 
 # We first copy only the requirements file, to avoid rebuilding on every file change.
 COPY requirements_all_ds.txt ./
-RUN if [ "x$skip_ds_deps" = "x" ] ; then pip install -r requirements_all_ds.txt ; else echo "Skipping pip install -r requirements_all_ds.txt" ; fi
+RUN if [ "x$skip_ds_deps" = "x" ] ; then cat requirements_all_ds.txt | sed -e '/^\s*#.*$/d' -e '/^\s*$/d' | xargs -n 1 pip install || true ; else echo "Skipping pip install -r requirements_all_ds.txt" ; fi
 
-COPY requirements_bundles.txt requirements_dev.txt ./
+COPY requirements_dev.txt ./
 RUN if [ "x$skip_dev_deps" = "x" ] ; then pip install -r requirements_dev.txt ; fi
 
 COPY requirements.txt ./
 RUN pip install -r requirements.txt
 
-COPY . /app
-COPY --from=frontend-builder /frontend/client/dist /app/client/dist
-RUN chown -R redash /app
+RUN if [ "x$test_all_deps" != "x" ] ; then pip3 install -r requirements.txt -r requirements_dev.txt -r requirements_all_ds.txt ; fi
+
+COPY --chown=redash . /app
+COPY --from=frontend-builder --chown=redash /frontend/client/dist /app/client/dist
+RUN chown redash /app
 USER redash
 
 ENTRYPOINT ["/app/bin/docker-entrypoint"]
 
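The build args give several image variants from one Dockerfile: any non-empty value makes the corresponding `if [ "x$arg" = "x" ]` test skip its step. A sketch of a slim development build that skips both the frontend compile and the data-source drivers (the tag name is illustrative only):

```bash
# Slim dev image: no frontend build, no data-source driver installs.
DOCKER_BUILDKIT=1 docker build \
  --build-arg skip_frontend_build=true \
  --build-arg skip_ds_deps=true \
  -t redash:dev-slim .
```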
LICENSE.borders (new file)
@@ -0,0 +1,3 @@
The Bahrain map data used in Redash was downloaded from
https://cartographyvectors.com/map/857-bahrain-detailed-boundary in PR #6192.
* Free for personal and commercial purpose with attribution.
Makefile
@@ -1,10 +1,10 @@
-.PHONY: compose_build up test_db create_database clean down bundle tests lint backend-unit-tests frontend-unit-tests test build watch start redis-cli bash
+.PHONY: compose_build up test_db create_database clean down tests lint backend-unit-tests frontend-unit-tests test build watch start redis-cli bash
 
-compose_build:
-	docker-compose build
+compose_build: .env
+	COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker-compose build
 
 up:
-	docker-compose up -d --build
+	COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker-compose up -d --build
 
 test_db:
 	@for i in `seq 1 5`; do \
@@ -13,7 +13,7 @@ test_db:
 	done
 	docker-compose exec postgres sh -c 'psql -U postgres -c "drop database if exists tests;" && psql -U postgres -c "create database tests;"'
 
-create_database:
+create_database: .env
 	docker-compose run server create_db
 
 clean:
@@ -22,8 +22,13 @@ clean:
 down:
 	docker-compose down
 
-bundle:
-	docker-compose run server bin/bundle-extensions
+.env:
+	printf "REDASH_COOKIE_SECRET=`pwgen -1s 32`\nREDASH_SECRET_KEY=`pwgen -1s 32`\n" >> .env
+
+env: .env
+
+format:
+	pre-commit run --all-files
 
 tests:
 	docker-compose run server tests
@@ -34,21 +39,20 @@ lint:
 backend-unit-tests: up test_db
 	docker-compose run --rm --name tests server tests
 
-frontend-unit-tests: bundle
-	CYPRESS_INSTALL_BINARY=0 PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1 npm ci
-	npm run bundle
-	npm test
+frontend-unit-tests:
+	CYPRESS_INSTALL_BINARY=0 PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1 yarn --frozen-lockfile
+	yarn test
 
 test: lint backend-unit-tests frontend-unit-tests
 
-build: bundle
-	npm run build
+build:
+	yarn build
 
-watch: bundle
-	npm run watch
+watch:
+	yarn watch
 
-start: bundle
-	npm run start
+start:
+	yarn start
 
 redis-cli:
 	docker-compose run --rm redis redis-cli -h redis
 
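The new `.env` file target is a classic make idiom: the recipe only runs while the file is missing, so the random secrets are generated once and reused afterwards. A quick sketch of the observable behaviour:

```bash
make env      # first run: writes REDASH_COOKIE_SECRET and REDASH_SECRET_KEY
make env      # second run: no-op, .env already exists so the target is up to date
cat .env      # two KEY=<32 random chars> lines produced by pwgen
```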
README.md
@@ -4,7 +4,7 @@
 
 [](https://redash.io/help/)
 [](https://datree.io/?src=badge)
-[](https://circleci.com/gh/getredash/redash/tree/master)
+[](https://github.com/getredash/redash/actions)
 
 Redash is designed to enable anyone, regardless of the level of technical sophistication, to harness the power of data big and small. SQL users leverage Redash to explore, query, visualize, and share data from any data sources. Their work in turn enables anybody in their organization to use the data. Every day, millions of users at thousands of organizations around the world use Redash to develop insights and make data-driven decisions.
 
@@ -32,37 +32,51 @@ Redash features:
 Redash supports more than 35 SQL and NoSQL [data sources](https://redash.io/help/data-sources/supported-data-sources). It can also be extended to support more. Below is a list of built-in sources:
 
 - Amazon Athena
+- Amazon CloudWatch / Insights
 - Amazon DynamoDB
 - Amazon Redshift
 - ArangoDB
 - Axibase Time Series Database
-- Cassandra
+- Apache Cassandra
 - ClickHouse
 - CockroachDB
 - Couchbase
 - CSV
-- Databricks (Apache Spark)
+- Databricks
 - DB2 by IBM
-- Druid
 - Dgraph
+- Apache Drill
+- Apache Druid
+- Eccenca Corporate Memory
 - Elasticsearch
+- Exasol
 - Microsoft Excel
+- Firebolt
+- Databend
 - Google Analytics
 - Google BigQuery
 - Google Spreadsheets
 - Graphite
 - Greenplum
-- Hive
-- Impala
+- Apache Hive
+- Apache Impala
 - InfluxDB
-- JIRA
+- IBM Netezza Performance Server
+- JIRA (JQL)
 - JSON
 - Apache Kylin
 - OmniSciDB (Formerly MapD)
 - MariaDB
 - MemSQL
 - Microsoft Azure Data Warehouse / Synapse
 - Microsoft Azure SQL Database
+- Microsoft Azure Data Explorer / Kusto
 - Microsoft SQL Server
 - MongoDB
 - MySQL
 - Oracle
+- Apache Phoenix
+- Apache Pinot
 - PostgreSQL
 - Presto
 - Prometheus
@@ -73,9 +87,12 @@ Redash supports more than 35 SQL and NoSQL [data sources](https://redash.io/help
 - ScyllaDB
 - Shell Scripts
 - Snowflake
+- SPARQL
 - SQLite
 - TiDB
 - TreasureData
+- Trino
+- Uptycs
 - Vertica
 - Yandex AppMetrrica
 - Yandex Metrica
@@ -83,12 +100,13 @@ Redash supports more than 35 SQL and NoSQL [data sources](https://redash.io/help
 ## Getting Help
 
 * Issues: https://github.com/getredash/redash/issues
-* Discussion Forum: https://discuss.redash.io/
+* Discussion Forum: https://github.com/getredash/redash/discussions/
+* Development Discussion: https://discord.gg/tN5MdmfGBp
 
 ## Reporting Bugs and Contributing Code
 
 * Want to report a bug or request a feature? Please open [an issue](https://github.com/getredash/redash/issues/new).
-* Want to help us build **_Redash_**? Fork the project, edit in a [dev environment](https://redash.io/help-onpremise/dev/guide.html) and make a pull request. We need all the help we can get!
+* Want to help us build **_Redash_**? Fork the project, edit in a [dev environment](https://github.com/getredash/redash/wiki/Local-development-setup) and make a pull request. We need all the help we can get!
 
 ## Security
 
bin/bundle-extensions (deleted file)
@@ -1,115 +0,0 @@
#!/usr/bin/env python3
"""Copy bundle extension files to the client/app/extension directory"""
import logging
import os
from pathlib import Path
from shutil import copy
from collections import OrderedDict as odict

import importlib_metadata
import importlib_resources

# Name of the subdirectory
BUNDLE_DIRECTORY = "bundle"

logger = logging.getLogger(__name__)


# Make a directory for extensions and set it as an environment variable
# to be picked up by webpack.
extensions_relative_path = Path("client", "app", "extensions")
extensions_directory = Path(__file__).parent.parent / extensions_relative_path

if not extensions_directory.exists():
    extensions_directory.mkdir()
os.environ["EXTENSIONS_DIRECTORY"] = str(extensions_relative_path)


def entry_point_module(entry_point):
    """Returns the dotted module path for the given entry point"""
    return entry_point.pattern.match(entry_point.value).group("module")


def load_bundles():
    """"Load bundles as defined in Redash extensions.

    The bundle entry point can be defined as a dotted path to a module
    or a callable, but it won't be called but just used as a means
    to find the files under its file system path.

    The name of the directory it looks for files in is "bundle".

    So a Python package with an extension bundle could look like this::

        my_extensions/
        ├── __init__.py
        └── wide_footer
            ├── __init__.py
            └── bundle
                ├── extension.js
                └── styles.css

    and would then need to register the bundle with an entry point
    under the "redash.bundles" group, e.g. in your setup.py::

        setup(
            # ...
            entry_points={
                "redash.bundles": [
                    "wide_footer = my_extensions.wide_footer",
                ]
                # ...
            },
            # ...
        )

    """
    bundles = odict()
    for entry_point in importlib_metadata.entry_points().get("redash.bundles", []):
        logger.info('Loading Redash bundle "%s".', entry_point.name)
        module = entry_point_module(entry_point)
        # Try to get a list of bundle files
        try:
            bundle_dir = importlib_resources.files(module).joinpath(BUNDLE_DIRECTORY)
        except (ImportError, TypeError):
            # Module isn't a package, so can't have a subdirectory/-package
            logger.error(
                'Redash bundle module "%s" could not be imported: "%s"',
                entry_point.name,
                module,
            )
            continue
        if not bundle_dir.is_dir():
            logger.error(
                'Redash bundle directory "%s" could not be found or is not a directory: "%s"',
                entry_point.name,
                bundle_dir,
            )
            continue
        bundles[entry_point.name] = list(bundle_dir.rglob("*"))
    return bundles


bundles = load_bundles().items()
if bundles:
    print("Number of extension bundles found: {}".format(len(bundles)))
else:
    print("No extension bundles found.")

for bundle_name, paths in bundles:
    # Shortcut in case not paths were found for the bundle
    if not paths:
        print('No paths found for bundle "{}".'.format(bundle_name))
        continue

    # The destination for the bundle files with the entry point name as the subdirectory
    destination = Path(extensions_directory, bundle_name)
    if not destination.exists():
        destination.mkdir()

    # Copy the bundle directory from the module to its destination.
    print('Copying "{}" bundle to {}:'.format(bundle_name, destination.resolve()))
    for src_path in paths:
        dest_path = destination / src_path.name
        print(" - {} -> {}".format(src_path, dest_path))
        copy(str(src_path), str(dest_path))
bin/docker-entrypoint
@@ -22,6 +22,19 @@ worker() {
   exec supervisord -c worker.conf
 }
 
+workers_healthcheck() {
+  WORKERS_COUNT=${WORKERS_COUNT}
+  echo "Checking active workers count against $WORKERS_COUNT..."
+  ACTIVE_WORKERS_COUNT=`echo $(rq info --url $REDASH_REDIS_URL -R | grep workers | grep -oP ^[0-9]+)`
+  if [ "$ACTIVE_WORKERS_COUNT" -lt "$WORKERS_COUNT" ]; then
+    echo "$ACTIVE_WORKERS_COUNT workers are active, Exiting"
+    exit 1
+  else
+    echo "$ACTIVE_WORKERS_COUNT workers are active"
+    exit 0
+  fi
+}
+
 dev_worker() {
   echo "Starting dev RQ worker..."
 
@@ -32,7 +45,8 @@ server() {
   # Recycle gunicorn workers every n-th request. See http://docs.gunicorn.org/en/stable/settings.html#max-requests for more details.
   MAX_REQUESTS=${MAX_REQUESTS:-1000}
   MAX_REQUESTS_JITTER=${MAX_REQUESTS_JITTER:-100}
-  exec /usr/local/bin/gunicorn -b 0.0.0.0:5000 --name redash -w${REDASH_WEB_WORKERS:-4} redash.wsgi:app --max-requests $MAX_REQUESTS --max-requests-jitter $MAX_REQUESTS_JITTER
+  TIMEOUT=${REDASH_GUNICORN_TIMEOUT:-60}
+  exec /usr/local/bin/gunicorn -b 0.0.0.0:5000 --name redash -w${REDASH_WEB_WORKERS:-4} redash.wsgi:app --max-requests $MAX_REQUESTS --max-requests-jitter $MAX_REQUESTS_JITTER --timeout $TIMEOUT
 }
 
 create_db() {
@@ -75,6 +89,10 @@ case "$1" in
     shift
     worker
     ;;
+  workers_healthcheck)
+    shift
+    workers_healthcheck
+    ;;
   server)
     shift
     server
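The new subcommand is wired into the entrypoint's `case` dispatch, so it can back a container healthcheck. A hedged usage sketch — the service name and `WORKERS_COUNT` value are assumptions for illustration:

```bash
# Exits 0 when at least $WORKERS_COUNT rq workers are active, 1 otherwise.
docker-compose exec -e WORKERS_COUNT=4 worker \
  /app/bin/docker-entrypoint workers_healthcheck
echo $?
```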
bin/flake8_tests.sh
@@ -5,5 +5,5 @@ set -o errexit  # fail the build if any task fails
 flake8 --version ; pip --version
 # stop the build if there are Python syntax errors or undefined names
 flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
-# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
+# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
 flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
 
@@ -1,35 +1,44 @@
 #!/bin/env python3
 
-import sys
 import re
 import subprocess
+import sys
 
 
 def get_change_log(previous_sha):
-    args = ['git', '--no-pager', 'log', '--merges', '--grep', 'Merge pull request', '--pretty=format:"%h|%s|%b|%p"', 'master...{}'.format(previous_sha)]
+    args = [
+        "git",
+        "--no-pager",
+        "log",
+        "--merges",
+        "--grep",
+        "Merge pull request",
+        '--pretty=format:"%h|%s|%b|%p"',
+        "master...{}".format(previous_sha),
+    ]
     log = subprocess.check_output(args)
     changes = []
 
-    for line in log.split('\n'):
+    for line in log.split("\n"):
         try:
-            sha, subject, body, parents = line[1:-1].split('|')
+            sha, subject, body, parents = line[1:-1].split("|")
         except ValueError:
             continue
 
         try:
-            pull_request = re.match("Merge pull request #(\d+)", subject).groups()[0]
+            pull_request = re.match(r"Merge pull request #(\d+)", subject).groups()[0]
             pull_request = " #{}".format(pull_request)
-        except Exception as ex:
+        except Exception:
             pull_request = ""
 
-        author = subprocess.check_output(['git', 'log', '-1', '--pretty=format:"%an"', parents.split(' ')[-1]])[1:-1]
+        author = subprocess.check_output(["git", "log", "-1", '--pretty=format:"%an"', parents.split(" ")[-1]])[1:-1]
 
         changes.append("{}{}: {} ({})".format(sha, pull_request, body.strip(), author))
 
     return changes
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     previous_sha = sys.argv[1]
     changes = get_change_log(previous_sha)
 
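The script reads the previous release SHA from `sys.argv[1]` and prints one line per merged pull request since then. A usage sketch — the file's path is not shown in this compare, so `bin/changelog` is an assumption, and any SHA from the commit table above works as the argument:

```bash
# Hypothetical path; pass the SHA of the last release as the only argument.
python3 bin/changelog ff7c5e8367
```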
@@ -1,17 +1,20 @@
|
||||
#!/usr/bin/env python3
|
||||
import os
|
||||
import sys
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import requests
|
||||
import simplejson
|
||||
|
||||
github_token = os.environ['GITHUB_TOKEN']
|
||||
auth = (github_token, 'x-oauth-basic')
|
||||
repo = 'getredash/redash'
|
||||
github_token = os.environ["GITHUB_TOKEN"]
|
||||
auth = (github_token, "x-oauth-basic")
|
||||
repo = "getredash/redash"
|
||||
|
||||
|
||||
def _github_request(method, path, params=None, headers={}):
|
||||
if not path.startswith('https://api.github.com'):
|
||||
if urlparse(path).hostname != "api.github.com":
|
||||
url = "https://api.github.com/{}".format(path)
|
||||
else:
|
||||
url = path
|
||||
@@ -22,15 +25,18 @@ def _github_request(method, path, params=None, headers={}):
|
||||
response = requests.request(method, url, data=params, auth=auth)
|
||||
return response
|
||||
|
||||
|
||||
def exception_from_error(message, response):
|
||||
return Exception("({}) {}: {}".format(response.status_code, message, response.json().get('message', '?')))
|
||||
return Exception("({}) {}: {}".format(response.status_code, message, response.json().get("message", "?")))
|
||||
|
||||
|
||||
def rc_tag_name(version):
|
||||
return "v{}-rc".format(version)
|
||||
|
||||
|
||||
def get_rc_release(version):
|
||||
tag = rc_tag_name(version)
|
||||
response = _github_request('get', 'repos/{}/releases/tags/{}'.format(repo, tag))
|
||||
response = _github_request("get", "repos/{}/releases/tags/{}".format(repo, tag))
|
||||
|
||||
if response.status_code == 404:
|
||||
return None
|
||||
@@ -39,84 +45,101 @@ def get_rc_release(version):
|
||||
|
||||
raise exception_from_error("Unknown error while looking RC release: ", response)

def create_release(version, commit_sha):
tag = rc_tag_name(version)

params = {
'tag_name': tag,
'name': "{} - RC".format(version),
'target_commitish': commit_sha,
'prerelease': True
"tag_name": tag,
"name": "{} - RC".format(version),
"target_commitish": commit_sha,
"prerelease": True,
}

response = _github_request('post', 'repos/{}/releases'.format(repo), params)
response = _github_request("post", "repos/{}/releases".format(repo), params)

if response.status_code != 201:
raise exception_from_error("Failed creating new release", response)

return response.json()


def upload_asset(release, filepath):
upload_url = release['upload_url'].replace('{?name,label}', '')
filename = filepath.split('/')[-1]
upload_url = release["upload_url"].replace("{?name,label}", "")
filename = filepath.split("/")[-1]

with open(filepath) as file_content:
headers = {'Content-Type': 'application/gzip'}
response = requests.post(upload_url, file_content, params={'name': filename}, headers=headers, auth=auth, verify=False)
headers = {"Content-Type": "application/gzip"}
response = requests.post(
upload_url, file_content, params={"name": filename}, headers=headers, auth=auth, verify=False
)

if response.status_code != 201: # not 200/201/...
raise exception_from_error('Failed uploading asset', response)
raise exception_from_error("Failed uploading asset", response)

return response


def remove_previous_builds(release):
for asset in release['assets']:
response = _github_request('delete', asset['url'])
for asset in release["assets"]:
response = _github_request("delete", asset["url"])
if response.status_code != 204:
raise exception_from_error("Failed deleting asset", response)


def get_changelog(commit_sha):
latest_release = _github_request('get', 'repos/{}/releases/latest'.format(repo))
latest_release = _github_request("get", "repos/{}/releases/latest".format(repo))
if latest_release.status_code != 200:
raise exception_from_error('Failed getting latest release', latest_release)
raise exception_from_error("Failed getting latest release", latest_release)

latest_release = latest_release.json()
previous_sha = latest_release['target_commitish']
previous_sha = latest_release["target_commitish"]

args = ['git', '--no-pager', 'log', '--merges', '--grep', 'Merge pull request', '--pretty=format:"%h|%s|%b|%p"', '{}...{}'.format(previous_sha, commit_sha)]
args = [
"git",
"--no-pager",
"log",
"--merges",
"--grep",
"Merge pull request",
'--pretty=format:"%h|%s|%b|%p"',
"{}...{}".format(previous_sha, commit_sha),
]
log = subprocess.check_output(args)
changes = ["Changes since {}:".format(latest_release['name'])]
changes = ["Changes since {}:".format(latest_release["name"])]

for line in log.split('\n'):
for line in log.split("\n"):
try:
sha, subject, body, parents = line[1:-1].split('|')
sha, subject, body, parents = line[1:-1].split("|")
except ValueError:
continue

try:
pull_request = re.match("Merge pull request #(\d+)", subject).groups()[0]
pull_request = re.match(r"Merge pull request #(\d+)", subject).groups()[0]
pull_request = " #{}".format(pull_request)
except Exception as ex:
except Exception:
pull_request = ""

author = subprocess.check_output(['git', 'log', '-1', '--pretty=format:"%an"', parents.split(' ')[-1]])[1:-1]
author = subprocess.check_output(["git", "log", "-1", '--pretty=format:"%an"', parents.split(" ")[-1]])[1:-1]

changes.append("{}{}: {} ({})".format(sha, pull_request, body.strip(), author))

return "\n".join(changes)


def update_release_commit_sha(release, commit_sha):
params = {
'target_commitish': commit_sha,
"target_commitish": commit_sha,
}

response = _github_request('patch', 'repos/{}/releases/{}'.format(repo, release['id']), params)
response = _github_request("patch", "repos/{}/releases/{}".format(repo, release["id"]), params)

if response.status_code != 200:
raise exception_from_error("Failed updating commit sha for existing release", response)

return response.json()


def update_release(version, build_filepath, commit_sha):
try:
release = get_rc_release(version)
@@ -125,21 +148,22 @@ def update_release(version, build_filepath, commit_sha):
else:
release = create_release(version, commit_sha)

print("Using release id: {}".format(release['id']))
print("Using release id: {}".format(release["id"]))

remove_previous_builds(release)
response = upload_asset(release, build_filepath)

changelog = get_changelog(commit_sha)

response = _github_request('patch', release['url'], {'body': changelog})
response = _github_request("patch", release["url"], {"body": changelog})
if response.status_code != 200:
raise exception_from_error("Failed updating release description", response)

except Exception as ex:
print(ex)

if __name__ == '__main__':

if __name__ == "__main__":
commit_sha = sys.argv[1]
version = sys.argv[2]
filepath = sys.argv[3]

96 bin/upgrade
@@ -1,9 +1,9 @@
#!/usr/bin/env python3
import urllib
import argparse
import os
import subprocess
import sys
import urllib
from collections import namedtuple
from fnmatch import fnmatch

@@ -15,8 +15,8 @@ except ImportError:
print("Missing required library: semver.")
exit(1)

REDASH_HOME = os.environ.get('REDASH_HOME', '/opt/redash')
CURRENT_VERSION_PATH = '{}/current'.format(REDASH_HOME)
REDASH_HOME = os.environ.get("REDASH_HOME", "/opt/redash")
CURRENT_VERSION_PATH = "{}/current".format(REDASH_HOME)


def run(cmd, cwd=None):
@@ -27,11 +27,11 @@ def run(cmd, cwd=None):


def confirm(question):
reply = str(input(question + ' (y/n): ')).lower().strip()
reply = str(input(question + " (y/n): ")).lower().strip()

if reply[0] == 'y':
if reply[0] == "y":
return True
if reply[0] == 'n':
if reply[0] == "n":
return False
else:
return confirm("Please use 'y' or 'n'")
@@ -40,7 +40,8 @@ def confirm(question):
def version_path(version_name):
return "{}/{}".format(REDASH_HOME, version_name)

END_CODE = '\033[0m'

END_CODE = "\033[0m"


def colored_string(text, color):
@@ -51,60 +52,62 @@ def colored_string(text, color):


def h1(text):
print(colored_string(text, '\033[4m\033[1m'))
print(colored_string(text, "\033[4m\033[1m"))


def green(text):
print(colored_string(text, '\033[92m'))
print(colored_string(text, "\033[92m"))


def red(text):
print(colored_string(text, '\033[91m'))
print(colored_string(text, "\033[91m"))


class Release(namedtuple('Release', ('version', 'download_url', 'filename', 'description'))):
class Release(namedtuple("Release", ("version", "download_url", "filename", "description"))):
def v1_or_newer(self):
return semver.compare(self.version, '1.0.0-alpha') >= 0
return semver.compare(self.version, "1.0.0-alpha") >= 0

def is_newer(self, version):
return semver.compare(self.version, version) > 0

@property
def version_name(self):
return self.filename.replace('.tar.gz', '')
return self.filename.replace(".tar.gz", "")


def get_latest_release_from_ci():
response = requests.get('https://circleci.com/api/v1.1/project/github/getredash/redash/latest/artifacts?branch=master')
response = requests.get(
"https://circleci.com/api/v1.1/project/github/getredash/redash/latest/artifacts?branch=master"
)

if response.status_code != 200:
exit("Failed getting releases (status code: %s)." % response.status_code)

tarball_asset = filter(lambda asset: asset['url'].endswith('.tar.gz'), response.json())[0]
filename = urllib.unquote(tarball_asset['pretty_path'].split('/')[-1])
version = filename.replace('redash.', '').replace('.tar.gz', '')
tarball_asset = filter(lambda asset: asset["url"].endswith(".tar.gz"), response.json())[0]
filename = urllib.unquote(tarball_asset["pretty_path"].split("/")[-1])
version = filename.replace("redash.", "").replace(".tar.gz", "")

release = Release(version, tarball_asset['url'], filename, '')
release = Release(version, tarball_asset["url"], filename, "")

return release


def get_release(channel):
if channel == 'ci':
if channel == "ci":
return get_latest_release_from_ci()

response = requests.get('https://version.redash.io/api/releases?channel={}'.format(channel))
response = requests.get("https://version.redash.io/api/releases?channel={}".format(channel))
release = response.json()[0]

filename = release['download_url'].split('/')[-1]
release = Release(release['version'], release['download_url'], filename, release['description'])
filename = release["download_url"].split("/")[-1]
release = Release(release["version"], release["download_url"], filename, release["description"])

return release


def link_to_current(version_name):
green("Linking to current version...")
run('ln -nfs {} {}'.format(version_path(version_name), CURRENT_VERSION_PATH))
run("ln -nfs {} {}".format(version_path(version_name), CURRENT_VERSION_PATH))


def restart_services():
@@ -113,25 +116,25 @@ def restart_services():
# directory.
green("Restarting...")
try:
run('sudo /etc/init.d/redash_supervisord restart')
run("sudo /etc/init.d/redash_supervisord restart")
except subprocess.CalledProcessError as e:
run('sudo service supervisor restart')
run("sudo service supervisor restart")


def update_requirements(version_name):
green("Installing new Python packages (if needed)...")
new_requirements_file = '{}/requirements.txt'.format(version_path(version_name))
new_requirements_file = "{}/requirements.txt".format(version_path(version_name))

install_requirements = False

try:
run('diff {}/requirements.txt {}'.format(CURRENT_VERSION_PATH, new_requirements_file)) != 0
run("diff {}/requirements.txt {}".format(CURRENT_VERSION_PATH, new_requirements_file)) != 0
except subprocess.CalledProcessError as e:
if e.returncode != 0:
install_requirements = True

if install_requirements:
run('sudo pip install -r {}'.format(new_requirements_file))
run("sudo pip install -r {}".format(new_requirements_file))
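The conditional reinstall above leans on diff's exit status: diff exits 0 when the two files match and 1 when they differ, and the script's run helper raises subprocess.CalledProcessError on any nonzero exit, which is what flips install_requirements (the stray "!= 0" comparison on the run(...) call is discarded). A standalone sketch of the same idea, assuming plain subprocess.check_call as the wrapper:

import subprocess

def requirements_changed(old_path, new_path):
    # diff exits 0 when identical, 1 when different, >1 on real errors
    try:
        subprocess.check_call(["diff", "-q", old_path, new_path])
        return False
    except subprocess.CalledProcessError as e:
        return e.returncode == 1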

def apply_migrations(release):
@@ -143,8 +146,12 @@ def apply_migrations(release):


def find_migrations(version_name):
current_migrations = set([f for f in os.listdir("{}/migrations".format(CURRENT_VERSION_PATH)) if fnmatch(f, '*_*.py')])
new_migrations = sorted([f for f in os.listdir("{}/migrations".format(version_path(version_name))) if fnmatch(f, '*_*.py')])
current_migrations = set(
[f for f in os.listdir("{}/migrations".format(CURRENT_VERSION_PATH)) if fnmatch(f, "*_*.py")]
)
new_migrations = sorted(
[f for f in os.listdir("{}/migrations".format(version_path(version_name))) if fnmatch(f, "*_*.py")]
)

return [m for m in new_migrations if m not in current_migrations]
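Put differently, the pending migrations are just the filename difference between the new release tree and the currently linked one, in sorted order. A toy run of that selection (the directory listings are invented):

current_migrations = {"0001_init.py", "0002_add_users.py"}
new_migrations = sorted(["0001_init.py", "0002_add_users.py", "0003_add_tags.py"])

pending = [m for m in new_migrations if m not in current_migrations]
print(pending)  # ['0003_add_tags.py']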

@@ -154,40 +161,45 @@ def apply_migrations_pre_v1(version_name):

if new_migrations:
green("New migrations to run: ")
print(', '.join(new_migrations))
print(", ".join(new_migrations))
else:
print("No new migrations in this version.")

if new_migrations and confirm("Apply new migrations? (make sure you have backup)"):
for migration in new_migrations:
print("Applying {}...".format(migration))
run("sudo sudo -u redash PYTHONPATH=. bin/run python migrations/{}".format(migration), cwd=version_path(version_name))
run(
"sudo sudo -u redash PYTHONPATH=. bin/run python migrations/{}".format(migration),
cwd=version_path(version_name),
)


def download_and_unpack(release):
directory_name = release.version_name

green("Downloading release tarball...")
run('sudo wget --header="Accept: application/octet-stream" -O {} {}'.format(release.filename, release.download_url))
run(
'sudo wget --header="Accept: application/octet-stream" -O {} {}'.format(release.filename, release.download_url)
)
green("Unpacking to: {}...".format(directory_name))
run('sudo mkdir -p {}'.format(directory_name))
run('sudo tar -C {} -xvf {}'.format(directory_name, release.filename))
run("sudo mkdir -p {}".format(directory_name))
run("sudo tar -C {} -xvf {}".format(directory_name, release.filename))

green("Changing ownership to redash...")
run('sudo chown redash {}'.format(directory_name))
run("sudo chown redash {}".format(directory_name))

green("Linking .env file...")
run('sudo ln -nfs {}/.env {}/.env'.format(REDASH_HOME, version_path(directory_name)))
run("sudo ln -nfs {}/.env {}/.env".format(REDASH_HOME, version_path(directory_name)))


def current_version():
real_current_path = os.path.realpath(CURRENT_VERSION_PATH).replace('.b', '+b')
return real_current_path.replace(REDASH_HOME + '/', '').replace('redash.', '')
real_current_path = os.path.realpath(CURRENT_VERSION_PATH).replace(".b", "+b")
return real_current_path.replace(REDASH_HOME + "/", "").replace("redash.", "")


def verify_minimum_version():
green("Current version: " + current_version())
if semver.compare(current_version(), '0.12.0') < 0:
if semver.compare(current_version(), "0.12.0") < 0:
red("You need to have Redash v0.12.0 or newer to upgrade to post v1.0.0 releases.")
green("To upgrade to v0.12.0, run the upgrade script set to the legacy channel (--channel legacy).")
exit(1)
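The gate above hinges on semver.compare's three-way contract: it returns -1 when the first version is lower, 0 when equal, and 1 when higher, with prerelease tags like 1.0.0-alpha ordering before the release itself. A quick illustration of the comparisons this script depends on (example values only):

import semver

print(semver.compare("0.11.1", "0.12.0"))       # -1: too old, the upgrade is refused
print(semver.compare("1.0.0-alpha", "1.0.0"))   # -1: prereleases sort before the final release
print(semver.compare("10.1.0", "1.0.0-alpha"))  # 1: v1_or_newer() would be True for this version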

@@ -234,9 +246,9 @@ def deploy_release(channel):
red("Exit status: {}\nOutput:\n{}".format(e.returncode, e.output))


if __name__ == '__main__':
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--channel", help="The channel to get release from (default: stable).", default='stable')
parser.add_argument("--channel", help="The channel to get release from (default: stable).", default="stable")
args = parser.parse_args()

deploy_release(args.channel)

BIN client/app/assets/images/db-logos/arangodb.png (new file, 97 KiB)
BIN client/app/assets/images/db-logos/databend.png (new file, 3.2 KiB)
BIN (modified image, before: 12 KiB)
BIN client/app/assets/images/db-logos/elasticsearch2.png (new file, 16 KiB)
BIN (new image, 16 KiB)
BIN (new image, 16 KiB)
BIN client/app/assets/images/db-logos/google_analytics4.png (new file, 14 KiB)
BIN client/app/assets/images/db-logos/google_search_console.png (new file, 14 KiB)
BIN client/app/assets/images/db-logos/ignite.png (new file, 18 KiB)
BIN client/app/assets/images/db-logos/nz.png (new file, 1.2 KiB)
BIN client/app/assets/images/db-logos/pinot.png (new file, 29 KiB)
BIN client/app/assets/images/destinations/asana.png (new file, 12 KiB)
BIN client/app/assets/images/destinations/discord.png (new file, 7.0 KiB)
BIN (modified image, before: 12 KiB, after: 4.1 KiB)
@@ -90,6 +90,23 @@ body.fixed-layout {
.embed__vis {
display: flex;
flex-flow: column;
height: calc(~'100vh - 25px');

> .embed-heading {
flex: 0 0 auto;
}

> .query__vis {
flex: 1 1 auto;

.chart-visualization-container, .visualization-renderer-wrapper, .visualization-renderer {
height: 100%
}
}

> .tile__bottom-control {
flex: 0 0 auto;
}
width: 100%;
}


@@ -17,6 +17,13 @@ export default function ApplicationArea() {
useEffect(() => {
function globalErrorHandler(event) {
event.preventDefault();
if (event.message === "Uncaught SyntaxError: Unexpected token '<'") {
// if we see a JavaScript error for an unexpected token where the token is '<', it usually means that a fallback
// HTML file (like index.html) was served as the content of the script instead of the expected script, so log a
// friendlier message in the console about what could be going on
console.error(
`[Uncaught SyntaxError: Unexpected token '<'] usually means that a fallback html file was returned from server rather than the expected script. Check that the server is properly serving the file ${event.filename}.`
);
}
setUnhandledError(event.error);
}

@@ -36,6 +36,7 @@ function SelectWithVirtualScroll({ options, ...props }: VirtualScrollSelectProps
<AntdSelect<string>
dropdownMatchSelectWidth={dropdownMatchSelectWidth}
options={options}
allowClear={true}
optionFilterProp="label" // as this component expects "options" prop
{...props}
/>

@@ -123,7 +123,6 @@
right: 10px;
bottom: 15px;
height: auto;
overflow: hidden;
padding: 0;
}
}

@@ -151,6 +151,7 @@ export default function DynamicForm({
onSubmit,
}) {
const [isSubmitting, setIsSubmitting] = useState(false);
const [isTouched, setIsTouched] = useState(false);
const [showExtraFields, setShowExtraFields] = useState(defaultShowExtraFields);
const [form] = Form.useForm();
const extraFields = filter(fields, { extra: true });
@@ -163,9 +164,8 @@
onSubmit(
values,
msg => {
const { setFieldsValue, getFieldsValue } = form;
setIsSubmitting(false);
setFieldsValue(getFieldsValue()); // reset form touched state
setIsTouched(false); // reset form touched state
notification.success(msg);
},
msg => {
@@ -174,7 +174,7 @@
}
);
},
[form, fields, onSubmit]
[fields, onSubmit]
);

const handleFinishFailed = useCallback(
@@ -187,6 +187,9 @@
return (
<Form
form={form}
onFieldsChange={() => {
setIsTouched(true);
}}
id={id}
className="dynamic-form"
layout="vertical"
@@ -216,7 +219,7 @@
{saveText}
</Button>
)}
<DynamicFormActions actions={actions} isFormDirty={form.isFieldsTouched()} />
<DynamicFormActions actions={actions} isFormDirty={isTouched} />
</Form>
);
}

@@ -11,17 +11,17 @@
> .layout-content {
flex: 1 0 auto;
width: 75%;
order: 0;
order: 1;
margin: 0;
padding: 0 0 0 @spacing
}

> .layout-sidebar {
flex: 0 0 auto;
width: 25%;
max-width: 350px;
order: 1;
order: 0;
margin: 0;
padding: 0 0 0 @spacing;
}

@media (max-width: 990px) {
@@ -31,6 +31,7 @@
width: 100%;
order: 1;
margin: 0;
padding: 0;
}

> .layout-sidebar {
@@ -38,7 +39,6 @@
max-width: none;
order: 0;
margin: 0 0 @spacing 0;
padding: 0;
}
}
}

@@ -27,6 +27,10 @@ defineDummySnippets("sql");
defineDummySnippets("json");
defineDummySnippets("yaml");

// without this line, ace will try to load a non-existent mode-custom.js file
// for data sources with syntax = "custom"
ace.define("ace/mode/custom", [], () => {});

function buildTableColumnKeywords(table) {
const keywords = [];
table.columns.forEach(column => {

@@ -15,6 +15,7 @@ import { DashboardTagsControl } from "@/components/tags-control/TagsControl";
import getTags from "@/services/getTags";
import { clientConfig } from "@/services/auth";
import { policy } from "@/services/policy";
import recordEvent from "@/services/recordEvent";
import { durationHumanize } from "@/lib/utils";
import { DashboardStatusEnum } from "../hooks/useDashboard";

@@ -175,6 +176,7 @@ function DashboardControl({ dashboardConfiguration, headerExtra }) {
fullscreen,
toggleFullscreen,
showShareDashboardDialog,
updateDashboard,
} = dashboardConfiguration;
const showPublishButton = dashboard.is_draft;
const showRefreshButton = true;
@@ -182,8 +184,14 @@
const canShareDashboard = canEditDashboard && !dashboard.is_draft;
const showShareButton = !clientConfig.disablePublicUrls && (dashboard.publicAccessEnabled || canShareDashboard);
const showMoreOptionsButton = canEditDashboard;

const unarchiveDashboard = () => {
recordEvent("unarchive", "dashboard", dashboard.id);
updateDashboard({ is_archived: false }, false);
};
return (
<div className="dashboard-control">
{dashboard.can_edit && dashboard.is_archived && <Button onClick={unarchiveDashboard}>Unarchive</Button>}
{!dashboard.is_archived && (
<span className="hidden-print">
{showPublishButton && (

@@ -6,11 +6,6 @@ div.tags-list {
-ms-user-select: none; /* IE10+ */
}

.page-dashboard-list .page-header-actions {
width: 25%; /* same as sidebar */
max-width: 350px; /* same as sidebar */
}

/* same rule as for sidebar */
@media (max-width: 990px) {
.page-dashboard-list .page-header-actions {

@@ -3,10 +3,6 @@
height: 35px;
}

.page-queries-list .page-header-actions {
width: 25%; /* same as sidebar */
max-width: 350px; /* same as sidebar */
}

/* same rule as for sidebar */
@media (max-width: 990px) {

@@ -134,11 +134,10 @@ function QuerySource(props) {
// choose data source id for new queries
if (dataSourcesLoaded && queryFlags.isNew) {
const firstDataSourceId = dataSources.length > 0 ? dataSources[0].id : null;
const selectedDataSourceId = parseInt(localStorage.getItem("lastSelectedDataSourceId")) || null;

handleDataSourceChange(
chooseDataSourceId(
[query.data_source_id, localStorage.getItem("lastSelectedDataSourceId"), firstDataSourceId],
dataSources
)
chooseDataSourceId([query.data_source_id, selectedDataSourceId, firstDataSourceId], dataSources)
);
}
}, [query.data_source_id, queryFlags.isNew, dataSourcesLoaded, dataSources, handleDataSourceChange]);

@@ -113,6 +113,10 @@ export function fetchDataFromJob(jobId, interval = 1000) {
});
}

export function isDateTime(v) {
return isString(v) && moment(v).isValid() && /^\d{4}-\d{2}-\d{2}T/.test(v);
}

class QueryResult {
constructor(props) {
this.deferred = defer();
@@ -147,7 +151,7 @@ class QueryResult {
let newType = null;
if (isNumber(v)) {
newType = "float";
} else if (isString(v) && v.match(/^\d{4}-\d{2}-\d{2}T/)) {
} else if (isDateTime(v)) {
row[k] = moment.utc(v);
newType = "datetime";
} else if (isString(v) && v.match(/^\d{4}-\d{2}-\d{2}$/)) {
@@ -318,6 +322,9 @@ class QueryResult {
}
return v;
});
if (filter.values.length > 1 && filter.multiple) {
filter.current = filter.values.slice();
}
});

return filters;

17 client/app/services/query-result.test.js (new file)
@@ -0,0 +1,17 @@
import { isDateTime } from "@/services/query-result";

describe("isDateTime", () => {
it.each([
["2022-01-01T00:00:00", true],
["2022-01-01T00:00:00+09:00", true],
["2021-01-27T00:00:01.733983944+03:00 stderr F {", false],
["2021-01-27Z00:00:00+09:00", false],
["2021-01-27", false],
["foo bar", false],
[2022, false],
[null, false],
["", false],
])("isDateTime('%s'). expected '%s'.", (value, expected) => {
expect(isDateTime(value)).toBe(expected);
});
});
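The table above pins down isDateTime's contract: a value counts as a datetime only when it is a string, starts with an ISO YYYY-MM-DDT prefix, and actually parses; bare dates and numbers keep their original type. For readers following along outside the JS codebase, a rough Python equivalent of the same rule (the helper name is invented, and datetime.fromisoformat is a stand-in for moment's validity check):

import re
from datetime import datetime

def is_date_time(v):
    # string + ISO "YYYY-MM-DDT" prefix + actually parseable
    if not isinstance(v, str) or not re.match(r"^\d{4}-\d{2}-\d{2}T", v):
        return False
    try:
        datetime.fromisoformat(v)
        return True
    except ValueError:
        return False

assert is_date_time("2022-01-01T00:00:00")
assert not is_date_time("2021-01-27")  # date-only strings keep their own column type
assert not is_date_time(2022)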
@@ -13,7 +13,7 @@ try {
cypressConfigBaseUrl = cypressConfig.baseUrl;
} catch (e) {}

const baseUrl = process.env.CYPRESS_baseUrl || cypressConfigBaseUrl || "http://localhost:5000";
const baseUrl = process.env.CYPRESS_baseUrl || cypressConfigBaseUrl || "http://localhost:5001";

function seedDatabase(seedValues) {
get(baseUrl + "/login", (_, { headers }) => {
@@ -63,10 +63,11 @@ function runCypressCI() {
PERCY_TOKEN_ENCODED,
CYPRESS_PROJECT_ID_ENCODED,
CYPRESS_RECORD_KEY_ENCODED,
CIRCLE_REPOSITORY_URL,
GITHUB_REPOSITORY,
CYPRESS_OPTIONS, // eslint-disable-line no-unused-vars
} = process.env;

if (CIRCLE_REPOSITORY_URL && CIRCLE_REPOSITORY_URL.includes("getredash/redash")) {
if (GITHUB_REPOSITORY === "getredash/redash") {
if (PERCY_TOKEN_ENCODED) {
process.env.PERCY_TOKEN = atob(`${PERCY_TOKEN_ENCODED}`);
}
@@ -76,10 +77,11 @@ function runCypressCI() {
if (CYPRESS_RECORD_KEY_ENCODED) {
process.env.CYPRESS_RECORD_KEY = atob(`${CYPRESS_RECORD_KEY_ENCODED}`);
}
process.env.CYPRESS_OPTIONS = "--record";
}

execSync(
"COMMIT_INFO_MESSAGE=$(git show -s --format=%s) docker-compose run --name cypress cypress ./node_modules/.bin/percy exec -t 300 -- ./node_modules/.bin/cypress run --record",
"COMMIT_INFO_MESSAGE=$(git show -s --format=%s) docker-compose run --name cypress cypress ./node_modules/.bin/percy exec -t 300 -- ./node_modules/.bin/cypress run $CYPRESS_OPTIONS",
{ stdio: "inherit" }
);
}
@@ -118,6 +120,6 @@ switch (command) {
stopServer();
break;
default:
console.log("Usage: npm run cypress [build|start|db-seed|open|run|stop]");
console.log("Usage: yarn cypress [build|start|db-seed|open|run|stop]");
break;
}

24 client/cypress/integration/dashboard/dashboard_list.js (new file)
@@ -0,0 +1,24 @@
describe("Dashboard list sort", () => {
beforeEach(() => {
cy.login();
});

it("creates one dashboard", () => {
cy.visit("/dashboards");
cy.getByTestId("CreateButton").click();
cy.getByTestId("CreateDashboardMenuItem").click();
cy.getByTestId("CreateDashboardDialog").within(() => {
cy.get("input").type("A Foo Bar");
cy.getByTestId("DashboardSaveButton").click();
});
});

describe("Sorting table does not crash page ", () => {
it("sorts", () => {
cy.visit("/dashboards");
cy.contains("Name").click();
cy.wait(1000); // eslint-disable-line cypress/no-unnecessary-waiting
cy.getByTestId("ErrorMessage").should("not.exist");
});
});
});

@@ -1,5 +1,4 @@
import { createQueryAndAddWidget, editDashboard } from "../../support/dashboard";
import { dragParam, expectParamOrder } from "../../support/parameters";
import { createQueryAndAddWidget } from "../../support/dashboard";

describe("Dashboard Parameters", () => {
const parameters = [
@@ -59,16 +58,6 @@ describe("Dashboard Parameters", () => {
});
};

const setWidgetParametersToDashboard = parameters => {
cy.wrap(parameters).each(({ name: paramName }, i) => {
cy.getByTestId(`EditParamMappingButton-${paramName}`).click();
cy.getByTestId("NewDashboardParameterOption")
.filter(":visible")
.click();
return saveMappingOptions(i === parameters.length - 1);
});
};

it("supports widget parameters", function() {
// widget parameter mapping is the default for the API
cy.getByTestId(this.widgetTestId).within(() => {
@@ -86,27 +75,6 @@ describe("Dashboard Parameters", () => {
cy.getByTestId("DashboardParameters").should("not.exist");
});

it("supports dashboard parameters", function() {
openMappingOptions(this.widgetTestId);
setWidgetParametersToDashboard(parameters);

cy.getByTestId(this.widgetTestId).within(() => {
cy.getByTestId("ParameterName-param1").should("not.exist");
});

cy.getByTestId("DashboardParameters").within(() => {
cy.getByTestId("ParameterName-param1")
.find("input")
.type("{selectall}DashboardParam");

cy.getByTestId("ParameterApplyButton").click();
});

cy.getByTestId(this.widgetTestId).within(() => {
cy.getByTestId("TableVisualization").should("contain", "DashboardParam");
});
});

it("supports static values for parameters", function() {
openMappingOptions(this.widgetTestId);
cy.getByTestId("EditParamMappingButton-param1").click();
@@ -131,34 +99,4 @@ describe("Dashboard Parameters", () => {
cy.getByTestId("TableVisualization").should("contain", "StaticValue");
});
});

it("reorders parameters", function() {
// Reorder is only available in edit mode
editDashboard();

const [param1, param2] = parameters;

cy.getByTestId("ParameterBlock-param1")
.invoke("width")
.then(paramWidth => {
cy.server();
cy.route("POST", `**/api/dashboards/*`).as("SaveDashboard");
cy.route("POST", `**/api/widgets/*`).as("SaveWidget");

// Asserts widget param order
dragParam(param1.name, paramWidth, 1);
cy.wait("@SaveWidget");
cy.reload();
expectParamOrder([param2.title, param1.title]);

// Asserts dashboard param order
openMappingOptions(this.widgetTestId);
setWidgetParametersToDashboard(parameters);
cy.wait("@SaveWidget");
dragParam(param1.name, paramWidth, 1);
cy.wait("@SaveDashboard");
cy.reload();
expectParamOrder([param2.title, param1.title]);
});
});
});

@@ -141,7 +141,7 @@ describe("Textbox", () => {
})
.should($el => {
const { top, left } = $el.offset();
expect(top).to.eq(162);
expect(top).to.be.oneOf([162, 162.015625]);
expect(left).to.eq(282);
expect($el.width()).to.eq(545);
expect($el.height()).to.eq(185);

@@ -177,7 +177,7 @@ describe("Widget", () => {
cy.visit(this.dashboardUrl);
cy.getByTestId("TableVisualization")
.its("0.offsetHeight")
.should("eq", 381);
.should("be.oneOf", [380, 381]);
cy.percySnapshot("Shows correct height of table visualization");
});
});

@@ -47,7 +47,7 @@ describe("Create Data Source", () => {
cy.getByTestId("User").type("postgres");
cy.getByTestId("Password").type("postgres");
cy.getByTestId("Database Name").type("postgres{enter}");
cy.getByTestId("CreateSourceSaveButton").click();
cy.getByTestId("CreateSourceSaveButton").click({ force: true });

cy.contains("Saved.");
});

@@ -15,7 +15,7 @@ describe("Create Destination", () => {

cy.getByTestId("PreviewItem")
.then($previewItems => Cypress.$.map($previewItems, item => Cypress.$(item).attr("data-test-type")))
.then(availableTypes => expect(availableTypes).not.to.contain.members(this.deprecatedTypes));
.then(availableTypes => expect(availableTypes).not.to.contain.oneOf(this.deprecatedTypes));

cy.getByTestId("CreateSourceDialog").should("contain", "Email");
cy.wait(1000); // eslint-disable-line cypress/no-unnecessary-waiting

@@ -68,19 +68,11 @@ describe("Query Filters", () => {
}

it("filters rows in a Table Visualization", () => {
expectSelectedOptionsToHaveMembers(["a"]);
expectTableToHaveLength(4);
expectFirstColumnToHaveMembers(["a", "a", "a", "a"]);
// Defaults to All Options Selected

cy.getByTestId("FilterName-stage1::multi-filter")
.find(".ant-select-selector")
.click();
cy.contains(".ant-select-item-option-content", "b").click();
cy.getByTestId("FilterName-stage1::multi-filter").click(); // close dropdown

expectSelectedOptionsToHaveMembers(["a", "b"]);
expectTableToHaveLength(7);
expectFirstColumnToHaveMembers(["a", "a", "a", "a", "b", "b", "b"]);
expectSelectedOptionsToHaveMembers(["a", "b", "c"]);
expectTableToHaveLength(11);
expectFirstColumnToHaveMembers(["a", "a", "a", "a", "b", "b", "b", "c", "c", "c", "c"]);

// Clear Option

@@ -92,6 +84,30 @@

cy.getByTestId("TableVisualization").should("not.exist");

// Single Option selected

cy.getByTestId("FilterName-stage1::multi-filter")
.find(".ant-select-selector")
.click();
cy.contains(".ant-select-item-option-grouped > .ant-select-item-option-content", "a").click();
cy.getByTestId("FilterName-stage1::multi-filter").click(); // close dropdown

expectSelectedOptionsToHaveMembers(["a"]);
expectTableToHaveLength(4);
expectFirstColumnToHaveMembers(["a", "a", "a", "a"]);

// Two Options selected

cy.getByTestId("FilterName-stage1::multi-filter")
.find(".ant-select-selector")
.click();
cy.contains(".ant-select-item-option-content", "b").click();
cy.getByTestId("FilterName-stage1::multi-filter").click(); // close dropdown

expectSelectedOptionsToHaveMembers(["a", "b"]);
expectTableToHaveLength(7);
expectFirstColumnToHaveMembers(["a", "a", "a", "a", "b", "b", "b"]);

// Select All Option

cy.getByTestId("FilterName-stage1::multi-filter")

@@ -1,4 +1,5 @@
import { dragParam } from "../../support/parameters";
import dayjs from "dayjs";

function openAndSearchAntdDropdown(testId, paramOption) {
cy.getByTestId(testId)
@@ -310,7 +311,7 @@ describe("Parameter", () => {

cy.getByTestId("ParameterApplyButton").click();

cy.getByTestId("TableVisualization").should("contain", Cypress.moment(this.now).format("15/MM/YY"));
cy.getByTestId("TableVisualization").should("contain", dayjs(this.now).format("15/MM/YY"));
});

it("allows picking a dynamic date", function() {
@@ -322,7 +323,7 @@ describe("Parameter", () => {

cy.getByTestId("ParameterApplyButton").click();

cy.getByTestId("TableVisualization").should("contain", Cypress.moment(this.now).format("DD/MM/YY"));
cy.getByTestId("TableVisualization").should("contain", dayjs(this.now).format("DD/MM/YY"));
});

it("sets dirty state when edited", () => {
@@ -364,7 +365,7 @@ describe("Parameter", () => {

cy.getByTestId("ParameterApplyButton").click();

cy.getByTestId("TableVisualization").should("contain", Cypress.moment(this.now).format("YYYY-MM-15 HH:mm"));
cy.getByTestId("TableVisualization").should("contain", dayjs(this.now).format("YYYY-MM-15 HH:mm"));
});

it("shows the current datetime after clicking in Now", function() {
@@ -379,7 +380,7 @@ describe("Parameter", () => {

cy.getByTestId("ParameterApplyButton").click();

cy.getByTestId("TableVisualization").should("contain", Cypress.moment(this.now).format("YYYY-MM-DD HH:mm"));
cy.getByTestId("TableVisualization").should("contain", dayjs(this.now).format("YYYY-MM-DD HH:mm"));
});

it("allows picking a dynamic date", function() {
@@ -391,7 +392,7 @@ describe("Parameter", () => {

cy.getByTestId("ParameterApplyButton").click();

cy.getByTestId("TableVisualization").should("contain", Cypress.moment(this.now).format("YYYY-MM-DD HH:mm"));
cy.getByTestId("TableVisualization").should("contain", dayjs(this.now).format("YYYY-MM-DD HH:mm"));
});

it("sets dirty state when edited", () => {
@@ -449,7 +450,7 @@ describe("Parameter", () => {

cy.getByTestId("ParameterApplyButton").click();

const now = Cypress.moment(this.now);
const now = dayjs(this.now);
cy.getByTestId("TableVisualization").should(
"contain",
now.format("YYYY-MM-15") + " - " + now.format("YYYY-MM-20")
@@ -465,7 +466,7 @@ describe("Parameter", () => {

cy.getByTestId("ParameterApplyButton").click();

const lastMonth = Cypress.moment(this.now).subtract(1, "month");
const lastMonth = dayjs(this.now).subtract(1, "month");
cy.getByTestId("TableVisualization").should(
"contain",
lastMonth.startOf("month").format("YYYY-MM-DD") + " - " + lastMonth.endOf("month").format("YYYY-MM-DD")

@@ -99,8 +99,14 @@ describe("Pivot", () => {
.focus()
.type(" UNION ALL {enter}SELECT 'c' AS stage1, 'c5' AS stage2, 55 AS value");

// wait for the query text change to propagate (it's debounced in QuerySource.jsx)
// eslint-disable-next-line cypress/no-unnecessary-waiting
cy.wait(200);

cy.getByTestId("SaveButton").click();
cy.getByTestId("ExecuteButton").click();
cy.getByTestId("ExecuteButton")
.should("be.enabled")
.click();

// assert number of rows is 12
cy.getByTestId("PivotTableVisualization").contains(".pvtGrandTotal", "12");

@@ -1,10 +0,0 @@
const percyHealthCheck = require("@percy/cypress/task"); // eslint-disable-line import/no-extraneous-dependencies, import/no-unresolved

module.exports = (on, config) => {
if (config.env.coverage) {
require("@cypress/code-coverage/task")(on, config);
}
on("task", percyHealthCheck);

return config;
};
@@ -80,6 +80,10 @@ export function assertAxesAndAddLabels(xaxisLabel, yaxisLabel) {
.clear()
.type(yaxisLabel);

cy.getByTestId("Chart.LeftYAxis.TickFormat")
.clear()
.type("+");

cy.getByTestId("VisualizationEditor.Tabs.General").click();
}


6 codecov.yml (new file)
@@ -0,0 +1,6 @@
comment:
layout: " diff, flags, files"
behavior: default
require_changes: false
require_base: true
require_head: true
22 cypress.config.js (new file)
@@ -0,0 +1,22 @@
const { defineConfig } = require('cypress')

module.exports = defineConfig({
e2e: {
baseUrl: 'http://localhost:5001',
defaultCommandTimeout: 20000,
downloadsFolder: 'client/cypress/downloads',
fixturesFolder: 'client/cypress/fixtures',
requestTimeout: 15000,
screenshotsFolder: 'client/cypress/screenshots',
specPattern: 'client/cypress/integration/',
supportFile: 'client/cypress/support/index.js',
video: true,
videoUploadOnPasses: false,
videosFolder: 'client/cypress/videos',
viewportHeight: 1024,
viewportWidth: 1280,
env: {
coverage: false
}
},
})
18 cypress.json (deleted file)
@@ -1,18 +0,0 @@
{
"baseUrl": "http://localhost:5000",
"video": true,
"videoUploadOnPasses": false,
"fixturesFolder": "client/cypress/fixtures",
"integrationFolder": "client/cypress/integration",
"pluginsFile": "client/cypress/plugins/index.js",
"screenshotsFolder": "client/cypress/screenshots",
"videosFolder": "client/cypress/videos",
"supportFile": "client/cypress/support/index.js",
"defaultCommandTimeout": 20000,
"requestTimeout": 15000,
"viewportWidth": 1280,
"viewportHeight": 1024,
"env": {
"coverage": false
}
}
@@ -5,7 +5,7 @@ x-redash-service: &redash-service
build:
context: .
args:
skip_frontend_build: "true"
skip_frontend_build: "true" # set to empty string to build
volumes:
- .:/app
env_file:
@@ -17,7 +17,9 @@ x-redash-environment: &redash-environment
REDASH_RATELIMIT_ENABLED: "false"
REDASH_MAIL_DEFAULT_SENDER: "redash@example.com"
REDASH_MAIL_SERVER: "email"
REDASH_MAIL_PORT: 1025
REDASH_ENFORCE_CSRF: "true"
REDASH_GUNICORN_TIMEOUT: 60
# Set secret keys in the .env file
services:
server:
@@ -27,7 +29,7 @@ services:
- postgres
- redis
ports:
- "5000:5000"
- "5001:5000"
- "5678:5678"
environment:
<<: *redash-environment
@@ -48,21 +50,22 @@ services:
<<: *redash-environment
PYTHONUNBUFFERED: 0
redis:
image: redis:3-alpine
image: redis:7-alpine
restart: unless-stopped
postgres:
image: postgres:9.5-alpine
image: pgautoupgrade/pgautoupgrade:15-alpine3.8
ports:
- "15432:5432"
# The following makes the DB less durable, but gains significant performance improvements for the test runs (x3
# improvement on my personal machine). We should consider moving this into a dedicated Docker Compose configuration for
# tests.
ports:
- "15432:5432"
command: "postgres -c fsync=off -c full_page_writes=off -c synchronous_commit=OFF"
restart: unless-stopped
environment:
POSTGRES_HOST_AUTH_METHOD: "trust"
email:
image: djfarrelly/maildev
image: maildev/maildev
ports:
- "1080:80"
- "1080:1080"
- "1025:1025"
restart: unless-stopped

@@ -5,5 +5,5 @@ CLI to manage redash.

from redash.cli import manager

if __name__ == '__main__':
if __name__ == "__main__":
manager()

@@ -0,0 +1,51 @@
"""Make case insensitive hash of query text

Revision ID: 1038c2174f5d
Revises: fd4fc850d7ea
Create Date: 2023-07-16 23:10:12.885949

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table

from redash.utils import gen_query_hash

# revision identifiers, used by Alembic.
revision = '1038c2174f5d'
down_revision = 'fd4fc850d7ea'
branch_labels = None
depends_on = None


def change_query_hash(conn, table, query_text_to):
for record in conn.execute(table.select()):
query_text = query_text_to(record.query)
conn.execute(
table
.update()
.where(table.c.id == record.id)
.values(query_hash=gen_query_hash(query_text)))


def upgrade():
queries = table(
'queries',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('query', sa.Text),
sa.Column('query_hash', sa.String(length=10)))

conn = op.get_bind()
change_query_hash(conn, queries, query_text_to=str)


def downgrade():
queries = table(
'queries',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('query', sa.Text),
sa.Column('query_hash', sa.String(length=10)))

conn = op.get_bind()
change_query_hash(conn, queries, query_text_to=str.lower)
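This migration rewrites every stored query_hash with the current gen_query_hash, and the downgrade lowercases the text first so the recomputed hashes line up with the old case-sensitive scheme. The effect of normalization is easy to see in isolation; a sketch using hashlib as a stand-in for redash.utils.gen_query_hash (an assumption, the real helper has its own normalization):

import hashlib

def toy_query_hash(query_text):
    # stand-in for gen_query_hash: digest of the (optionally normalized) query text
    return hashlib.md5(query_text.strip().encode("utf-8")).hexdigest()[:10]

print(toy_query_hash("SELECT 1"))          # differs from the next line...
print(toy_query_hash("select 1"))          # ...because the digest is case sensitive
print(toy_query_hash("SELECT 1".lower()))  # case-folding first makes them match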
60 migrations/versions/fd4fc850d7ea_.py (new file)
@@ -0,0 +1,60 @@
"""Convert user details to jsonb and move user profile image url into details column

Revision ID: fd4fc850d7ea
Revises: 89bc7873a3e0
Create Date: 2022-01-31 15:24:16.507888

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

from redash.models import db

# revision identifiers, used by Alembic.
revision = 'fd4fc850d7ea'
down_revision = '89bc7873a3e0'
branch_labels = None
depends_on = None


def upgrade():
connection = op.get_bind()

### commands auto generated by Alembic - please adjust! ###
op.alter_column('users', 'details',
existing_type=postgresql.JSON(astext_type=sa.Text()),
type_=postgresql.JSONB(astext_type=sa.Text()),
existing_nullable=True,
existing_server_default=sa.text("'{}'::jsonb"))
### end Alembic commands ###

update_query = """
update users
set details = details::jsonb || ('{"profile_image_url": "' || profile_image_url || '"}')::jsonb
where 1=1
"""
connection.execute(update_query)
op.drop_column("users", "profile_image_url")


def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
connection = op.get_bind()
op.add_column("users", sa.Column("profile_image_url", db.String(320), nullable=True))

update_query = """
update users set
profile_image_url = details->>'profile_image_url',
details = details - 'profile_image_url' ;
"""

connection.execute(update_query)
db.session.commit()
op.alter_column('users', 'details',
existing_type=postgresql.JSONB(astext_type=sa.Text()),
type_=postgresql.JSON(astext_type=sa.Text()),
existing_nullable=True,
existing_server_default=sa.text("'{}'::json"))

# ### end Alembic commands ###
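One detail worth noting in the upgrade: the JSON fragment is assembled by SQL string concatenation before being merged into details with the jsonb || operator, which breaks if a profile_image_url ever contains a double quote. The same merge can be written without manual quoting via PostgreSQL's jsonb_build_object; a hedged alternative sketch, not what the migration actually runs:

update_query = """
update users
set details = details || jsonb_build_object('profile_image_url', profile_image_url)
where profile_image_url is not null
"""
connection.execute(update_query)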
@@ -1,10 +1,14 @@
[build]
base = "client"
publish = "client/dist"
command = "npm ci && npm run build"
# Netlify doesn't seem to install Yarn even though NETLIFY_USE_YARN is set below
# command = "cd ../ && npm i -g yarn@1.22.19 && yarn --frozen-lockfile --force && cd viz-lib && yarn build:babel && cd .. && rm -r ./node_modules/@redash/viz && cp -r ./viz-lib/. ./node_modules/@redash/viz && yarn build && cd ./client"
command = "cd ../ && yarn cache clean && yarn --frozen-lockfile --network-concurrency 1 && yarn build && cd ./client"

[build.environment]
NODE_VERSION = "12.18.4"
NODE_VERSION = "16.20.1"
NETLIFY_USE_YARN = "true"
YARN_VERSION = "1.22.19"
CYPRESS_INSTALL_BINARY = "0"
PUPPETEER_SKIP_CHROMIUM_DOWNLOAD = "1"

27751 package-lock.json (generated)
109 package.json
@@ -1,41 +1,41 @@
{
"name": "redash-client",
"version": "10.1.0",
"version": "23.09.0-dev",
"description": "The frontend part of Redash.",
"main": "index.js",
"scripts": {
"start": "npm-run-all --parallel watch:viz webpack-dev-server",
"bundle": "bin/bundle-extensions",
"clean": "rm -rf ./client/dist/",
"build:viz": "(cd viz-lib && npm run build:babel)",
"build": "npm run clean && npm run build:viz && NODE_ENV=production webpack",
"build:old-node-version": "npm run clean && NODE_ENV=production node --max-old-space-size=4096 node_modules/.bin/webpack",
"watch:app": "webpack --watch --progress --colors -d",
"watch:viz": "(cd viz-lib && npm run watch:babel)",
"build:viz": "(cd viz-lib && yarn build:babel)",
"build": "yarn clean && yarn build:viz && NODE_ENV=production webpack",
"build:old-node-version": "yarn clean && NODE_ENV=production node --max-old-space-size=4096 node_modules/.bin/webpack",
"watch:app": "webpack watch --progress",
"watch:viz": "(cd viz-lib && yarn watch:babel)",
"watch": "npm-run-all --parallel watch:*",
"webpack-dev-server": "webpack-dev-server",
"analyze": "npm run clean && BUNDLE_ANALYZER=on webpack",
"analyze:build": "npm run clean && NODE_ENV=production BUNDLE_ANALYZER=on webpack",
"lint": "npm run lint:base -- --ext .js --ext .jsx --ext .ts --ext .tsx ./client",
"lint:fix": "npm run lint:base -- --fix --ext .js --ext .jsx --ext .ts --ext .tsx ./client",
"analyze": "yarn clean && BUNDLE_ANALYZER=on webpack",
"analyze:build": "yarn clean && NODE_ENV=production BUNDLE_ANALYZER=on webpack",
"lint": "yarn lint:base --ext .js --ext .jsx --ext .ts --ext .tsx ./client",
"lint:fix": "yarn lint:base --fix --ext .js --ext .jsx --ext .ts --ext .tsx ./client",
"lint:base": "eslint --config ./client/.eslintrc.js --ignore-path ./client/.eslintignore",
"lint:ci": "npm run lint -- --max-warnings 0 --format junit --output-file /tmp/test-results/eslint/results.xml",
"lint:ci": "yarn lint --max-warnings 0 --format junit --output-file /tmp/test-results/eslint/results.xml",
"prettier": "prettier --write 'client/app/**/*.{js,jsx,ts,tsx}' 'client/cypress/**/*.{js,jsx,ts,tsx}'",
"type-check": "tsc --noEmit --project client/tsconfig.json",
"type-check:watch": "npm run type-check -- --watch",
"type-check:watch": "yarn type-check --watch",
"jest": "TZ=Africa/Khartoum jest",
"test": "run-s type-check jest",
"test:watch": "jest --watch",
"cypress": "node client/cypress/cypress.js",
"postinstall": "(cd viz-lib && npm ci && npm run build:babel)"
"preinstall": "cd viz-lib && yarn link --link-folder ../.yarn",
"postinstall": "(cd viz-lib && yarn --frozen-lockfile && yarn build:babel) && yarn link --link-folder ./.yarn @redash/viz"
},
"repository": {
"type": "git",
"url": "git+https://github.com/getredash/redash.git"
},
"engines": {
"node": "^12.0.0",
"npm": "^6.0.0"
"node": ">14.16.0 <17.0.0",
"yarn": "^1.22.10"
},
"author": "Redash Contributors",
"license": "BSD-2-Clause",
@@ -48,26 +48,25 @@
"@redash/viz": "file:viz-lib",
"ace-builds": "^1.4.12",
"antd": "^4.4.3",
"axios": "^0.21.1",
"axios-auth-refresh": "^3.0.0",
"axios": "0.27.2",
"axios-auth-refresh": "3.3.6",
"bootstrap": "^3.3.7",
"classnames": "^2.2.6",
"d3": "^3.5.17",
"debug": "^3.1.0",
"dompurify": "^2.0.7",
"dompurify": "^2.0.17",
"font-awesome": "^4.7.0",
"history": "^4.10.1",
"hoist-non-react-statics": "^3.3.0",
"markdown": "0.5.0",
"material-design-iconic-font": "^2.2.0",
"moment": "^2.19.3",
"mousetrap": "^1.6.1",
"mustache": "^2.3.0",
"numeral": "^2.0.6",
"numbro": "^2.3.6",
"path-to-regexp": "^3.1.0",
"prop-types": "^15.6.1",
"query-string": "^6.9.0",
"react": "^16.14.0",
"react": "16.14.0",
"react-ace": "^9.1.1",
"react-dom": "^16.14.0",
"react-grid-layout": "^0.18.2",
@@ -79,18 +78,18 @@
"use-media": "^1.4.0"
},
"devDependencies": {
"@babel/cli": "^7.10.4",
"@babel/core": "^7.10.4",
"@babel/plugin-proposal-class-properties": "^7.10.4",
"@babel/cli": "^7.22.9",
"@babel/core": "^7.22.9",
"@babel/plugin-proposal-class-properties": "^7.18.6",
"@babel/plugin-transform-object-assign": "^7.2.0",
"@babel/preset-env": "^7.10.4",
"@babel/preset-env": "^7.22.9",
"@babel/preset-react": "^7.0.0",
"@babel/preset-typescript": "^7.10.4",
"@cypress/code-coverage": "^3.8.1",
"@percy/agent": "0.24.3",
"@percy/cypress": "^2.3.2",
"@pmmmwh/react-refresh-webpack-plugin": "^0.4.3",
"@testing-library/cypress": "^7.0.2",
"@babel/preset-typescript": "^7.22.5",
"@cypress/code-coverage": "^3.11.0",
"@percy/agent": "^0.28.7",
"@percy/cypress": "^3.1.2",
"@pmmmwh/react-refresh-webpack-plugin": "^0.5.10",
"@testing-library/cypress": "^8.0.7",
"@types/classnames": "^2.2.10",
"@types/hoist-non-react-statics": "^3.3.1",
"@types/lodash": "^4.14.157",
@@ -103,19 +102,20 @@
"atob": "^2.1.2",
"babel-eslint": "^10.0.3",
"babel-jest": "^24.1.0",
"babel-loader": "^8.0.5",
"babel-plugin-istanbul": "^6.0.0",
"babel-loader": "^8.3.0",
"babel-plugin-istanbul": "^6.1.1",
"babel-plugin-transform-builtin-extend": "^1.1.2",
"copy-webpack-plugin": "^4.5.3",
"css-loader": "^0.28.7",
"cypress": "^5.3.0",
"copy-webpack-plugin": "^6.4.1",
"css-loader": "^5.2.7",
"cypress": "^11.2.0",
"dayjs": "^1.11.9",
"enzyme": "^3.8.0",
"enzyme-adapter-react-16": "^1.7.1",
"enzyme-to-json": "^3.3.5",
"eslint": "^6.7.2",
"eslint-config-prettier": "^6.7.0",
"eslint-config-react-app": "^5.1.0",
"eslint-loader": "^3.0.3",
"eslint-loader": "^4.0.2",
"eslint-plugin-chai-friendly": "^0.5.0",
"eslint-plugin-compat": "^3.3.0",
"eslint-plugin-cypress": "^2.0.1",
@@ -126,35 +126,34 @@
"eslint-plugin-no-only-tests": "^2.4.0",
"eslint-plugin-react": "^7.17.0",
"eslint-plugin-react-hooks": "^1.7.0",
"file-loader": "^2.0.0",
"html-webpack-plugin": "^3.2.0",
"file-loader": "^6.2.0",
"html-webpack-plugin": "^4.5.2",
"identity-obj-proxy": "^3.0.0",
"jest": "^24.1.0",
"less": "^3.9.0",
"less-loader": "^4.1.0",
"less-plugin-autoprefix": "^1.5.1",
"lodash": "^4.17.20",
"mini-css-extract-plugin": "^0.4.4",
"less": "^3.13.1",
"less-loader": "^5.0.0",
"less-plugin-autoprefix": "^2.0.0",
"lodash": "^4.17.21",
"mini-css-extract-plugin": "^1.6.2",
"mockdate": "^2.0.2",
"npm-run-all": "^4.1.5",
"prettier": "^1.19.1",
"raw-loader": "^0.5.1",
"react-refresh": "^0.9.0",
"react-test-renderer": "^16.5.2",
"request": "^2.88.0",
"react-refresh": "^0.14.0",
"react-test-renderer": "^16.14.0",
"request-cookies": "^1.1.0",
"style-loader": "^2.0.0",
"typescript": "^4.1.2",
"url-loader": "^1.1.2",
"webpack": "^4.44.2",
"webpack-build-notifier": "^0.1.30",
"webpack-bundle-analyzer": "^2.11.1",
"webpack-cli": "^3.1.2",
"webpack-dev-server": "^3.11.0",
"url-loader": "^4.1.1",
"webpack": "^4.46.0",
"webpack-build-notifier": "^2.3.0",
"webpack-bundle-analyzer": "^4.9.0",
"webpack-cli": "^4.10.0",
"webpack-dev-server": "^4.15.1",
"webpack-manifest-plugin": "^2.0.4"
},
"optionalDependencies": {
"fsevents": "^1.2.9"
"fsevents": "^2.3.2"
},
"jest": {
"rootDir": "./client",

18 pyproject.toml (new file)
@@ -0,0 +1,18 @@
[project]
requires-python = ">=3.8"

[tool.black]
target-version = ['py38']
line-length = 119
force-exclude = '''
/(
migrations
)/
'''

[tool.isort]
py_version = 38
multi_line_output = 3
include_trailing_comma = true
use_parentheses = true
skip = "migrations"
@@ -1,2 +1,5 @@
[pytest]
norecursedirs = *.egg .eggs dist build docs .tox
filterwarnings =
    once::DeprecationWarning
    once::PendingDeprecationWarning
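The once::DeprecationWarning entries use the standard action::category warnings-filter syntax: each matching warning is reported the first time it occurs and then suppressed, rather than flooding the test output. The plain-Python equivalent is roughly (a sketch, not part of the change):

import warnings

# Report each matching warning only once per session.
warnings.filterwarnings("once", category=DeprecationWarning)
warnings.filterwarnings("once", category=PendingDeprecationWarning)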
@@ -1,21 +1,20 @@
from __future__ import absolute_import
import logging
import os
import sys

import redis
from flask_mail import Mail
from flask_limiter import Limiter
from flask_limiter.util import get_ipaddr
from flask_limiter.util import get_remote_address
from flask_mail import Mail
from flask_migrate import Migrate
from statsd import StatsClient

from . import settings
from .app import create_app  # noqa
from .query_runner import import_query_runners
from .destinations import import_destinations
from redash import settings
from redash.app import create_app  # noqa
from redash.destinations import import_destinations
from redash.query_runner import import_query_runners

__version__ = "10.1.0"
__version__ = "23.09.0-dev"


if os.environ.get("REMOTE_DEBUG"):
@@ -48,10 +47,8 @@ redis_connection = redis.from_url(settings.REDIS_URL)
rq_redis_connection = redis.from_url(settings.RQ_REDIS_URL)
mail = Mail()
migrate = Migrate(compare_type=True)
statsd_client = StatsClient(
    host=settings.STATSD_HOST, port=settings.STATSD_PORT, prefix=settings.STATSD_PREFIX
)
limiter = Limiter(key_func=get_ipaddr, storage_uri=settings.LIMITER_STORAGE)
statsd_client = StatsClient(host=settings.STATSD_HOST, port=settings.STATSD_PORT, prefix=settings.STATSD_PREFIX)
limiter = Limiter(key_func=get_remote_address, storage_uri=settings.LIMITER_STORAGE)

import_query_runners(settings.QUERY_RUNNERS)
import_destinations(settings.DESTINATIONS)
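Note on the Limiter change above: get_ipaddr, which inspects X-Forwarded-For and so can be influenced by client-supplied headers, is deprecated in newer flask-limiter releases; get_remote_address keys rate limits on the request's immediate remote address instead. A minimal sketch of the same pattern in isolation, assuming a trivial Flask app and flask-limiter's keyword-argument style as used in the diff:

from flask import Flask
from flask_limiter import Limiter
from flask_limiter.util import get_remote_address

app = Flask(__name__)
# Rate limits are keyed on request.remote_addr rather than on
# forwarded headers that clients could forge.
limiter = Limiter(key_func=get_remote_address, storage_uri="memory://")
limiter.init_app(app)

@app.route("/ping")
@limiter.limit("10 per minute")
def ping():
    return "pong"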
@@ -1,7 +1,7 @@
from flask import Flask
from werkzeug.middleware.proxy_fix import ProxyFix

from . import settings
from redash import settings


class Redash(Flask):
@@ -25,7 +25,6 @@ class Redash(Flask):
def create_app():
    from . import (
        authentication,
        extensions,
        handlers,
        limiter,
        mail,
@@ -43,7 +42,7 @@ def create_app():
    app = Redash()

    # Check and update the cached version for use by the client
    app.before_first_request(reset_new_version_status)
    reset_new_version_status()

    security.init_app(app)
    request_metrics.init_app(app)
@@ -54,7 +53,6 @@ def create_app():
    limiter.init_app(app)
    handlers.init_app(app)
    configure_webpack(app)
    extensions.init_app(app)
    users.init_app(app)
    tasks.init_app(app)
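Note on the create_app() change: Flask.before_first_request was deprecated in Flask 2.2 and removed in 2.3, so the version check is now invoked directly while the app is being built instead of being deferred to the first request. The general migration pattern, as a sketch with a hypothetical setup function:

from flask import Flask

def warm_cache():
    # Hypothetical one-time setup that used to be registered via
    # app.before_first_request(warm_cache).
    pass

def create_app():
    app = Flask(__name__)
    # Run the one-time setup eagerly at creation time instead of
    # deferring it to the first incoming request.
    warm_cache()
    return app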
@@ -5,27 +5,26 @@ import time
from datetime import timedelta
from urllib.parse import urlsplit, urlunsplit

from flask import jsonify, redirect, request, url_for, session
from flask import jsonify, redirect, request, session, url_for
from flask_login import LoginManager, login_user, logout_user, user_logged_in
from sqlalchemy.orm.exc import NoResultFound
from werkzeug.exceptions import Unauthorized

from redash import models, settings
from redash.authentication import jwt_auth
from redash.authentication.org_resolving import current_org
from redash.settings.organization import settings as org_settings
from redash.tasks import record_event
from sqlalchemy.orm.exc import NoResultFound
from werkzeug.exceptions import Unauthorized

login_manager = LoginManager()
logger = logging.getLogger("authentication")


def get_login_url(external=False, next="/"):
    if settings.MULTI_ORG and current_org == None:
    if settings.MULTI_ORG and current_org == None:  # noqa: E711
        login_url = "/"
    elif settings.MULTI_ORG:
        login_url = url_for(
            "redash.login", org_slug=current_org.slug, next=next, _external=external
        )
        login_url = url_for("redash.login", org_slug=current_org.slug, next=next, _external=external)
    else:
        login_url = url_for("redash.login", next=next, _external=external)

@@ -68,11 +67,7 @@ def request_loader(request):
    elif settings.AUTH_TYPE == "api_key":
        user = api_key_load_user_from_request(request)
    else:
        logger.warning(
            "Unknown authentication type ({}). Using default (HMAC).".format(
                settings.AUTH_TYPE
            )
        )
        logger.warning("Unknown authentication type ({}). Using default (HMAC).".format(settings.AUTH_TYPE))
        user = hmac_load_user_from_request(request)

    if org_settings["auth_jwt_login_enabled"] and user is None:
@@ -192,6 +187,10 @@ def jwt_token_load_user_from_request(request):
    if not payload:
        return

    if "email" not in payload:
        logger.info("No email field in token, refusing to login")
        return

    try:
        user = models.User.get_by_email_and_org(payload["email"], org)
    except models.NoResultFound:
@@ -216,12 +215,9 @@ def log_user_logged_in(app, user):

@login_manager.unauthorized_handler
def redirect_to_login():
    if request.is_xhr or "/api/" in request.path:
        response = jsonify(
            {"message": "Couldn't find resource. Please login and try again."}
        )
        response.status_code = 404
        return response
    is_xhr = request.headers.get("X-Requested-With") == "XMLHttpRequest"
    if is_xhr or "/api/" in request.path:
        return {"message": "Couldn't find resource. Please login and try again."}, 404

    login_url = get_login_url(next=request.url, external=False)

@@ -231,7 +227,7 @@ def redirect_to_login():
def logout_and_redirect_to_index():
    logout_user()

    if settings.MULTI_ORG and current_org == None:
    if settings.MULTI_ORG and current_org == None:  # noqa: E711
        index_url = "/"
    elif settings.MULTI_ORG:
        index_url = url_for("redash.index", org_slug=current_org.slug, _external=False)
@@ -242,14 +238,11 @@ def logout_and_redirect_to_index():


def init_app(app):
    from redash.authentication import (
        saml_auth,
        remote_user_auth,
        ldap_auth,
    from redash.authentication import ldap_auth, remote_user_auth, saml_auth
    from redash.authentication.google_oauth import (
        create_google_oauth_blueprint,
    )

    from redash.authentication.google_oauth import create_google_oauth_blueprint

    login_manager.init_app(app)
    login_manager.anonymous_user = models.AnonymousUser
    login_manager.REMEMBER_COOKIE_DURATION = settings.REMEMBER_COOKIE_DURATION
@@ -262,7 +255,12 @@ def init_app(app):
    from redash.security import csrf

    # Authlib's flask oauth client requires a Flask app to initialize
    for blueprint in [create_google_oauth_blueprint(app), saml_auth.blueprint, remote_user_auth.blueprint, ldap_auth.blueprint, ]:
    for blueprint in [
        create_google_oauth_blueprint(app),
        saml_auth.blueprint,
        remote_user_auth.blueprint,
        ldap_auth.blueprint,
    ]:
        csrf.exempt(blueprint)
        app.register_blueprint(blueprint)
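Note on redirect_to_login above: Request.is_xhr was removed from Werkzeug, so the handler now checks the X-Requested-With header by hand, and it relies on Flask (1.1+) turning a (dict, status) return value into a JSON response instead of building one with jsonify. A standalone sketch of the same idiom:

from flask import Flask, request

app = Flask(__name__)

@app.errorhandler(404)
def not_found(error):
    # Werkzeug removed request.is_xhr; reproduce the check manually.
    is_xhr = request.headers.get("X-Requested-With") == "XMLHttpRequest"
    if is_xhr or "/api/" in request.path:
        # Flask serializes a dict return value to JSON automatically.
        return {"message": "Couldn't find resource. Please login and try again."}, 404
    return "Not found", 404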
@@ -1,13 +1,12 @@
import logging

from flask import render_template
from itsdangerous import URLSafeTimedSerializer

from redash import settings
from redash.tasks import send_mail
from redash.utils import base_url

# noinspection PyUnresolvedReferences
from itsdangerous import URLSafeTimedSerializer, SignatureExpired, BadSignature

logger = logging.getLogger(__name__)
serializer = URLSafeTimedSerializer(settings.SECRET_KEY)
@@ -1,18 +1,17 @@
import logging

import requests
from flask import redirect, url_for, Blueprint, flash, request, session
from authlib.integrations.flask_client import OAuth
from flask import Blueprint, flash, redirect, request, session, url_for


from redash import models, settings
from redash import models
from redash.authentication import (
    create_and_login_user,
    logout_and_redirect_to_index,
    get_next_path,
    logout_and_redirect_to_index,
)
from redash.authentication.org_resolving import current_org

from authlib.integrations.flask_client import OAuth


def verify_profile(org, profile):
    if org.is_public:
@@ -46,9 +45,7 @@ def create_google_oauth_blueprint(app):

    def get_user_profile(access_token):
        headers = {"Authorization": "OAuth {}".format(access_token)}
        response = requests.get(
            "https://www.googleapis.com/oauth2/v1/userinfo", headers=headers
        )
        response = requests.get("https://www.googleapis.com/oauth2/v1/userinfo", headers=headers)

        if response.status_code == 401:
            logger.warning("Failed getting user profile (response code 401).")
@@ -63,12 +60,9 @@ def create_google_oauth_blueprint(app):

    @blueprint.route("/oauth/google", endpoint="authorize")
    def login():

        redirect_uri = url_for(".callback", _external=True)

        next_path = request.args.get(
            "next", url_for("redash.index", org_slug=session.get("org_slug"))
        )
        next_path = request.args.get("next", url_for("redash.index", org_slug=session.get("org_slug")))
        logger.debug("Callback url: %s", redirect_uri)
        logger.debug("Next is: %s", next_path)

@@ -78,7 +72,6 @@ def create_google_oauth_blueprint(app):

    @blueprint.route("/oauth/google_callback", endpoint="callback")
    def authorized():

        logger.debug("Authorized user inbound")

        resp = oauth.google.authorize_access_token()
@@ -109,21 +102,15 @@ def create_google_oauth_blueprint(app):
                profile["email"],
                org,
            )
            flash(
                "Your Google Apps account ({}) isn't allowed.".format(profile["email"])
            )
            flash("Your Google Apps account ({}) isn't allowed.".format(profile["email"]))
            return redirect(url_for("redash.login", org_slug=org.slug))

        picture_url = "%s?sz=40" % profile["picture"]
        user = create_and_login_user(
            org, profile["name"], profile["email"], picture_url
        )
        user = create_and_login_user(org, profile["name"], profile["email"], picture_url)
        if user is None:
            return logout_and_redirect_to_index()

        unsafe_next_path = session.get("next_url") or url_for(
            "redash.index", org_slug=org.slug
        )
        unsafe_next_path = session.get("next_url") or url_for("redash.index", org_slug=org.slug)
        next_path = get_next_path(unsafe_next_path)

        return redirect(next_path)
@@ -1,10 +1,39 @@
import logging

import jwt
import requests
import simplejson

logger = logging.getLogger("jwt_auth")

FILE_SCHEME_PREFIX = "file://"


def get_public_key_from_file(url):
    file_path = url[len(FILE_SCHEME_PREFIX) :]
    with open(file_path) as key_file:
        key_str = key_file.read()

    get_public_keys.key_cache[url] = [key_str]
    return key_str


def get_public_key_from_net(url):
    r = requests.get(url)
    r.raise_for_status()
    data = r.json()
    if "keys" in data:
        public_keys = []
        for key_dict in data["keys"]:
            public_key = jwt.algorithms.RSAAlgorithm.from_jwk(simplejson.dumps(key_dict))
            public_keys.append(public_key)

        get_public_keys.key_cache[url] = public_keys
        return public_keys
    else:
        get_public_keys.key_cache[url] = data
        return data


def get_public_keys(url):
    """
@@ -12,33 +41,21 @@ def get_public_keys(url):
    List of RSA public keys usable by PyJWT.
    """
    key_cache = get_public_keys.key_cache
    keys = {}
    if url in key_cache:
        return key_cache[url]
        keys = key_cache[url]
    else:
        r = requests.get(url)
        r.raise_for_status()
        data = r.json()
        if "keys" in data:
            public_keys = []
            for key_dict in data["keys"]:
                public_key = jwt.algorithms.RSAAlgorithm.from_jwk(
                    simplejson.dumps(key_dict)
                )
                public_keys.append(public_key)

            get_public_keys.key_cache[url] = public_keys
            return public_keys
        if url.startswith(FILE_SCHEME_PREFIX):
            keys = [get_public_key_from_file(url)]
        else:
            get_public_keys.key_cache[url] = data
            return data
            keys = get_public_key_from_net(url)
    return keys


get_public_keys.key_cache = {}


def verify_jwt_token(
    jwt_token, expected_issuer, expected_audience, algorithms, public_certs_url
):
def verify_jwt_token(jwt_token, expected_issuer, expected_audience, algorithms, public_certs_url):
    # https://developers.cloudflare.com/access/setting-up-access/validate-jwt-tokens/
    # https://cloud.google.com/iap/docs/signed-headers-howto
    # Loop through the keys since we can't pass the key set to the decoder
@@ -53,9 +70,7 @@ def verify_jwt_token(
    for key in keys:
        try:
            # decode returns the claims which has the email if you need it
            payload = jwt.decode(
                jwt_token, key=key, audience=expected_audience, algorithms=algorithms
            )
            payload = jwt.decode(jwt_token, key=key, audience=expected_audience, algorithms=algorithms)
            issuer = payload["iss"]
            if issuer != expected_issuer:
                raise Exception("Wrong issuer: {}".format(issuer))
@@ -63,4 +78,5 @@ def verify_jwt_token(
            break
        except Exception as e:
            logging.exception(e)

    return payload, valid_token
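Note on the jwt_auth refactor: fetching keys from a file:// URL or over HTTP is now split into two helpers, and get_public_keys() consults a per-URL cache attached to the function itself before touching either source. A rough usage sketch of the functions above; the URL, issuer, audience and token values are illustrative only:

token = "<jwt taken from the access-proxy header>"
jwks_url = "https://example.com/.well-known/jwks.json"

# First call fetches and populates get_public_keys.key_cache; later
# calls for the same URL are served from the cache with no I/O.
keys = get_public_keys(jwks_url)

payload, valid = verify_jwt_token(
    jwt_token=token,
    expected_issuer="https://issuer.example.com",
    expected_audience="my-audience",
    algorithms=["RS256"],
    public_certs_url=jwks_url,
)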
@@ -1,13 +1,13 @@
import logging
import sys

from redash import settings

from flask import flash, redirect, render_template, request, url_for, Blueprint
from flask import Blueprint, flash, redirect, render_template, request, url_for
from flask_login import current_user

from redash import settings

try:
    from ldap3 import Server, Connection
    from ldap3 import Connection, Server
except ImportError:
    if settings.LDAP_LOGIN_ENABLED:
        sys.exit(
@@ -16,8 +16,8 @@ except ImportError:

from redash.authentication import (
    create_and_login_user,
    logout_and_redirect_to_index,
    get_next_path,
    logout_and_redirect_to_index,
)
from redash.authentication.org_resolving import current_org
from redash.handlers.base import org_scoped_rule
@@ -1,13 +1,15 @@
import logging
from flask import redirect, url_for, Blueprint, request

from flask import Blueprint, redirect, request, url_for

from redash import settings
from redash.authentication import (
    create_and_login_user,
    logout_and_redirect_to_index,
    get_next_path,
    logout_and_redirect_to_index,
)
from redash.authentication.org_resolving import current_org
from redash.handlers.base import org_scoped_rule
from redash import settings

logger = logging.getLogger("remote_user_auth")

@@ -20,9 +22,7 @@ def login(org_slug=None):
    next_path = get_next_path(unsafe_next_path)

    if not settings.REMOTE_USER_LOGIN_ENABLED:
        logger.error(
            "Cannot use remote user for login without being enabled in settings"
        )
        logger.error("Cannot use remote user for login without being enabled in settings")
        return redirect(url_for("redash.index", next=next_path, org_slug=org_slug))

    email = request.headers.get(settings.REMOTE_USER_HEADER)
@@ -1,16 +1,20 @@
import logging
from flask import flash, redirect, url_for, Blueprint, request
from redash import settings
from redash.authentication import create_and_login_user, logout_and_redirect_to_index
from redash.authentication.org_resolving import current_org
from redash.handlers.base import org_scoped_rule
from redash.utils import mustache_render

from flask import Blueprint, flash, redirect, request, url_for
from saml2 import BINDING_HTTP_POST, BINDING_HTTP_REDIRECT, entity
from saml2.client import Saml2Client
from saml2.config import Config as Saml2Config
from saml2.saml import NAMEID_FORMAT_TRANSIENT
from saml2.sigver import get_xmlsec_binary

from redash import settings
from redash.authentication import (
    create_and_login_user,
    logout_and_redirect_to_index,
)
from redash.authentication.org_resolving import current_org
from redash.handlers.base import org_scoped_rule
from redash.utils import mustache_render

logger = logging.getLogger("saml_auth")
blueprint = Blueprint("saml_auth", __name__)
@@ -29,6 +33,7 @@ def get_saml_client(org):
    sso_url = org.get_setting("auth_saml_sso_url")
    x509_cert = org.get_setting("auth_saml_x509_cert")
    metadata_url = org.get_setting("auth_saml_metadata_url")
    sp_settings = org.get_setting("auth_saml_sp_settings")

    if settings.SAML_SCHEME_OVERRIDE:
        acs_url = url_for(
@@ -88,6 +93,11 @@ def get_saml_client(org):
    if acs_url is not None and acs_url != "":
        saml_settings["entityid"] = acs_url

    if sp_settings:
        import json

        saml_settings["service"]["sp"].update(json.loads(sp_settings))

    sp_config = Saml2Config()
    sp_config.load(saml_settings)
    sp_config.allow_unknown_attributes = True
@@ -4,14 +4,21 @@ from flask import current_app
from flask.cli import FlaskGroup, run_command, with_appcontext
from rq import Connection

from redash import __version__, create_app, settings, rq_redis_connection
from redash.cli import data_sources, database, groups, organization, queries, users, rq
from redash import __version__, create_app, rq_redis_connection, settings
from redash.cli import (
    data_sources,
    database,
    groups,
    organization,
    queries,
    rq,
    users,
)
from redash.monitor import get_status


def create(group):
def create():
    app = current_app or create_app()
    group.app = app

    @app.shell_context_processor
    def shell_context():
@@ -62,25 +69,23 @@ def send_test_mail(email=None):
    """
    Send test message to EMAIL (default: the address you defined in MAIL_DEFAULT_SENDER)
    """
    from redash import mail
    from flask_mail import Message

    from redash import mail

    if email is None:
        email = settings.MAIL_DEFAULT_SENDER

    mail.send(
        Message(
            subject="Test Message from Redash", recipients=[email], body="Test message."
        )
    )
    mail.send(Message(subject="Test Message from Redash", recipients=[email], body="Test message."))


@manager.command("shell")
@with_appcontext
def shell():
    import sys
    from ptpython import repl

    from flask.globals import _app_ctx_stack
    from ptpython import repl

    app = _app_ctx_stack.top.app
@@ -1,6 +1,7 @@
from sys import exit

import click
from click.types import convert_type
from flask.cli import AppGroup
from sqlalchemy.orm.exc import NoResultFound

@@ -33,14 +34,10 @@ def list_command(organization=None):
        if i > 0:
            print("-" * 20)

        print(
            "Id: {}\nName: {}\nType: {}\nOptions: {}".format(
                ds.id, ds.name, ds.type, ds.options.to_json()
            )
        )
        print("Id: {}\nName: {}\nType: {}\nOptions: {}".format(ds.id, ds.name, ds.type, ds.options.to_json()))


@manager.command()
@manager.command(name="list_types")
def list_types():
    print("Enabled Query Runners:")
    types = sorted(query_runners.keys())
@@ -75,9 +72,7 @@ def test(name, organization="default"):
    data_source = models.DataSource.query.filter(
        models.DataSource.name == name, models.DataSource.org == org
    ).one()
    print(
        "Testing connection to data source: {} (id={})".format(name, data_source.id)
    )
    print("Testing connection to data source: {} (id={})".format(name, data_source.id))
    try:
        data_source.query_runner.test_connection()
    except Exception as e:
@@ -139,11 +134,19 @@ def new(name=None, type=None, options=None, organization="default"):
        else:
            prompt = "{} (optional)".format(prompt)

        _type = types[prop["type"]]

        def value_proc(value):
            if value == default_value:
                return default_value
            return convert_type(_type, default_value)(value)

        value = click.prompt(
            prompt,
            default=default_value,
            type=types[prop["type"]],
            type=_type,
            show_default=False,
            value_proc=value_proc,
        )
        if value != default_value:
            options_obj[k] = value
@@ -154,13 +157,9 @@ def new(name=None, type=None, options=None, organization="default"):

    if not options.is_valid():
        print("Error: invalid configuration.")
        exit()
        exit(1)

    print(
        "Creating {} data source ({}) with options:\n{}".format(
            type, name, options.to_json()
        )
    )
    print("Creating {} data source ({}) with options:\n{}".format(type, name, options.to_json()))

    data_source = models.DataSource.create_with_group(
        name=name,
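Note on the click.prompt change in new(): supplying value_proc bypasses click's built-in type conversion, so the helper reproduces that conversion with click.types.convert_type while letting the sentinel default pass through untouched (otherwise an untouched optional prompt would be forced through the converter as well). A self-contained sketch of the pattern, with an illustrative int option:

import click
from click.types import convert_type

default_value = ""  # sentinel meaning "no value entered"
_type = int

def value_proc(value):
    # Pressing Enter returns the sentinel unchanged so the optional
    # setting can simply be skipped.
    if value == default_value:
        return default_value
    return convert_type(_type, default_value)(value)

value = click.prompt(
    "port (optional)",
    default=default_value,
    type=_type,
    show_default=False,
    value_proc=value_proc,
)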
@@ -1,9 +1,11 @@
import logging
import time

import sqlalchemy
from click import argument, option
from cryptography.fernet import InvalidToken
from flask.cli import AppGroup
from flask_migrate import stamp
import sqlalchemy
from sqlalchemy.exc import DatabaseError
from sqlalchemy.sql import select
from sqlalchemy_utils.types.encrypted.encrypted_type import FernetEngine
@@ -41,7 +43,7 @@ def load_extensions(db):
        connection.execute(f'CREATE EXTENSION IF NOT EXISTS "{extension}";')


@manager.command()
@manager.command(name="create_tables")
def create_tables():
    """Create the database tables."""
    from redash.models import db
@@ -61,7 +63,7 @@ def create_tables():
    stamp()


@manager.command()
@manager.command(name="drop_tables")
def drop_tables():
    """Drop the database tables."""
    from redash.models import db
@@ -81,8 +83,6 @@ def reencrypt(old_secret, new_secret, show_sql):
    _wait_for_db_connection(db)

    if show_sql:
        import logging

        logging.basicConfig()
        logging.getLogger("sqlalchemy.engine").setLevel(logging.INFO)

@@ -93,9 +93,7 @@ def reencrypt(old_secret, new_secret, show_sql):
        Column("id", key_type(orm_name), primary_key=True),
        Column(
            "encrypted_options",
            ConfigurationContainer.as_mutable(
                EncryptedConfiguration(db.Text, old_secret, FernetEngine)
            ),
            ConfigurationContainer.as_mutable(EncryptedConfiguration(db.Text, old_secret, FernetEngine)),
        ),
    )
    table_for_update = sqlalchemy.Table(
@@ -104,19 +102,21 @@ def reencrypt(old_secret, new_secret, show_sql):
        Column("id", key_type(orm_name), primary_key=True),
        Column(
            "encrypted_options",
            ConfigurationContainer.as_mutable(
                EncryptedConfiguration(db.Text, new_secret, FernetEngine)
            ),
            ConfigurationContainer.as_mutable(EncryptedConfiguration(db.Text, new_secret, FernetEngine)),
        ),
    )

    update = table_for_update.update()
    selected_items = db.session.execute(select([table_for_select]))
    for item in selected_items:
        stmt = update.where(table_for_update.c.id == item["id"]).values(
            encrypted_options=item["encrypted_options"]
        )
        db.session.execute(stmt)
        try:
            stmt = update.where(table_for_update.c.id == item["id"]).values(
                encrypted_options=item["encrypted_options"]
            )
        except InvalidToken:
            logging.error(f'Invalid Decryption Key for id {item["id"]} in table {table_for_select}')
        else:
            db.session.execute(stmt)

    selected_items.close()
    db.session.commit()
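Note on the reencrypt loop: a row whose payload cannot be decrypted with the old key raises cryptography.fernet.InvalidToken, and the new try/except/else logs and skips that row instead of aborting the whole migration. The underlying Fernet behavior in isolation, as a sketch with freshly generated keys:

from cryptography.fernet import Fernet, InvalidToken

old_key, other_key = Fernet.generate_key(), Fernet.generate_key()
token = Fernet(other_key).encrypt(b"secret options")

try:
    # Decrypting with the wrong key raises InvalidToken rather than
    # returning garbage, which is exactly what the command now catches.
    Fernet(old_key).decrypt(token)
except InvalidToken:
    print("skipping row encrypted with a different key")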
@@ -1,8 +1,8 @@
from sys import exit

from sqlalchemy.orm.exc import NoResultFound
from flask.cli import AppGroup
from click import argument, option
from flask.cli import AppGroup
from sqlalchemy.orm.exc import NoResultFound

from redash import models

@@ -43,7 +43,7 @@ def create(name, permissions=None, organization="default"):
        exit(1)


@manager.command()
@manager.command(name="change_permissions")
@argument("group_id")
@option(
    "--permissions",
@@ -60,14 +60,11 @@ def change_permissions(group_id, permissions=None):
    try:
        group = models.Group.query.get(group_id)
    except NoResultFound:
        print("User [%s] not found." % group_id)
        print("Group [%s] not found." % group_id)
        exit(1)

    permissions = extract_permissions_string(permissions)
    print(
        "current permissions [%s] will be modify to [%s]"
        % (",".join(group.permissions), ",".join(permissions))
    )
    print("current permissions [%s] will be modify to [%s]" % (",".join(group.permissions), ",".join(permissions)))

    group.permissions = permissions

@@ -119,4 +116,7 @@ def list_command(organization=None):

        members = models.Group.members(group.id)
        user_names = [m.name for m in members]
        print("Users: {}".format(", ".join(user_names)))
        if user_names:
            print("Users: {}".format(", ".join(user_names)))
        else:
            print("Users:")
@@ -1,4 +1,4 @@
from click import argument
from click import argument, option
from flask.cli import AppGroup

from redash import models
@@ -6,7 +6,7 @@ from redash import models
manager = AppGroup(help="Organization management commands.")


@manager.command()
@manager.command(name="set_google_apps_domains")
@argument("domains")
def set_google_apps_domains(domains):
    """
@@ -17,21 +17,32 @@ def set_google_apps_domains(domains):
    organization.settings[k] = domains.split(",")
    models.db.session.add(organization)
    models.db.session.commit()
    print(
        "Updated list of allowed domains to: {}".format(
            organization.google_apps_domains
        )
    )
    print("Updated list of allowed domains to: {}".format(organization.google_apps_domains))


@manager.command()
@manager.command(name="show_google_apps_domains")
def show_google_apps_domains():
    organization = models.Organization.query.first()
    print(
        "Current list of Google Apps domains: {}".format(
            ", ".join(organization.google_apps_domains)
        )
    )
    print("Current list of Google Apps domains: {}".format(", ".join(organization.google_apps_domains)))


@manager.command(name="create")
@argument("name")
@option(
    "--slug",
    "slug",
    default="default",
    help="The slug the organization belongs to (leave blank for " "'default').",
)
def create(name, slug="default"):
    print("Creating organization (%s)..." % (name))

    try:
        models.db.session.add(models.Organization(name=name, slug=slug, settings={}))
        models.db.session.commit()
    except Exception as e:
        print("Failed create organization: %s" % e)
        exit(1)


@manager.command(name="list")
@@ -5,7 +5,7 @@ from sqlalchemy.orm.exc import NoResultFound
manager = AppGroup(help="Queries management commands.")


@manager.command()
@manager.command(name="add_tag")
@argument("query_id")
@argument("tag")
def add_tag(query_id, tag):
@@ -31,7 +31,7 @@ def add_tag(query_id, tag):
    print("Tag added.")


@manager.command()
@manager.command(name="remove_tag")
@argument("query_id")
@argument("tag")
def remove_tag(query_id, tag):
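Note on the recurring @manager.command() to @manager.command(name=...) changes across these CLI modules: newer click releases derive a command's name from the function name and normalize underscores to dashes, so pinning name= appears intended to keep the historical underscore invocations (for example "queries add_tag") working. A minimal sketch of the pattern:

from flask.cli import AppGroup

manager = AppGroup(help="Queries management commands.")

# Without an explicit name, newer click would expose this command as
# "add-tag"; pinning name="add_tag" preserves the old spelling.
@manager.command(name="add_tag")
def add_tag():
    pass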
@@ -1,7 +1,5 @@
from __future__ import absolute_import
import socket
import sys
import datetime
import socket
from itertools import chain

from click import argument
@@ -14,11 +12,11 @@ from supervisor_checks.check_modules import base

from redash import rq_redis_connection
from redash.tasks import (
    Worker,
    periodic_job_definitions,
    rq_scheduler,
    schedule_periodic_jobs,
    periodic_job_definitions,
)
from redash.tasks.worker import Worker
from redash.worker import default_queues

manager = AppGroup(help="RQ management commands.")
@@ -55,11 +53,7 @@ class WorkerHealthcheck(base.BaseCheck):
    def __call__(self, process_spec):
        pid = process_spec["pid"]
        all_workers = Worker.all(connection=rq_redis_connection)
        workers = [
            w
            for w in all_workers
            if w.hostname == socket.gethostname() and w.pid == pid
        ]
        workers = [w for w in all_workers if w.hostname == socket.gethostname() and w.pid == pid]

        if not workers:
            self._log(f"Cannot find worker for hostname {socket.gethostname()} and pid {pid}. ==> Is healthy? False")
@@ -96,6 +90,4 @@ class WorkerHealthcheck(base.BaseCheck):

@manager.command()
def healthcheck():
    return check_runner.CheckRunner(
        "worker_healthcheck", "worker", None, [(WorkerHealthcheck, {})]
    ).run()
    return check_runner.CheckRunner("worker_healthcheck", "worker", None, [(WorkerHealthcheck, {})]).run()
@@ -2,8 +2,8 @@ from sys import exit

from click import BOOL, argument, option, prompt
from flask.cli import AppGroup
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm.exc import NoResultFound

from redash import models
from redash.handlers.users import invite_user
@@ -26,7 +26,7 @@ def build_groups(org, groups, is_admin):
    return groups


@manager.command()
@manager.command(name="grant_admin")
@argument("email")
@option(
    "--org",
@@ -116,7 +116,7 @@ def create(
    exit(1)


@manager.command()
@manager.command(name="create_root")
@argument("email")
@argument("name")
@option(
@@ -136,17 +136,13 @@ def create(
    "--password",
    "password",
    default=None,
    help="Password for root user who don't use Google Auth "
    "(leave blank for prompt).",
    help="Password for root user who don't use Google Auth (leave blank for prompt).",
)
def create_root(email, name, google_auth=False, password=None, organization="default"):
    """
    Create root user.
    """
    print(
        "Creating root user (%s, %s) in organization %s..."
        % (email, name, organization)
    )
    print("Creating root user (%s, %s) in organization %s..." % (email, name, organization))
    print("Login with Google Auth: %r\n" % google_auth)

    user = models.User.query.filter(models.User.email == email).first()
@@ -155,15 +151,13 @@ def create_root(email, name, google_auth=False, password=None, organization="def
        exit(1)

    org_slug = organization
    org = models.Organization.query.filter(
        models.Organization.slug == org_slug
    ).first()
    org = models.Organization.query.filter(models.Organization.slug == org_slug).first()
    if org is None:
        org = models.Organization(name=org_slug, slug=org_slug, settings={})

    admin_group = models.Group(
        name="admin",
        permissions=["admin", "super_admin"],
        permissions=models.Group.ADMIN_PERMISSIONS,
        org=org,
        type=models.Group.BUILTIN_GROUP,
    )
@@ -208,13 +202,9 @@ def delete(email, organization=None):
    """
    if organization:
        org = models.Organization.get_by_slug(organization)
        deleted_count = models.User.query.filter(
            models.User.email == email, models.User.org == org.id
        ).delete()
        deleted_count = models.User.query.filter(models.User.email == email, models.User.org == org.id).delete()
    else:
        deleted_count = models.User.query.filter(models.User.email == email).delete(
            synchronize_session=False
        )
        deleted_count = models.User.query.filter(models.User.email == email).delete(synchronize_session=False)
    models.db.session.commit()
    print("Deleted %d users." % deleted_count)

@@ -234,9 +224,7 @@ def password(email, password, organization=None):
    """
    if organization:
        org = models.Organization.get_by_slug(organization)
        user = models.User.query.filter(
            models.User.email == email, models.User.org == org
        ).first()
        user = models.User.query.filter(models.User.email == email, models.User.org == org).first()
    else:
        user = models.User.query.filter(models.User.email == email).first()

@@ -265,7 +253,7 @@ def password(email, password, organization=None):
    "--groups",
    "groups",
    default=None,
    help="Comma seperated list of groups (leave blank for default).",
    help="Comma separated list of groups (leave blank for default).",
)
def invite(email, name, inviter_email, groups, is_admin=False, organization="default"):
    """
@@ -31,7 +31,7 @@ class BaseDestination(object):
    def configuration_schema(cls):
        return {}

    def notify(self, alert, query, user, new_state, app, host, options):
    def notify(self, alert, query, user, new_state, app, host, metadata, options):
        raise NotImplementedError()

    @classmethod
@@ -41,7 +41,7 @@ class BaseDestination(object):
            "type": cls.type(),
            "icon": cls.icon(),
            "configuration_schema": cls.configuration_schema(),
            **({ "deprecated": True } if cls.deprecated else {})
            **({"deprecated": True} if cls.deprecated else {}),
        }
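Note on the BaseDestination change: notify() gains a metadata parameter, so any custom alert destination subclass has to accept it as well, even if it ignores the value. A sketch of a subclass written against the new signature; the class name and logging body are illustrative only:

import logging

from redash.destinations import BaseDestination

class LogDestination(BaseDestination):
    # Matches the new signature above; metadata carries extra alert
    # context that concrete destinations may use or ignore.
    def notify(self, alert, query, user, new_state, app, host, metadata, options):
        logging.info("Alert %s changed state to %s", alert.id, new_state)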