Compare commits
9 Commits
v25.8 ... release/10

| Author | SHA1 | Date |
|---|---|---|
|  | 2589bef1f2 |  |
|  | 1c5ceecd50 |  |
|  | 41f948201a |  |
|  | 9c928bd1d3 |  |
|  | f312adf77b |  |
|  | 92e5d78dde |  |
|  | 0983e6926f |  |
|  | dec88799ab |  |
|  | 64a1d7a6cd |  |
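
The same comparison can be reproduced locally; this is only a sketch, assuming both refs are fetched from the getredash/redash remote (the three-dot form compares against the merge base, as the GitHub compare view does):

git fetch --tags origin release/10
git diff --stat v25.8...origin/release/10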

@@ -1,12 +0,0 @@
FROM cypress/browsers:node18.12.0-chrome106-ff106

ENV APP /usr/src/app
WORKDIR $APP

COPY package.json yarn.lock .yarnrc $APP/
COPY viz-lib $APP/viz-lib
RUN npm install yarn@1.22.22 -g && yarn --frozen-lockfile --network-concurrency 1 > /dev/null

COPY . $APP

RUN ./node_modules/.bin/cypress verify

@@ -1,39 +0,0 @@
#!/bin/bash

# This script only needs to run on the main Redash repo

if [ "${GITHUB_REPOSITORY}" != "getredash/redash" ]; then
  echo "Skipping image build for Docker Hub, as this isn't the main Redash repository"
  exit 0
fi

if [ "${GITHUB_REF_NAME}" != "master" ] && [ "${GITHUB_REF_NAME}" != "preview-image" ]; then
  echo "Skipping image build for Docker Hub, as this isn't the 'master' nor 'preview-image' branch"
  exit 0
fi

if [ "x${DOCKER_USER}" = "x" ] || [ "x${DOCKER_PASS}" = "x" ]; then
  echo "Skipping image build for Docker Hub, as the login details aren't available"
  exit 0
fi

set -e
VERSION=$(jq -r .version package.json)
VERSION_TAG="$VERSION.b${GITHUB_RUN_ID}.${GITHUB_RUN_NUMBER}"

export DOCKER_BUILDKIT=1
export COMPOSE_DOCKER_CLI_BUILD=1

docker login -u "${DOCKER_USER}" -p "${DOCKER_PASS}"

DOCKERHUB_REPO="redash/redash"
DOCKER_TAGS="-t redash/redash:preview -t redash/preview:${VERSION_TAG}"

# Build the docker container
docker build --build-arg install_groups="main,all_ds,dev" ${DOCKER_TAGS} .

# Push the container to the preview build locations
docker push "${DOCKERHUB_REPO}:preview"
docker push "redash/preview:${VERSION_TAG}"

echo "Built: ${VERSION_TAG}"

@@ -1,6 +0,0 @@
#!/bin/bash
VERSION=$(jq -r .version package.json)
FULL_VERSION=${VERSION}+b${GITHUB_RUN_ID}.${GITHUB_RUN_NUMBER}

sed -ri "s/^__version__ = '([A-Za-z0-9.-]*)'/__version__ = '${FULL_VERSION}'/" redash/__init__.py
sed -i "s/dev/${GITHUB_SHA}/" client/app/version.json

.circleci/Dockerfile.cypress (new file, 12 lines)
@@ -0,0 +1,12 @@
FROM cypress/browsers:node14.0.0-chrome84

ENV APP /usr/src/app
WORKDIR $APP

COPY package.json package-lock.json $APP/
COPY viz-lib $APP/viz-lib
RUN npm ci > /dev/null

COPY . $APP

RUN ./node_modules/.bin/cypress verify

.circleci/config.yml (new file, 177 lines)
@@ -0,0 +1,177 @@
version: 2.0

build-docker-image-job: &build-docker-image-job
  docker:
    - image: circleci/node:12
  steps:
    - setup_remote_docker
    - checkout
    - run: sudo apt update
    - run: sudo apt install python3-pip
    - run: sudo pip3 install -r requirements_bundles.txt
    - run: .circleci/update_version
    - run: npm run bundle
    - run: .circleci/docker_build
jobs:
  backend-lint:
    docker:
      - image: circleci/python:3.7.0
    steps:
      - checkout
      - run: sudo pip install flake8
      - run: ./bin/flake8_tests.sh
  backend-unit-tests:
    environment:
      COMPOSE_FILE: .circleci/docker-compose.circle.yml
      COMPOSE_PROJECT_NAME: redash
    docker:
      - image: circleci/buildpack-deps:xenial
    steps:
      - setup_remote_docker
      - checkout
      - run:
          name: Build Docker Images
          command: |
            set -x
            docker-compose build --build-arg skip_ds_deps=true --build-arg skip_frontend_build=true
            docker-compose up -d
            sleep 10
      - run:
          name: Create Test Database
          command: docker-compose run --rm postgres psql -h postgres -U postgres -c "create database tests;"
      - run:
          name: List Enabled Query Runners
          command: docker-compose run --rm redash manage ds list_types
      - run:
          name: Run Tests
          command: docker-compose run --name tests redash tests --junitxml=junit.xml --cov-report xml --cov=redash --cov-config .coveragerc tests/
      - run:
          name: Copy Test Results
          command: |
            mkdir -p /tmp/test-results/unit-tests
            docker cp tests:/app/coverage.xml ./coverage.xml
            docker cp tests:/app/junit.xml /tmp/test-results/unit-tests/results.xml
          when: always
      - store_test_results:
          path: /tmp/test-results
      - store_artifacts:
          path: coverage.xml
  frontend-lint:
    environment:
      CYPRESS_INSTALL_BINARY: 0
      PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: 1
    docker:
      - image: circleci/node:12
    steps:
      - checkout
      - run: mkdir -p /tmp/test-results/eslint
      - run: npm ci
      - run: npm run lint:ci
      - store_test_results:
          path: /tmp/test-results
  frontend-unit-tests:
    environment:
      CYPRESS_INSTALL_BINARY: 0
      PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: 1
    docker:
      - image: circleci/node:12
    steps:
      - checkout
      - run: sudo apt update
      - run: sudo apt install python3-pip
      - run: sudo pip3 install -r requirements_bundles.txt
      - run: npm ci
      - run: npm run bundle
      - run:
          name: Run App Tests
          command: npm test
      - run:
          name: Run Visualizations Tests
          command: (cd viz-lib && npm test)
      - run: npm run lint
  frontend-e2e-tests:
    environment:
      COMPOSE_FILE: .circleci/docker-compose.cypress.yml
      COMPOSE_PROJECT_NAME: cypress
      PERCY_TOKEN_ENCODED: ZGRiY2ZmZDQ0OTdjMzM5ZWE0ZGQzNTZiOWNkMDRjOTk4Zjg0ZjMxMWRmMDZiM2RjOTYxNDZhOGExMjI4ZDE3MA==
      CYPRESS_PROJECT_ID_ENCODED: OTI0Y2th
      CYPRESS_RECORD_KEY_ENCODED: YzA1OTIxMTUtYTA1Yy00NzQ2LWEyMDMtZmZjMDgwZGI2ODgx
      CYPRESS_INSTALL_BINARY: 0
      PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: 1
    docker:
      - image: circleci/node:12
    steps:
      - setup_remote_docker
      - checkout
      - run:
          name: Enable Code Coverage report for master branch
          command: |
            if [ "$CIRCLE_BRANCH" = "master" ]; then
              echo 'export CODE_COVERAGE=true' >> $BASH_ENV
              source $BASH_ENV
            fi
      - run:
          name: Install npm dependencies
          command: |
            npm ci
      - run:
          name: Setup Redash server
          command: |
            npm run cypress build
            npm run cypress start -- --skip-db-seed
            docker-compose run cypress npm run cypress db-seed
      - run:
          name: Execute Cypress tests
          command: npm run cypress run-ci
      - run:
          name: "Failure: output container logs to console"
          command: |
            docker-compose logs
          when: on_fail
      - run:
          name: Copy Code Coverage results
          command: |
            docker cp cypress:/usr/src/app/coverage ./coverage || true
          when: always
      - store_artifacts:
          path: coverage
  build-docker-image: *build-docker-image-job
  build-preview-docker-image: *build-docker-image-job
workflows:
  version: 2
  build:
    jobs:
      - backend-lint
      - backend-unit-tests:
          requires:
            - backend-lint
      - frontend-lint
      - frontend-unit-tests:
          requires:
            - backend-lint
            - frontend-lint
      - frontend-e2e-tests:
          requires:
            - frontend-lint
      - build-preview-docker-image:
          requires:
            - backend-unit-tests
            - frontend-unit-tests
            - frontend-e2e-tests
          filters:
            branches:
              only:
                - master
      - hold:
          type: approval
          requires:
            - backend-unit-tests
            - frontend-unit-tests
            - frontend-e2e-tests
          filters:
            branches:
              only:
                - /release\/.*/
      - build-docker-image:
          requires:
            - hold

@@ -1,3 +1,4 @@
version: '2.2'
services:
  redash:
    build: ../
@@ -11,15 +12,12 @@ services:
      PYTHONUNBUFFERED: 0
      REDASH_LOG_LEVEL: "INFO"
      REDASH_REDIS_URL: "redis://redis:6379/0"
      POSTGRES_PASSWORD: "FmTKs5vX52ufKR1rd8tn4MoSP7zvCJwb"
      REDASH_DATABASE_URL: "postgresql://postgres:FmTKs5vX52ufKR1rd8tn4MoSP7zvCJwb@postgres/postgres"
      REDASH_DATABASE_URL: "postgresql://postgres@postgres/postgres"
      REDASH_COOKIE_SECRET: "2H9gNG9obnAQ9qnR9BDTQUph6CbXKCzF"
  redis:
    image: redis:7-alpine
    image: redis:3.0-alpine
    restart: unless-stopped
  postgres:
    image: pgautoupgrade/pgautoupgrade:latest
    image: postgres:9.5.6-alpine
    command: "postgres -c fsync=off -c full_page_writes=off -c synchronous_commit=OFF"
    restart: unless-stopped
    environment:
      POSTGRES_HOST_AUTH_METHOD: "trust"

@@ -1,14 +1,15 @@
version: "2.2"
x-redash-service: &redash-service
  build:
    context: ../
    args:
      install_groups: "main"
      skip_dev_deps: "true"
      skip_ds_deps: "true"
      code_coverage: ${CODE_COVERAGE}
x-redash-environment: &redash-environment
  REDASH_LOG_LEVEL: "INFO"
  REDASH_REDIS_URL: "redis://redis:6379/0"
  POSTGRES_PASSWORD: "FmTKs5vX52ufKR1rd8tn4MoSP7zvCJwb"
  REDASH_DATABASE_URL: "postgresql://postgres:FmTKs5vX52ufKR1rd8tn4MoSP7zvCJwb@postgres/postgres"
  REDASH_DATABASE_URL: "postgresql://postgres@postgres/postgres"
  REDASH_RATELIMIT_ENABLED: "false"
  REDASH_ENFORCE_CSRF: "true"
  REDASH_COOKIE_SECRET: "2H9gNG9obnAQ9qnR9BDTQUph6CbXKCzF"
@@ -43,7 +44,7 @@ services:
    ipc: host
    build:
      context: ../
      dockerfile: .ci/Dockerfile.cypress
      dockerfile: .circleci/Dockerfile.cypress
    depends_on:
      - server
      - worker
@@ -63,11 +64,9 @@ services:
      CYPRESS_PROJECT_ID: ${CYPRESS_PROJECT_ID}
      CYPRESS_RECORD_KEY: ${CYPRESS_RECORD_KEY}
  redis:
    image: redis:7-alpine
    image: redis:3.0-alpine
    restart: unless-stopped
  postgres:
    image: pgautoupgrade/pgautoupgrade:latest
    image: postgres:9.5.6-alpine
    command: "postgres -c fsync=off -c full_page_writes=off -c synchronous_commit=OFF"
    restart: unless-stopped
    environment:
      POSTGRES_HOST_AUTH_METHOD: "trust"

.circleci/docker_build (new executable file, 17 lines)
@@ -0,0 +1,17 @@
#!/bin/bash
VERSION=$(jq -r .version package.json)
VERSION_TAG=$VERSION.b$CIRCLE_BUILD_NUM

docker login -u $DOCKER_USER -p $DOCKER_PASS

if [ $CIRCLE_BRANCH = master ] || [ $CIRCLE_BRANCH = preview-image ]
then
  docker build --build-arg skip_dev_deps=true -t redash/redash:preview -t redash/preview:$VERSION_TAG .
  docker push redash/redash:preview
  docker push redash/preview:$VERSION_TAG
else
  docker build --build-arg skip_dev_deps=true -t redash/redash:$VERSION_TAG .
  docker push redash/redash:$VERSION_TAG
fi

echo "Built: $VERSION_TAG"
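
For local experimentation, the script above could in principle be exercised outside CircleCI by supplying the variables it reads by hand; the values below are purely illustrative placeholders, not real credentials or build numbers:

export DOCKER_USER=<dockerhub-user> DOCKER_PASS=<dockerhub-password>
export CIRCLE_BRANCH=master CIRCLE_BUILD_NUM=12345
bash .circleci/docker_build   # on master/preview-image this tags redash/redash:preview and redash/preview:$VERSION.b12345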

.circleci/update_version (new executable file, 6 lines)
@@ -0,0 +1,6 @@
#!/bin/bash
VERSION=$(jq -r .version package.json)
FULL_VERSION=$VERSION+b$CIRCLE_BUILD_NUM

sed -ri "s/^__version__ = '([A-Za-z0-9.-]*)'/__version__ = '$FULL_VERSION'/" redash/__init__.py
sed -i "s/dev/$CIRCLE_SHA1/" client/app/version.json

@@ -1,4 +1,5 @@
client/.tmp/
client/dist/
node_modules/
viz-lib/node_modules/
.tmp/

.github/ISSUE_TEMPLATE/---bug_report.md (vendored, 6 lines changed)
@@ -7,10 +7,10 @@ about: Report reproducible software issues so we can improve

We use GitHub only for bug reports 🐛

Anything else should be a discussion: https://github.com/getredash/redash/discussions/ 👫
Anything else should be posted to https://discuss.redash.io 👫

🚨For support, help & questions use https://github.com/getredash/redash/discussions/categories/q-a
💡For feature requests & ideas use https://github.com/getredash/redash/discussions/categories/ideas
🚨For support, help & questions use https://discuss.redash.io/c/support
💡For feature requests & ideas use https://discuss.redash.io/c/feature-requests

**Found a security vulnerability?** Please email security@redash.io to report any security vulnerabilities. We will acknowledge receipt of your vulnerability and strive to send you regular updates about our progress. If you're curious about the status of your disclosure please feel free to email us again. If you want to encrypt your disclosure email, you can use this PGP key.

.github/ISSUE_TEMPLATE/--anything_else.md (vendored, 10 lines changed)
@@ -1,17 +1,17 @@
---
name: "\U0001F4A1Anything else"
about: "For help, support, features & ideas - please use Discussions \U0001F46B "
about: "For help, support, features & ideas - please use https://discuss.redash.io \U0001F46B "
labels: "Support Question"
---

We use GitHub only for bug reports 🐛

Anything else should be a discussion: https://github.com/getredash/redash/discussions/ 👫
Anything else should be posted to https://discuss.redash.io 👫

🚨For support, help & questions use https://github.com/getredash/redash/discussions/categories/q-a
💡For feature requests & ideas use https://github.com/getredash/redash/discussions/categories/ideas
🚨For support, help & questions use https://discuss.redash.io/c/support
💡For feature requests & ideas use https://discuss.redash.io/c/feature-requests

Alternatively, check out these resources below. Thanks! 😁.

- [Discussions](https://github.com/getredash/redash/discussions/)
- [Forum](https://disucss.redash.io)
- [Knowledge Base](https://redash.io/help)

.github/PULL_REQUEST_TEMPLATE.md (vendored, 17 lines changed)
@@ -1,26 +1,15 @@
## What type of PR is this?
<!-- Check all that apply, delete what doesn't apply. -->
## What type of PR is this? (check all applicable)
<!-- Please leave only what's applicable -->

- [ ] Refactor
- [ ] Feature
- [ ] Bug Fix
- [ ] New Query Runner (Data Source)
- [ ] New Query Runner (Data Source)
- [ ] New Alert Destination
- [ ] Other

## Description
<!-- In case of adding / modifying a query runner, please specify which version(s) you expect are compatible. -->

## How is this tested?

- [ ] Unit tests (pytest, jest)
- [ ] E2E Tests (Cypress)
- [ ] Manually
- [ ] N/A

<!-- If Manually, please describe. -->

## Related Tickets & Documents
<!-- If applicable, please include a link to your documentation PR against getredash/website -->

## Mobile & Desktop Screenshots/Recordings (if there are UI changes)

.github/support.yml (vendored, new file, 23 lines)
@@ -0,0 +1,23 @@
# Configuration for Support Requests - https://github.com/dessant/support-requests

# Label used to mark issues as support requests
supportLabel: Support Question

# Comment to post on issues marked as support requests, `{issue-author}` is an
# optional placeholder. Set to `false` to disable
supportComment: >
  :wave: @{issue-author}, we use the issue tracker exclusively for bug reports
  and planned work. However, this issue appears to be a support request.
  Please use [our forum](https://discuss.redash.io) to get help.

# Close issues marked as support requests
close: true

# Lock issues marked as support requests
lock: false

# Assign `off-topic` as the reason for locking. Set to `false` to disable
setLockReason: true

# Repository to extend settings from
# _extends: repo

.github/workflows/ci.yml (vendored, 177 lines removed)
@@ -1,177 +0,0 @@
name: Tests
on:
  push:
    branches:
      - master
  pull_request:
    branches:
      - master
env:
  NODE_VERSION: 18
  YARN_VERSION: 1.22.22
jobs:
  backend-lint:
    runs-on: ubuntu-22.04
    steps:
      - if: github.event.pull_request.mergeable == 'false'
        name: Exit if PR is not mergeable
        run: exit 1
      - uses: actions/checkout@v4
        with:
          fetch-depth: 1
          ref: ${{ github.event.pull_request.head.sha }}
      - uses: actions/setup-python@v5
        with:
          python-version: '3.8'
      - run: sudo pip install black==23.1.0 ruff==0.0.287
      - run: ruff check .
      - run: black --check .

  backend-unit-tests:
    runs-on: ubuntu-22.04
    needs: backend-lint
    env:
      COMPOSE_FILE: .ci/compose.ci.yaml
      COMPOSE_PROJECT_NAME: redash
      COMPOSE_DOCKER_CLI_BUILD: 1
      DOCKER_BUILDKIT: 1
    steps:
      - if: github.event.pull_request.mergeable == 'false'
        name: Exit if PR is not mergeable
        run: exit 1
      - uses: actions/checkout@v4
        with:
          fetch-depth: 1
          ref: ${{ github.event.pull_request.head.sha }}
      - name: Build Docker Images
        run: |
          set -x
          docker compose build --build-arg install_groups="main,all_ds,dev" --build-arg skip_frontend_build=true
          docker compose up -d
          sleep 10
      - name: Create Test Database
        run: docker compose -p redash run --rm postgres psql -h postgres -U postgres -c "create database tests;"
      - name: List Enabled Query Runners
        run: docker compose -p redash run --rm redash manage ds list_types
      - name: Run Tests
        run: docker compose -p redash run --name tests redash tests --junitxml=junit.xml --cov-report=xml --cov=redash --cov-config=.coveragerc tests/
      - name: Copy Test Results
        run: |
          mkdir -p /tmp/test-results/unit-tests
          docker cp tests:/app/coverage.xml ./coverage.xml
          docker cp tests:/app/junit.xml /tmp/test-results/unit-tests/results.xml
      # - name: Upload coverage reports to Codecov
      #   uses: codecov/codecov-action@v3
      #   with:
      #     token: ${{ secrets.CODECOV_TOKEN }}
      - name: Store Test Results
        uses: actions/upload-artifact@v4
        with:
          name: backend-test-results
          path: /tmp/test-results
      - name: Store Coverage Results
        uses: actions/upload-artifact@v4
        with:
          name: coverage
          path: coverage.xml

  frontend-lint:
    runs-on: ubuntu-22.04
    steps:
      - if: github.event.pull_request.mergeable == 'false'
        name: Exit if PR is not mergeable
        run: exit 1
      - uses: actions/checkout@v4
        with:
          fetch-depth: 1
          ref: ${{ github.event.pull_request.head.sha }}
      - uses: actions/setup-node@v4
        with:
          node-version: ${{ env.NODE_VERSION }}
          cache: 'yarn'
      - name: Install Dependencies
        run: |
          npm install --global --force yarn@$YARN_VERSION
          yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
      - name: Run Lint
        run: yarn lint:ci
      - name: Store Test Results
        uses: actions/upload-artifact@v4
        with:
          name: frontend-test-results
          path: /tmp/test-results

  frontend-unit-tests:
    runs-on: ubuntu-22.04
    needs: frontend-lint
    steps:
      - if: github.event.pull_request.mergeable == 'false'
        name: Exit if PR is not mergeable
        run: exit 1
      - uses: actions/checkout@v4
        with:
          fetch-depth: 1
          ref: ${{ github.event.pull_request.head.sha }}
      - uses: actions/setup-node@v4
        with:
          node-version: ${{ env.NODE_VERSION }}
          cache: 'yarn'
      - name: Install Dependencies
        run: |
          npm install --global --force yarn@$YARN_VERSION
          yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
      - name: Run App Tests
        run: yarn test
      - name: Run Visualizations Tests
        run: cd viz-lib && yarn test
      - run: yarn lint

  frontend-e2e-tests:
    runs-on: ubuntu-22.04
    needs: frontend-lint
    env:
      COMPOSE_FILE: .ci/compose.cypress.yaml
      COMPOSE_PROJECT_NAME: cypress
      CYPRESS_INSTALL_BINARY: 0
      PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: 1
      # PERCY_TOKEN: ${{ secrets.PERCY_TOKEN }}
      # CYPRESS_PROJECT_ID: ${{ secrets.CYPRESS_PROJECT_ID }}
      # CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
    steps:
      - if: github.event.pull_request.mergeable == 'false'
        name: Exit if PR is not mergeable
        run: exit 1
      - uses: actions/checkout@v4
        with:
          fetch-depth: 1
          ref: ${{ github.event.pull_request.head.sha }}
      - uses: actions/setup-node@v4
        with:
          node-version: ${{ env.NODE_VERSION }}
          cache: 'yarn'
      - name: Enable Code Coverage Report For Master Branch
        if: endsWith(github.ref, '/master')
        run: |
          echo "CODE_COVERAGE=true" >> "$GITHUB_ENV"
      - name: Install Dependencies
        run: |
          npm install --global --force yarn@$YARN_VERSION
          yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
      - name: Setup Redash Server
        run: |
          set -x
          yarn cypress build
          yarn cypress start -- --skip-db-seed
          docker compose run cypress yarn cypress db-seed
      - name: Execute Cypress Tests
        run: yarn cypress run-ci
      - name: "Failure: output container logs to console"
        if: failure()
        run: docker compose logs
      - name: Copy Code Coverage Results
        run: docker cp cypress:/usr/src/app/coverage ./coverage || true
      - name: Store Coverage Results
        uses: actions/upload-artifact@v4
        with:
          name: coverage
          path: coverage

.github/workflows/periodic-snapshot.yml (vendored, 86 lines removed)
@@ -1,86 +0,0 @@
name: Periodic Snapshot

on:
  schedule:
    - cron: '10 0 1 * *' # 10 minutes after midnight on the first day of every month
  workflow_dispatch:
    inputs:
      bump:
        description: 'Bump the last digit of the version'
        required: false
        type: boolean
      version:
        description: 'Specific version to set'
        required: false
        default: ''

env:
  GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

permissions:
  actions: write
  contents: write

jobs:
  bump-version-and-tag:
    runs-on: ubuntu-latest
    if: github.ref_name == github.event.repository.default_branch
    steps:
      - uses: actions/checkout@v4
        with:
          ssh-key: ${{ secrets.ACTION_PUSH_KEY }}

      - run: |
          git config user.name 'github-actions[bot]'
          git config user.email '41898282+github-actions[bot]@users.noreply.github.com'

          # Function to bump the version
          bump_version() {
            local version="$1"
            local IFS=.
            read -r major minor patch <<< "$version"
            patch=$((patch + 1))
            echo "$major.$minor.$patch-dev"
          }

          # Determine the new version tag
          if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
            BUMP_INPUT="${{ github.event.inputs.bump }}"
            SPECIFIC_VERSION="${{ github.event.inputs.version }}"

            # Check if both bump and specific version are provided
            if [ "$BUMP_INPUT" = "true" ] && [ -n "$SPECIFIC_VERSION" ]; then
              echo "::error::Error: Cannot specify both bump and specific version."
              exit 1
            fi

            if [ -n "$SPECIFIC_VERSION" ]; then
              TAG_NAME="$SPECIFIC_VERSION-dev"
            elif [ "$BUMP_INPUT" = "true" ]; then
              CURRENT_VERSION=$(grep '"version":' package.json | awk -F\" '{print $4}')
              TAG_NAME=$(bump_version "$CURRENT_VERSION")
            else
              echo "No version bump or specific version provided for manual dispatch."
              exit 1
            fi
          else
            TAG_NAME="$(date +%y.%m).0-dev"
          fi

          echo "New version tag: $TAG_NAME"

          # Update version in files
          gawk -i inplace -F: -v q=\" -v tag=${TAG_NAME} '/^ "version": / { print $1 FS, q tag q ","; next} { print }' package.json
          gawk -i inplace -F= -v q=\" -v tag=${TAG_NAME} '/^__version__ =/ { print $1 FS, q tag q; next} { print }' redash/__init__.py
          gawk -i inplace -F= -v q=\" -v tag=${TAG_NAME} '/^version =/ { print $1 FS, q tag q; next} { print }' pyproject.toml

          git add package.json redash/__init__.py pyproject.toml
          git commit -m "Snapshot: ${TAG_NAME}"
          git tag ${TAG_NAME}
          git push --atomic origin master refs/tags/${TAG_NAME}

          # Run the 'preview-image' workflow if run this workflow manually
          # For more information, please see the: https://docs.github.com/en/actions/security-guides/automatic-token-authentication
          if [ "$BUMP_INPUT" = "true" ] || [ -n "$SPECIFIC_VERSION" ]; then
            gh workflow run preview-image.yml --ref $TAG_NAME
          fi
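
The workflow_dispatch inputs above mean the snapshot job could also be triggered by hand; a sketch, assuming an authenticated GitHub CLI against a repository that still carries this workflow (the version value is only an example):

gh workflow run periodic-snapshot.yml -f bump=true
gh workflow run periodic-snapshot.yml -f version=25.9.0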

.github/workflows/preview-image.yml (vendored, 185 lines removed)
@@ -1,185 +0,0 @@
name: Preview Image
on:
  push:
    tags:
      - '*-dev'
  workflow_dispatch:
    inputs:
      dockerRepository:
        description: 'Docker repository'
        required: true
        default: 'preview'
        type: choice
        options:
          - preview
          - redash

env:
  NODE_VERSION: 18

jobs:
  build-skip-check:
    runs-on: ubuntu-22.04
    outputs:
      skip: ${{ steps.skip-check.outputs.skip }}
    steps:
      - name: Skip?
        id: skip-check
        run: |
          if [[ "${{ vars.DOCKER_USER }}" == '' ]]; then
            echo 'Docker user is empty. Skipping build+push'
            echo skip=true >> "$GITHUB_OUTPUT"
          elif [[ "${{ secrets.DOCKER_PASS }}" == '' ]]; then
            echo 'Docker password is empty. Skipping build+push'
            echo skip=true >> "$GITHUB_OUTPUT"
          elif [[ "${{ vars.DOCKER_REPOSITORY }}" == '' ]]; then
            echo 'Docker repository is empty. Skipping build+push'
            echo skip=true >> "$GITHUB_OUTPUT"
          else
            echo 'Docker user and password are set and branch is `master`.'
            echo 'Building + pushing `preview` image.'
            echo skip=false >> "$GITHUB_OUTPUT"
          fi

  build-docker-image:
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        arch:
          - amd64
          - arm64
        include:
          - arch: amd64
            os: ubuntu-22.04
          - arch: arm64
            os: ubuntu-22.04-arm
    outputs:
      VERSION_TAG: ${{ steps.version.outputs.VERSION_TAG }}
    needs:
      - build-skip-check
    if: needs.build-skip-check.outputs.skip == 'false'
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 1
          ref: ${{ github.event.push.after }}

      - uses: actions/setup-node@v4
        with:
          node-version: ${{ env.NODE_VERSION }}
          cache: 'yarn'

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Login to DockerHub
        uses: docker/login-action@v3
        with:
          username: ${{ vars.DOCKER_USER }}
          password: ${{ secrets.DOCKER_PASS }}

      - name: Install Dependencies
        env:
          PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: true
        run: |
          npm install --global --force yarn@1.22.22
          yarn cache clean && yarn --frozen-lockfile --network-concurrency 1

      - name: Set version
        id: version
        run: |
          set -x
          .ci/update_version
          VERSION_TAG=$(jq -r .version package.json)
          echo "VERSION_TAG=$VERSION_TAG" >> "$GITHUB_OUTPUT"

      - name: Build and push preview image to Docker Hub
        id: build-preview
        uses: docker/build-push-action@v4
        if: ${{ github.event.inputs.dockerRepository == 'preview' || !github.event.workflow_run }}
        with:
          tags: |
            ${{ vars.DOCKER_REPOSITORY }}/redash
            ${{ vars.DOCKER_REPOSITORY }}/preview
          context: .
          build-args: |
            test_all_deps=true
          outputs: type=image,push-by-digest=true,push=true
          cache-from: type=gha,scope=${{ matrix.arch }}
          cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
        env:
          DOCKER_CONTENT_TRUST: true

      - name: Build and push release image to Docker Hub
        id: build-release
        uses: docker/build-push-action@v4
        if: ${{ github.event.inputs.dockerRepository == 'redash' }}
        with:
          tags: |
            ${{ vars.DOCKER_REPOSITORY }}/redash:${{ steps.version.outputs.VERSION_TAG }}
          context: .
          build-args: |
            test_all_deps=true
          outputs: type=image,push-by-digest=false,push=true
          cache-from: type=gha,scope=${{ matrix.arch }}
          cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
        env:
          DOCKER_CONTENT_TRUST: true

      - name: "Failure: output container logs to console"
        if: failure()
        run: docker compose logs

      - name: Export digest
        run: |
          mkdir -p ${{ runner.temp }}/digests
          if [[ "${{ github.event.inputs.dockerRepository }}" == 'preview' || !github.event.workflow_run ]]; then
            digest="${{ steps.build-preview.outputs.digest}}"
          else
            digest="${{ steps.build-release.outputs.digest}}"
          fi
          touch "${{ runner.temp }}/digests/${digest#sha256:}"

      - name: Upload digest
        uses: actions/upload-artifact@v4
        with:
          name: digests-${{ matrix.arch }}
          path: ${{ runner.temp }}/digests/*
          if-no-files-found: error

  merge-docker-image:
    runs-on: ubuntu-22.04
    needs: build-docker-image
    steps:
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Login to DockerHub
        uses: docker/login-action@v3
        with:
          username: ${{ vars.DOCKER_USER }}
          password: ${{ secrets.DOCKER_PASS }}

      - name: Download digests
        uses: actions/download-artifact@v4
        with:
          path: ${{ runner.temp }}/digests
          pattern: digests-*
          merge-multiple: true

      - name: Create and push manifest for the preview image
        if: ${{ github.event.inputs.dockerRepository == 'preview' || !github.event.workflow_run }}
        working-directory: ${{ runner.temp }}/digests
        run: |
          docker buildx imagetools create -t ${{ vars.DOCKER_REPOSITORY }}/redash:preview \
            $(printf '${{ vars.DOCKER_REPOSITORY }}/redash:preview@sha256:%s ' *)
          docker buildx imagetools create -t ${{ vars.DOCKER_REPOSITORY }}/preview:${{ needs.build-docker-image.outputs.VERSION_TAG }} \
            $(printf '${{ vars.DOCKER_REPOSITORY }}/preview:${{ needs.build-docker-image.outputs.VERSION_TAG }}@sha256:%s ' *)

      - name: Create and push manifest for the release image
        if: ${{ github.event.inputs.dockerRepository == 'redash' }}
        working-directory: ${{ runner.temp }}/digests
        run: |
          docker buildx imagetools create -t ${{ vars.DOCKER_REPOSITORY }}/redash:${{ needs.build-docker-image.outputs.VERSION_TAG }} \
            $(printf '${{ vars.DOCKER_REPOSITORY }}/redash:${{ needs.build-docker-image.outputs.VERSION_TAG }}@sha256:%s ' *)

.github/workflows/restyled.yml (vendored, 36 lines removed)
@@ -1,36 +0,0 @@
name: Restyled

on:
  pull_request:

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  restyled:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ github.event.pull_request.head.sha }}

      - uses: restyled-io/actions/setup@v4
      - id: restyler
        uses: restyled-io/actions/run@v4
        with:
          fail-on-differences: true

      - if: |
          !cancelled() &&
          steps.restyler.outputs.success == 'true' &&
          github.event.pull_request.head.repo.full_name == github.repository
        uses: peter-evans/create-pull-request@v6
        with:
          base: ${{ steps.restyler.outputs.restyled-base }}
          branch: ${{ steps.restyler.outputs.restyled-head }}
          title: ${{ steps.restyler.outputs.restyled-title }}
          body: ${{ steps.restyler.outputs.restyled-body }}
          labels: "restyled"
          reviewers: ${{ github.event.pull_request.user.login }}
          delete-branch: true

.gitignore (vendored, 1 line changed)
@@ -17,7 +17,6 @@ client/dist
_build
.vscode
.env
.tool-versions

dump.rdb

@@ -1,10 +0,0 @@
repos:
  - repo: https://github.com/psf/black
    rev: 23.1.0
    hooks:
      - id: black
        language_version: python3
  - repo: https://github.com/charliermarsh/ruff-pre-commit
    rev: "v0.0.287"
    hooks:
      - id: ruff

@@ -38,9 +38,7 @@ request_review: author
#
# These can be used to tell other automation to avoid our PRs.
#
labels:
  - restyled
  - "Skip CI"
labels: ["Skip CI"]

# Labels to ignore
#
@@ -52,16 +50,13 @@ labels:
# Restylers to run, and how
restylers:
  - name: black
    image: restyled/restyler-black:v24.4.2
    image: restyled/restyler-black:v19.10b0
    include:
      - redash
      - tests
      - migrations/versions
  - name: prettier
    image: restyled/restyler-prettier:v3.3.2-2
    command:
      - prettier
      - --write
    image: restyled/restyler-prettier:v1.19.1-2
    include:
      - client/app/**/*.js
      - client/app/**/*.jsx

.yarn/.gitignore (vendored, 2 lines removed)
@@ -1,2 +0,0 @@
*
!.gitignore

@@ -4,7 +4,19 @@ Thank you for taking the time to contribute! :tada::+1:

The following is a set of guidelines for contributing to Redash. These are guidelines, not rules, please use your best judgement and feel free to propose changes to this document in a pull request.

:star: If you're already here and love the project, please make sure to press the Star button. :star:
## Quick Links:

- [Feature Requests](https://discuss.redash.io/c/feature-requests)
- [Documentation](https://redash.io/help/)
- [Blog](https://blog.redash.io/)
- [Twitter](https://twitter.com/getredash)

---
:star: If you already here and love the project, please make sure to press the Star button. :star:

---

## Table of Contents

[How can I contribute?](#how-can-i-contribute)
@@ -20,13 +32,6 @@ The following is a set of guidelines for contributing to Redash. These are guide
- [Release Method](#release-method)
- [Code of Conduct](#code-of-conduct)

## Quick Links:

- [User Forum](https://github.com/getredash/redash/discussions)
- [Documentation](https://redash.io/help/)

---
## How can I contribute?

### Reporting Bugs
@@ -34,54 +39,25 @@ The following is a set of guidelines for contributing to Redash. These are guide
When creating a new bug report, please make sure to:

- Search for existing issues first. If you find a previous report of your issue, please update the existing issue with additional information instead of creating a new one.
- If you are not sure if your issue is really a bug or just some configuration/setup problem, please start a [Q&A discussion](https://github.com/getredash/redash/discussions/new?category=q-a) first. Unless you can provide clear steps to reproduce, it's probably better to start with a discussion and later to open an issue.
- If you are not sure if your issue is really a bug or just some configuration/setup problem, please start a discussion in [the support forum](https://discuss.redash.io/c/support) first. Unless you can provide clear steps to reproduce, it's probably better to start with a thread in the forum and later to open an issue.
- If you still decide to open an issue, please review the template and guidelines and include as much details as possible.

### Suggesting Enhancements / Feature Requests

If you would like to suggest an enhancement or ask for a new feature:

- Please check [the Ideas discussions](https://github.com/getredash/redash/discussions/categories/ideas) for existing threads about what you want to suggest/ask. If there is, feel free to upvote it to signal interest or add your comments.
- Please check [the forum](https://discuss.redash.io/c/feature-requests/5) for existing threads about what you want to suggest/ask. If there is, feel free to upvote it to signal interest or add your comments.
- If there is no open thread, you're welcome to start one to have a discussion about what you want to suggest. Try to provide as much details and context as possible and include information about *the problem you want to solve* rather only *your proposed solution*.

### Pull Requests

**Code contributions are welcomed**. For big changes or significant features, it's usually better to reach out first and discuss what you want to implement and how (we recommend reading: [Pull Request First](https://medium.com/practical-blend/pull-request-first-f6bb667a9b6#.ozlqxvj36)). This is to make sure that what you want to implement is aligned with our goals for the project and that no one else is already working on it.

#### Criteria for Review / Merging

When you open your pull request, please follow this repository’s PR template carefully:

- Indicate the type of change
- If you implement multiple unrelated features, bug fixes, or refactors please split them into individual pull requests.
- Describe the change
- If fixing a bug, please describe the bug or link to an existing github issue / forum discussion
- Include UI screenshots / GIFs whenever possible
- **Code contributions are welcomed**. For big changes or significant features, it's usually better to reach out first and discuss what you want to implement and how (we recommend reading: [Pull Request First](https://medium.com/practical-blend/pull-request-first-f6bb667a9b6#.ozlqxvj36)). This to make sure that what you want to implement is aligned with our goals for the project and that no one else is already working on it.
- Include screenshots and animated GIFs in your pull request whenever possible.
- Please add [documentation](#documentation) for new features or changes in functionality along with the code.
- Please follow existing code style:
  - Python: we use [Black](https://github.com/psf/black) to auto format the code.
  - Javascript: we use [Prettier](https://github.com/prettier/prettier) to auto-format the code.

#### Initial Review (1 week)

During this phase, a team member will apply the “Team Review” label if a pull request meets our criteria or a “Needs More Information” label if not. If more information is required, the team member will comment which criteria have not been met.

If your pull request receives the “Needs More Information” label, please make the requested changes and then remove the label. This resets the 1 week timer for an initial review.

Stale pull requests that remain untouched in “Needs More Information” for more than 4 weeks will be closed.

If a team member closes your pull request, you may reopen it after you have made the changes requested during initial review. After you make these changes, remove the “Needs More Information” label. This again resets the timer for another initial review.

#### Full Review (2 weeks)

After the “Team Review” label is applied, a member of the core team will review the PR within 2 weeks.

Reviews will approve, request changes, or ask questions to discuss areas of uncertainty. After you’ve responded, a member of the team will re-review within one week.

#### Merging (1 week)

After your pull request has been approved, a member of the core team will merge the pull request within a week.

### Documentation

The project's documentation can be found at [https://redash.io/help/](https://redash.io/help/). The [documentation sources](https://github.com/getredash/website/tree/master/src/pages/kb) are hosted on GitHub. To contribute edits / new pages, you can use GitHub's interface. Click the "Edit on GitHub" link on the documentation page to quickly open the edit interface.

Dockerfile (139 lines changed)
@@ -1,6 +1,4 @@
FROM node:18-bookworm AS frontend-builder

RUN npm install --global --force yarn@1.22.22
FROM node:12 as frontend-builder

# Controls whether to build the frontend assets
ARG skip_frontend_build
@@ -12,108 +10,93 @@ RUN useradd -m -d /frontend redash
USER redash

WORKDIR /frontend
COPY --chown=redash package.json yarn.lock .yarnrc /frontend/
COPY --chown=redash package.json package-lock.json /frontend/
COPY --chown=redash viz-lib /frontend/viz-lib
COPY --chown=redash scripts /frontend/scripts

# Controls whether to instrument code for coverage information
ARG code_coverage
ENV BABEL_ENV=${code_coverage:+test}

# Avoid issues caused by lags in disk and network I/O speeds when working on top of QEMU emulation for multi-platform image building.
RUN yarn config set network-timeout 300000

RUN if [ "x$skip_frontend_build" = "x" ] ; then yarn --frozen-lockfile --network-concurrency 1; fi
RUN if [ "x$skip_frontend_build" = "x" ] ; then npm ci --unsafe-perm; fi

COPY --chown=redash client /frontend/client
COPY --chown=redash webpack.config.js /frontend/
RUN <<EOF
  if [ "x$skip_frontend_build" = "x" ]; then
    yarn build
  else
    mkdir -p /frontend/client/dist
    touch /frontend/client/dist/multi_org.html
    touch /frontend/client/dist/index.html
  fi
EOF
RUN if [ "x$skip_frontend_build" = "x" ] ; then npm run build; else mkdir -p /frontend/client/dist && touch /frontend/client/dist/multi_org.html && touch /frontend/client/dist/index.html; fi

FROM python:3.10-slim-bookworm
FROM python:3.7-slim-buster

EXPOSE 5000

# Controls whether to install extra dependencies needed for all data sources.
ARG skip_ds_deps
# Controls whether to install dev dependencies.
ARG skip_dev_deps

RUN useradd --create-home redash

# Ubuntu packages
RUN apt-get update && \
  apt-get install -y --no-install-recommends \
    pkg-config \
    curl \
    gnupg \
    build-essential \
    pwgen \
    libffi-dev \
    sudo \
    git-core \
    # Kerberos, needed for MS SQL Python driver to compile on arm64
    libkrb5-dev \
    # Postgres client
    libpq-dev \
    # ODBC support:
    g++ unixodbc-dev \
    # for SAML
    xmlsec1 \
    # Additional packages required for data sources:
    libssl-dev \
    default-libmysqlclient-dev \
    freetds-dev \
    libsasl2-dev \
    unzip \
    libsasl2-modules-gssapi-mit && \
  apt-get install -y \
    curl \
    gnupg \
    build-essential \
    pwgen \
    libffi-dev \
    sudo \
    git-core \
    wget \
    # Postgres client
    libpq-dev \
    # ODBC support:
    g++ unixodbc-dev \
    # for SAML
    xmlsec1 \
    # Additional packages required for data sources:
    libssl-dev \
    default-libmysqlclient-dev \
    freetds-dev \
    libsasl2-dev \
    unzip \
    libsasl2-modules-gssapi-mit && \
  # MSSQL ODBC Driver:
  curl https://packages.microsoft.com/keys/microsoft.asc | apt-key add - && \
  curl https://packages.microsoft.com/config/debian/10/prod.list > /etc/apt/sources.list.d/mssql-release.list && \
  apt-get update && \
  ACCEPT_EULA=Y apt-get install -y msodbcsql17 && \
  apt-get clean && \
  rm -rf /var/lib/apt/lists/*

ARG TARGETPLATFORM
ARG databricks_odbc_driver_url=https://databricks-bi-artifacts.s3.us-east-2.amazonaws.com/simbaspark-drivers/odbc/2.6.26/SimbaSparkODBC-2.6.26.1045-Debian-64bit.zip
RUN <<EOF
  if [ "$TARGETPLATFORM" = "linux/amd64" ]; then
    curl https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor -o /usr/share/keyrings/microsoft-prod.gpg
    curl https://packages.microsoft.com/config/debian/12/prod.list > /etc/apt/sources.list.d/mssql-release.list
    apt-get update
    ACCEPT_EULA=Y apt-get install -y --no-install-recommends msodbcsql18
    apt-get clean
    rm -rf /var/lib/apt/lists/*
    curl "$databricks_odbc_driver_url" --location --output /tmp/simba_odbc.zip
    chmod 600 /tmp/simba_odbc.zip
    unzip /tmp/simba_odbc.zip -d /tmp/simba
    dpkg -i /tmp/simba/*.deb
    printf "[Simba]\nDriver = /opt/simba/spark/lib/64/libsparkodbc_sb64.so" >> /etc/odbcinst.ini
    rm /tmp/simba_odbc.zip
    rm -rf /tmp/simba
  fi
EOF
ARG databricks_odbc_driver_url=https://databricks.com/wp-content/uploads/2.6.10.1010-2/SimbaSparkODBC-2.6.10.1010-2-Debian-64bit.zip
RUN wget --quiet $databricks_odbc_driver_url -O /tmp/simba_odbc.zip \
  && chmod 600 /tmp/simba_odbc.zip \
  && unzip /tmp/simba_odbc.zip -d /tmp/ \
  && dpkg -i /tmp/SimbaSparkODBC-*/*.deb \
  && echo "[Simba]\nDriver = /opt/simba/spark/lib/64/libsparkodbc_sb64.so" >> /etc/odbcinst.ini \
  && rm /tmp/simba_odbc.zip \
  && rm -rf /tmp/SimbaSparkODBC*

WORKDIR /app

ENV POETRY_VERSION=1.8.3
ENV POETRY_HOME=/etc/poetry
ENV POETRY_VIRTUALENVS_CREATE=false
RUN curl -sSL https://install.python-poetry.org | python3 -
# Disalbe PIP Cache and Version Check
ENV PIP_DISABLE_PIP_VERSION_CHECK=1
ENV PIP_NO_CACHE_DIR=1

# Avoid crashes, including corrupted cache artifacts, when building multi-platform images with GitHub Actions.
RUN /etc/poetry/bin/poetry cache clear pypi --all
# rollback pip version to avoid legacy resolver problem
RUN pip install pip==20.2.4;

COPY pyproject.toml poetry.lock ./
# We first copy only the requirements file, to avoid rebuilding on every file change.
COPY requirements_all_ds.txt ./
RUN if [ "x$skip_ds_deps" = "x" ] ; then pip install -r requirements_all_ds.txt ; else echo "Skipping pip install -r requirements_all_ds.txt" ; fi

ARG POETRY_OPTIONS="--no-root --no-interaction --no-ansi"
# for LDAP authentication, install with `ldap3` group
# disabled by default due to GPL license conflict
ARG install_groups="main,all_ds,dev"
RUN /etc/poetry/bin/poetry install --only $install_groups $POETRY_OPTIONS
COPY requirements_bundles.txt requirements_dev.txt ./
RUN if [ "x$skip_dev_deps" = "x" ] ; then pip install -r requirements_dev.txt ; fi

COPY --chown=redash . /app
COPY --from=frontend-builder --chown=redash /frontend/client/dist /app/client/dist
RUN chown redash /app
COPY requirements.txt ./
RUN pip install -r requirements.txt

COPY . /app
COPY --from=frontend-builder /frontend/client/dist /app/client/dist
RUN chown -R redash /app
USER redash

ENTRYPOINT ["/app/bin/docker-entrypoint"]
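
As a rough illustration of the build arguments visible in the newer Dockerfile above (the image tag here is chosen arbitrarily, not part of the repository):

DOCKER_BUILDKIT=1 docker build \
  --build-arg install_groups="main,all_ds,dev" \
  -t redash:local .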

@@ -1,3 +0,0 @@
The Bahrain map data used in Redash was downloaded from
https://cartographyvectors.com/map/857-bahrain-detailed-boundary in PR #6192.
* Free for personal and commercial purpose with attribution.

Makefile (79 lines changed)
@@ -1,80 +1,57 @@
.PHONY: compose_build up test_db create_database clean clean-all down tests lint backend-unit-tests frontend-unit-tests test build watch start redis-cli bash
.PHONY: compose_build up test_db create_database clean down bundle tests lint backend-unit-tests frontend-unit-tests test build watch start redis-cli bash

compose_build: .env
    COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker compose build
compose_build:
    docker-compose build

up:
    docker compose up -d redis postgres --remove-orphans
    docker compose exec -u postgres postgres psql postgres --csv \
        -1tqc "SELECT table_name FROM information_schema.tables WHERE table_name = 'organizations'" 2> /dev/null \
        | grep -q "organizations" || make create_database
    COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker compose up -d --build --remove-orphans
    docker-compose up -d --build

test_db:
    @for i in `seq 1 5`; do \
        if (docker compose exec postgres sh -c 'psql -U postgres -c "select 1;"' 2>&1 > /dev/null) then break; \
        if (docker-compose exec postgres sh -c 'psql -U postgres -c "select 1;"' 2>&1 > /dev/null) then break; \
        else echo "postgres initializing..."; sleep 5; fi \
    done
    docker compose exec postgres sh -c 'psql -U postgres -c "drop database if exists tests;" && psql -U postgres -c "create database tests;"'
    docker-compose exec postgres sh -c 'psql -U postgres -c "drop database if exists tests;" && psql -U postgres -c "create database tests;"'

create_database: .env
    docker compose run server create_db
create_database:
    docker-compose run server create_db

clean:
    docker compose down
    docker compose --project-name cypress down
    docker compose rm --stop --force
    docker compose --project-name cypress rm --stop --force
    docker image rm --force \
        cypress-server:latest cypress-worker:latest cypress-scheduler:latest \
        redash-server:latest redash-worker:latest redash-scheduler:latest
    docker container prune --force
    docker image prune --force
    docker volume prune --force

clean-all: clean
    docker image rm --force \
        redash/redash:latest redis:7-alpine maildev/maildev:latest \
        pgautoupgrade/pgautoupgrade:15-alpine3.8 pgautoupgrade/pgautoupgrade:latest
    docker-compose down && docker-compose rm

down:
    docker compose down
    docker-compose down

.env:
    printf "REDASH_COOKIE_SECRET=`pwgen -1s 32`\nREDASH_SECRET_KEY=`pwgen -1s 32`\n" >> .env

env: .env

format:
    pre-commit run --all-files
bundle:
    docker-compose run server bin/bundle-extensions

tests:
    docker compose run server tests
    docker-compose run server tests

lint:
    ruff check .
    black --check . --diff
    ./bin/flake8_tests.sh

backend-unit-tests: up test_db
    docker compose run --rm --name tests server tests
    docker-compose run --rm --name tests server tests

frontend-unit-tests:
    CYPRESS_INSTALL_BINARY=0 PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1 yarn --frozen-lockfile
    yarn test
frontend-unit-tests: bundle
    CYPRESS_INSTALL_BINARY=0 PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1 npm ci
    npm run bundle
    npm test

test: backend-unit-tests frontend-unit-tests lint
test: lint backend-unit-tests frontend-unit-tests

build:
    yarn build
build: bundle
    npm run build

watch:
    yarn watch
watch: bundle
    npm run watch

start:
    yarn start
start: bundle
    npm run start

redis-cli:
    docker compose run --rm redis redis-cli -h redis
    docker-compose run --rm redis redis-cli -h redis

bash:
    docker compose run --rm server bash
    docker-compose run --rm server bash
README.md
@@ -3,7 +3,8 @@
|
||||
</p>
|
||||
|
||||
[](https://redash.io/help/)
|
||||
[](https://github.com/getredash/redash/actions)
|
||||
[](https://datree.io/?src=badge)
|
||||
[](https://circleci.com/gh/getredash/redash/tree/master)
|
||||
|
||||
Redash is designed to enable anyone, regardless of the level of technical sophistication, to harness the power of data big and small. SQL users leverage Redash to explore, query, visualize, and share data from any data sources. Their work in turn enables anybody in their organization to use the data. Every day, millions of users at thousands of organizations around the world use Redash to develop insights and make data-driven decisions.
|
||||
|
||||
@@ -31,71 +32,50 @@ Redash features:
|
||||
Redash supports more than 35 SQL and NoSQL [data sources](https://redash.io/help/data-sources/supported-data-sources). It can also be extended to support more. Below is a list of built-in sources:
|
||||
|
||||
- Amazon Athena
|
||||
- Amazon CloudWatch / Insights
|
||||
- Amazon DynamoDB
|
||||
- Amazon Redshift
|
||||
- ArangoDB
|
||||
- Axibase Time Series Database
|
||||
- Apache Cassandra
|
||||
- Cassandra
|
||||
- ClickHouse
|
||||
- CockroachDB
|
||||
- Couchbase
|
||||
- CSV
|
||||
- Databricks
|
||||
- Databricks (Apache Spark)
|
||||
- DB2 by IBM
|
||||
- Dgraph
|
||||
- Apache Drill
|
||||
- Apache Druid
|
||||
- e6data
|
||||
- Eccenca Corporate Memory
|
||||
- Druid
|
||||
- Elasticsearch
|
||||
- Exasol
|
||||
- Microsoft Excel
|
||||
- Firebolt
|
||||
- Databend
|
||||
- Google Analytics
|
||||
- Google BigQuery
|
||||
- Google Spreadsheets
|
||||
- Graphite
|
||||
- Greenplum
|
||||
- Apache Hive
|
||||
- Apache Impala
|
||||
- Hive
|
||||
- Impala
|
||||
- InfluxDB
|
||||
- InfluxDBv2
|
||||
- IBM Netezza Performance Server
|
||||
- JIRA (JQL)
|
||||
- JIRA
|
||||
- JSON
|
||||
- Apache Kylin
|
||||
- OmniSciDB (Formerly MapD)
|
||||
- MariaDB
|
||||
- MemSQL
|
||||
- Microsoft Azure Data Warehouse / Synapse
|
||||
- Microsoft Azure SQL Database
|
||||
- Microsoft Azure Data Explorer / Kusto
|
||||
- Microsoft SQL Server
|
||||
- MongoDB
|
||||
- MySQL
|
||||
- Oracle
|
||||
- Apache Phoenix
|
||||
- Apache Pinot
|
||||
- PostgreSQL
|
||||
- Presto
|
||||
- Prometheus
|
||||
- Python
|
||||
- Qubole
|
||||
- Rockset
|
||||
- RisingWave
|
||||
- Salesforce
|
||||
- ScyllaDB
|
||||
- Shell Scripts
|
||||
- Snowflake
|
||||
- SPARQL
|
||||
- SQLite
|
||||
- TiDB
|
||||
- Tinybird
|
||||
- TreasureData
|
||||
- Trino
|
||||
- Uptycs
|
||||
- Vertica
|
||||
- Yandex AppMetrrica
|
||||
- Yandex Metrica
|
||||
@@ -103,13 +83,12 @@ Redash supports more than 35 SQL and NoSQL [data sources](https://redash.io/help
|
||||
## Getting Help
|
||||
|
||||
* Issues: https://github.com/getredash/redash/issues
|
||||
* Discussion Forum: https://github.com/getredash/redash/discussions/
|
||||
* Development Discussion: https://discord.gg/tN5MdmfGBp
|
||||
* Discussion Forum: https://discuss.redash.io/
|
||||
|
||||
## Reporting Bugs and Contributing Code
|
||||
|
||||
* Want to report a bug or request a feature? Please open [an issue](https://github.com/getredash/redash/issues/new).
|
||||
* Want to help us build **_Redash_**? Fork the project, edit in a [dev environment](https://github.com/getredash/redash/wiki/Local-development-setup) and make a pull request. We need all the help we can get!
|
||||
* Want to help us build **_Redash_**? Fork the project, edit in a [dev environment](https://redash.io/help-onpremise/dev/guide.html) and make a pull request. We need all the help we can get!
|
||||
|
||||
## Security
|
||||
|
||||
|
||||
115
bin/bundle-extensions
Executable file
@@ -0,0 +1,115 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Copy bundle extension files to the client/app/extension directory"""
|
||||
import logging
|
||||
import os
|
||||
from pathlib import Path
|
||||
from shutil import copy
|
||||
from collections import OrderedDict as odict
|
||||
|
||||
import importlib_metadata
|
||||
import importlib_resources
|
||||
|
||||
# Name of the subdirectory
|
||||
BUNDLE_DIRECTORY = "bundle"
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# Make a directory for extensions and set it as an environment variable
|
||||
# to be picked up by webpack.
|
||||
extensions_relative_path = Path("client", "app", "extensions")
|
||||
extensions_directory = Path(__file__).parent.parent / extensions_relative_path
|
||||
|
||||
if not extensions_directory.exists():
|
||||
extensions_directory.mkdir()
|
||||
os.environ["EXTENSIONS_DIRECTORY"] = str(extensions_relative_path)
|
||||
|
||||
|
||||
def entry_point_module(entry_point):
|
||||
"""Returns the dotted module path for the given entry point"""
|
||||
return entry_point.pattern.match(entry_point.value).group("module")
|
||||
|
||||
|
||||
def load_bundles():
|
||||
""""Load bundles as defined in Redash extensions.
|
||||
|
||||
The bundle entry point can be defined as a dotted path to a module
or a callable; it won't be called, just used as a means
to find the files under its file system path.
|
||||
|
||||
The name of the directory it looks for files in is "bundle".
|
||||
|
||||
So a Python package with an extension bundle could look like this::
|
||||
|
||||
my_extensions/
|
||||
├── __init__.py
|
||||
└── wide_footer
|
||||
├── __init__.py
|
||||
└── bundle
|
||||
├── extension.js
|
||||
└── styles.css
|
||||
|
||||
and would then need to register the bundle with an entry point
|
||||
under the "redash.bundles" group, e.g. in your setup.py::
|
||||
|
||||
setup(
|
||||
# ...
|
||||
entry_points={
|
||||
"redash.bundles": [
|
||||
"wide_footer = my_extensions.wide_footer",
|
||||
]
|
||||
# ...
|
||||
},
|
||||
# ...
|
||||
)
|
||||
|
||||
"""
|
||||
bundles = odict()
|
||||
for entry_point in importlib_metadata.entry_points().get("redash.bundles", []):
|
||||
logger.info('Loading Redash bundle "%s".', entry_point.name)
|
||||
module = entry_point_module(entry_point)
|
||||
# Try to get a list of bundle files
|
||||
try:
|
||||
bundle_dir = importlib_resources.files(module).joinpath(BUNDLE_DIRECTORY)
|
||||
except (ImportError, TypeError):
|
||||
# Module isn't a package, so can't have a subdirectory/-package
|
||||
logger.error(
|
||||
'Redash bundle module "%s" could not be imported: "%s"',
|
||||
entry_point.name,
|
||||
module,
|
||||
)
|
||||
continue
|
||||
if not bundle_dir.is_dir():
|
||||
logger.error(
|
||||
'Redash bundle directory "%s" could not be found or is not a directory: "%s"',
|
||||
entry_point.name,
|
||||
bundle_dir,
|
||||
)
|
||||
continue
|
||||
bundles[entry_point.name] = list(bundle_dir.rglob("*"))
|
||||
return bundles
|
||||
|
||||
|
||||
bundles = load_bundles().items()
|
||||
if bundles:
|
||||
print("Number of extension bundles found: {}".format(len(bundles)))
|
||||
else:
|
||||
print("No extension bundles found.")
|
||||
|
||||
for bundle_name, paths in bundles:
|
||||
# Shortcut in case no paths were found for the bundle
|
||||
if not paths:
|
||||
print('No paths found for bundle "{}".'.format(bundle_name))
|
||||
continue
|
||||
|
||||
# The destination for the bundle files with the entry point name as the subdirectory
|
||||
destination = Path(extensions_directory, bundle_name)
|
||||
if not destination.exists():
|
||||
destination.mkdir()
|
||||
|
||||
# Copy the bundle directory from the module to its destination.
|
||||
print('Copying "{}" bundle to {}:'.format(bundle_name, destination.resolve()))
|
||||
for src_path in paths:
|
||||
dest_path = destination / src_path.name
|
||||
print(" - {} -> {}".format(src_path, dest_path))
|
||||
copy(str(src_path), str(dest_path))
|
||||
@@ -22,19 +22,6 @@ worker() {
|
||||
exec supervisord -c worker.conf
|
||||
}
|
||||
|
||||
workers_healthcheck() {
|
||||
WORKERS_COUNT=${WORKERS_COUNT}
|
||||
echo "Checking active workers count against $WORKERS_COUNT..."
|
||||
ACTIVE_WORKERS_COUNT=`echo $(rq info --url $REDASH_REDIS_URL -R | grep workers | grep -oP ^[0-9]+)`
|
||||
if [ "$ACTIVE_WORKERS_COUNT" -lt "$WORKERS_COUNT" ]; then
|
||||
echo "$ACTIVE_WORKERS_COUNT workers are active, Exiting"
|
||||
exit 1
|
||||
else
|
||||
echo "$ACTIVE_WORKERS_COUNT workers are active"
|
||||
exit 0
|
||||
fi
|
||||
}
|
||||
|
||||
dev_worker() {
|
||||
echo "Starting dev RQ worker..."
|
||||
|
||||
@@ -45,8 +32,7 @@ server() {
|
||||
# Recycle gunicorn workers every n-th request. See http://docs.gunicorn.org/en/stable/settings.html#max-requests for more details.
|
||||
MAX_REQUESTS=${MAX_REQUESTS:-1000}
|
||||
MAX_REQUESTS_JITTER=${MAX_REQUESTS_JITTER:-100}
|
||||
TIMEOUT=${REDASH_GUNICORN_TIMEOUT:-60}
|
||||
exec /usr/local/bin/gunicorn -b 0.0.0.0:5000 --name redash -w${REDASH_WEB_WORKERS:-4} redash.wsgi:app --max-requests $MAX_REQUESTS --max-requests-jitter $MAX_REQUESTS_JITTER --timeout $TIMEOUT
|
||||
exec /usr/local/bin/gunicorn -b 0.0.0.0:5000 --name redash -w${REDASH_WEB_WORKERS:-4} redash.wsgi:app --max-requests $MAX_REQUESTS --max-requests-jitter $MAX_REQUESTS_JITTER
|
||||
}
|
||||
|
||||
create_db() {
|
||||
@@ -67,7 +53,7 @@ help() {
|
||||
echo ""
|
||||
echo "shell -- open shell"
|
||||
echo "dev_server -- start Flask development server with debugger and auto reload"
|
||||
echo "debug -- start Flask development server with remote debugger via debugpy"
|
||||
echo "debug -- start Flask development server with remote debugger via ptvsd"
|
||||
echo "create_db -- create database tables"
|
||||
echo "manage -- CLI to manage redash"
|
||||
echo "tests -- run tests"
|
||||
@@ -89,10 +75,6 @@ case "$1" in
|
||||
shift
|
||||
worker
|
||||
;;
|
||||
workers_healthcheck)
|
||||
shift
|
||||
workers_healthcheck
|
||||
;;
|
||||
server)
|
||||
shift
|
||||
server
|
||||
|
||||
9
bin/flake8_tests.sh
Executable file
@@ -0,0 +1,9 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -o errexit # fail the build if any task fails
|
||||
|
||||
flake8 --version ; pip --version
|
||||
# stop the build if there are Python syntax errors or undefined names
|
||||
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
|
||||
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
|
||||
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
|
||||
@@ -1,44 +1,35 @@
|
||||
#!/bin/env python3
|
||||
|
||||
import sys
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
|
||||
def get_change_log(previous_sha):
|
||||
args = [
|
||||
"git",
|
||||
"--no-pager",
|
||||
"log",
|
||||
"--merges",
|
||||
"--grep",
|
||||
"Merge pull request",
|
||||
'--pretty=format:"%h|%s|%b|%p"',
|
||||
"master...{}".format(previous_sha),
|
||||
]
|
||||
args = ['git', '--no-pager', 'log', '--merges', '--grep', 'Merge pull request', '--pretty=format:"%h|%s|%b|%p"', 'master...{}'.format(previous_sha)]
|
||||
log = subprocess.check_output(args)
|
||||
changes = []
|
||||
|
||||
for line in log.split("\n"):
|
||||
for line in log.split('\n'):
|
||||
try:
|
||||
sha, subject, body, parents = line[1:-1].split("|")
|
||||
sha, subject, body, parents = line[1:-1].split('|')
|
||||
except ValueError:
|
||||
continue
|
||||
|
||||
try:
|
||||
pull_request = re.match(r"Merge pull request #(\d+)", subject).groups()[0]
|
||||
pull_request = re.match("Merge pull request #(\d+)", subject).groups()[0]
|
||||
pull_request = " #{}".format(pull_request)
|
||||
except Exception:
|
||||
except Exception as ex:
|
||||
pull_request = ""
|
||||
|
||||
author = subprocess.check_output(["git", "log", "-1", '--pretty=format:"%an"', parents.split(" ")[-1]])[1:-1]
|
||||
author = subprocess.check_output(['git', 'log', '-1', '--pretty=format:"%an"', parents.split(' ')[-1]])[1:-1]
|
||||
|
||||
changes.append("{}{}: {} ({})".format(sha, pull_request, body.strip(), author))
|
||||
|
||||
return changes
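For reference, each line produced by the --pretty=format:"%h|%s|%b|%p" format is wrapped in literal double quotes, which is why the code strips the first and last characters before splitting. A made-up example in Python (SHAs, PR number and branch are invented):

line = '"abc1234|Merge pull request #123 from user/fix-thing|Fix the thing|111aaaa 222bbbb"'
sha, subject, body, parents = line[1:-1].split("|")
# sha == 'abc1234', body == 'Fix the thing', parents == '111aaaa 222bbbb'
# re.match(r"Merge pull request #(\d+)", subject).groups()[0] == '123'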
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
if __name__ == '__main__':
|
||||
previous_sha = sys.argv[1]
|
||||
changes = get_change_log(previous_sha)
|
||||
|
||||
|
||||
@@ -1,20 +1,17 @@
|
||||
#!/usr/bin/env python3
|
||||
import os
|
||||
import sys
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import requests
|
||||
import simplejson
|
||||
|
||||
github_token = os.environ["GITHUB_TOKEN"]
|
||||
auth = (github_token, "x-oauth-basic")
|
||||
repo = "getredash/redash"
|
||||
|
||||
github_token = os.environ['GITHUB_TOKEN']
|
||||
auth = (github_token, 'x-oauth-basic')
|
||||
repo = 'getredash/redash'
|
||||
|
||||
def _github_request(method, path, params=None, headers={}):
|
||||
if urlparse(path).hostname != "api.github.com":
|
||||
if not path.startswith('https://api.github.com'):
|
||||
url = "https://api.github.com/{}".format(path)
|
||||
else:
|
||||
url = path
|
||||
@@ -25,18 +22,15 @@ def _github_request(method, path, params=None, headers={}):
|
||||
response = requests.request(method, url, data=params, auth=auth)
|
||||
return response
|
||||
|
||||
|
||||
def exception_from_error(message, response):
|
||||
return Exception("({}) {}: {}".format(response.status_code, message, response.json().get("message", "?")))
|
||||
|
||||
return Exception("({}) {}: {}".format(response.status_code, message, response.json().get('message', '?')))
|
||||
|
||||
def rc_tag_name(version):
|
||||
return "v{}-rc".format(version)
|
||||
|
||||
|
||||
def get_rc_release(version):
|
||||
tag = rc_tag_name(version)
|
||||
response = _github_request("get", "repos/{}/releases/tags/{}".format(repo, tag))
|
||||
response = _github_request('get', 'repos/{}/releases/tags/{}'.format(repo, tag))
|
||||
|
||||
if response.status_code == 404:
|
||||
return None
|
||||
@@ -45,101 +39,84 @@ def get_rc_release(version):
|
||||
|
||||
raise exception_from_error("Unknown error while looking RC release: ", response)
|
||||
|
||||
|
||||
def create_release(version, commit_sha):
|
||||
tag = rc_tag_name(version)
|
||||
|
||||
params = {
|
||||
"tag_name": tag,
|
||||
"name": "{} - RC".format(version),
|
||||
"target_commitish": commit_sha,
|
||||
"prerelease": True,
|
||||
'tag_name': tag,
|
||||
'name': "{} - RC".format(version),
|
||||
'target_commitish': commit_sha,
|
||||
'prerelease': True
|
||||
}
|
||||
|
||||
response = _github_request("post", "repos/{}/releases".format(repo), params)
|
||||
response = _github_request('post', 'repos/{}/releases'.format(repo), params)
|
||||
|
||||
if response.status_code != 201:
|
||||
raise exception_from_error("Failed creating new release", response)
|
||||
|
||||
return response.json()
|
||||
|
||||
|
||||
def upload_asset(release, filepath):
|
||||
upload_url = release["upload_url"].replace("{?name,label}", "")
|
||||
filename = filepath.split("/")[-1]
|
||||
upload_url = release['upload_url'].replace('{?name,label}', '')
|
||||
filename = filepath.split('/')[-1]
|
||||
|
||||
with open(filepath) as file_content:
|
||||
headers = {"Content-Type": "application/gzip"}
|
||||
response = requests.post(
|
||||
upload_url, file_content, params={"name": filename}, headers=headers, auth=auth, verify=False
|
||||
)
|
||||
headers = {'Content-Type': 'application/gzip'}
|
||||
response = requests.post(upload_url, file_content, params={'name': filename}, headers=headers, auth=auth, verify=False)
|
||||
|
||||
if response.status_code != 201: # not 200/201/...
|
||||
raise exception_from_error("Failed uploading asset", response)
|
||||
raise exception_from_error('Failed uploading asset', response)
|
||||
|
||||
return response
|
||||
|
||||
|
||||
def remove_previous_builds(release):
|
||||
for asset in release["assets"]:
|
||||
response = _github_request("delete", asset["url"])
|
||||
for asset in release['assets']:
|
||||
response = _github_request('delete', asset['url'])
|
||||
if response.status_code != 204:
|
||||
raise exception_from_error("Failed deleting asset", response)
|
||||
|
||||
|
||||
def get_changelog(commit_sha):
|
||||
latest_release = _github_request("get", "repos/{}/releases/latest".format(repo))
|
||||
latest_release = _github_request('get', 'repos/{}/releases/latest'.format(repo))
|
||||
if latest_release.status_code != 200:
|
||||
raise exception_from_error("Failed getting latest release", latest_release)
|
||||
raise exception_from_error('Failed getting latest release', latest_release)
|
||||
|
||||
latest_release = latest_release.json()
|
||||
previous_sha = latest_release["target_commitish"]
|
||||
previous_sha = latest_release['target_commitish']
|
||||
|
||||
args = [
|
||||
"git",
|
||||
"--no-pager",
|
||||
"log",
|
||||
"--merges",
|
||||
"--grep",
|
||||
"Merge pull request",
|
||||
'--pretty=format:"%h|%s|%b|%p"',
|
||||
"{}...{}".format(previous_sha, commit_sha),
|
||||
]
|
||||
args = ['git', '--no-pager', 'log', '--merges', '--grep', 'Merge pull request', '--pretty=format:"%h|%s|%b|%p"', '{}...{}'.format(previous_sha, commit_sha)]
|
||||
log = subprocess.check_output(args)
|
||||
changes = ["Changes since {}:".format(latest_release["name"])]
|
||||
changes = ["Changes since {}:".format(latest_release['name'])]
|
||||
|
||||
for line in log.split("\n"):
|
||||
for line in log.split('\n'):
|
||||
try:
|
||||
sha, subject, body, parents = line[1:-1].split("|")
|
||||
sha, subject, body, parents = line[1:-1].split('|')
|
||||
except ValueError:
|
||||
continue
|
||||
|
||||
try:
|
||||
pull_request = re.match(r"Merge pull request #(\d+)", subject).groups()[0]
|
||||
pull_request = re.match("Merge pull request #(\d+)", subject).groups()[0]
|
||||
pull_request = " #{}".format(pull_request)
|
||||
except Exception:
|
||||
except Exception as ex:
|
||||
pull_request = ""
|
||||
|
||||
author = subprocess.check_output(["git", "log", "-1", '--pretty=format:"%an"', parents.split(" ")[-1]])[1:-1]
|
||||
author = subprocess.check_output(['git', 'log', '-1', '--pretty=format:"%an"', parents.split(' ')[-1]])[1:-1]
|
||||
|
||||
changes.append("{}{}: {} ({})".format(sha, pull_request, body.strip(), author))
|
||||
|
||||
return "\n".join(changes)
|
||||
|
||||
|
||||
def update_release_commit_sha(release, commit_sha):
|
||||
params = {
|
||||
"target_commitish": commit_sha,
|
||||
'target_commitish': commit_sha,
|
||||
}
|
||||
|
||||
response = _github_request("patch", "repos/{}/releases/{}".format(repo, release["id"]), params)
|
||||
response = _github_request('patch', 'repos/{}/releases/{}'.format(repo, release['id']), params)
|
||||
|
||||
if response.status_code != 200:
|
||||
raise exception_from_error("Failed updating commit sha for existing release", response)
|
||||
|
||||
return response.json()
|
||||
|
||||
|
||||
def update_release(version, build_filepath, commit_sha):
|
||||
try:
|
||||
release = get_rc_release(version)
|
||||
@@ -148,22 +125,21 @@ def update_release(version, build_filepath, commit_sha):
|
||||
else:
|
||||
release = create_release(version, commit_sha)
|
||||
|
||||
print("Using release id: {}".format(release["id"]))
|
||||
print("Using release id: {}".format(release['id']))
|
||||
|
||||
remove_previous_builds(release)
|
||||
response = upload_asset(release, build_filepath)
|
||||
|
||||
changelog = get_changelog(commit_sha)
|
||||
|
||||
response = _github_request("patch", release["url"], {"body": changelog})
|
||||
response = _github_request('patch', release['url'], {'body': changelog})
|
||||
if response.status_code != 200:
|
||||
raise exception_from_error("Failed updating release description", response)
|
||||
|
||||
except Exception as ex:
|
||||
print(ex)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
if __name__ == '__main__':
|
||||
commit_sha = sys.argv[1]
|
||||
version = sys.argv[2]
|
||||
filepath = sys.argv[3]
|
||||
|
||||
242
bin/upgrade
Executable file
@@ -0,0 +1,242 @@
|
||||
#!/usr/bin/env python3
|
||||
import urllib.parse
|
||||
import argparse
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
from collections import namedtuple
|
||||
from fnmatch import fnmatch
|
||||
|
||||
import requests
|
||||
|
||||
try:
|
||||
import semver
|
||||
except ImportError:
|
||||
print("Missing required library: semver.")
|
||||
exit(1)
|
||||
|
||||
REDASH_HOME = os.environ.get('REDASH_HOME', '/opt/redash')
|
||||
CURRENT_VERSION_PATH = '{}/current'.format(REDASH_HOME)
|
||||
|
||||
|
||||
def run(cmd, cwd=None):
|
||||
if not cwd:
|
||||
cwd = REDASH_HOME
|
||||
|
||||
return subprocess.check_output(cmd, cwd=cwd, shell=True, stderr=subprocess.STDOUT)
|
||||
|
||||
|
||||
def confirm(question):
|
||||
reply = str(input(question + ' (y/n): ')).lower().strip()
|
||||
|
||||
if reply[0] == 'y':
|
||||
return True
|
||||
if reply[0] == 'n':
|
||||
return False
|
||||
else:
|
||||
return confirm("Please use 'y' or 'n'")
|
||||
|
||||
|
||||
def version_path(version_name):
|
||||
return "{}/{}".format(REDASH_HOME, version_name)
|
||||
|
||||
END_CODE = '\033[0m'
|
||||
|
||||
|
||||
def colored_string(text, color):
|
||||
if sys.stdout.isatty():
|
||||
return "{}{}{}".format(color, text, END_CODE)
|
||||
else:
|
||||
return text
|
||||
|
||||
|
||||
def h1(text):
|
||||
print(colored_string(text, '\033[4m\033[1m'))
|
||||
|
||||
|
||||
def green(text):
|
||||
print(colored_string(text, '\033[92m'))
|
||||
|
||||
|
||||
def red(text):
|
||||
print(colored_string(text, '\033[91m'))
|
||||
|
||||
|
||||
class Release(namedtuple('Release', ('version', 'download_url', 'filename', 'description'))):
|
||||
def v1_or_newer(self):
|
||||
return semver.compare(self.version, '1.0.0-alpha') >= 0
|
||||
|
||||
def is_newer(self, version):
|
||||
return semver.compare(self.version, version) > 0
|
||||
|
||||
@property
|
||||
def version_name(self):
|
||||
return self.filename.replace('.tar.gz', '')
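As a reminder of the semantics these helpers rely on, semver.compare returns -1, 0 or 1 (assuming the classic Python semver package API that the script imports above):

import semver

semver.compare("0.12.0", "1.0.0-alpha")  # -1: pre-1.0 deploys are older than any 1.x release
semver.compare("1.0.0-alpha", "1.0.0")   # -1: a pre-release sorts before its final release
semver.compare("8.0.0+b123", "8.0.0")    #  0: build metadata is ignored when comparing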
|
||||
|
||||
|
||||
def get_latest_release_from_ci():
|
||||
response = requests.get('https://circleci.com/api/v1.1/project/github/getredash/redash/latest/artifacts?branch=master')
|
||||
|
||||
if response.status_code != 200:
|
||||
exit("Failed getting releases (status code: %s)." % response.status_code)
|
||||
|
||||
tarball_asset = list(filter(lambda asset: asset['url'].endswith('.tar.gz'), response.json()))[0]
filename = urllib.parse.unquote(tarball_asset['pretty_path'].split('/')[-1])
|
||||
version = filename.replace('redash.', '').replace('.tar.gz', '')
|
||||
|
||||
release = Release(version, tarball_asset['url'], filename, '')
|
||||
|
||||
return release
|
||||
|
||||
|
||||
def get_release(channel):
|
||||
if channel == 'ci':
|
||||
return get_latest_release_from_ci()
|
||||
|
||||
response = requests.get('https://version.redash.io/api/releases?channel={}'.format(channel))
|
||||
release = response.json()[0]
|
||||
|
||||
filename = release['download_url'].split('/')[-1]
|
||||
release = Release(release['version'], release['download_url'], filename, release['description'])
|
||||
|
||||
return release
|
||||
|
||||
|
||||
def link_to_current(version_name):
|
||||
green("Linking to current version...")
|
||||
run('ln -nfs {} {}'.format(version_path(version_name), CURRENT_VERSION_PATH))
|
||||
|
||||
|
||||
def restart_services():
|
||||
# We're doing this instead of a simple 'supervisorctl restart all' because
# otherwise it won't notice that /opt/redash/current is pointing at a different
# directory.
|
||||
green("Restarting...")
|
||||
try:
|
||||
run('sudo /etc/init.d/redash_supervisord restart')
|
||||
except subprocess.CalledProcessError as e:
|
||||
run('sudo service supervisor restart')
|
||||
|
||||
|
||||
def update_requirements(version_name):
|
||||
green("Installing new Python packages (if needed)...")
|
||||
new_requirements_file = '{}/requirements.txt'.format(version_path(version_name))
|
||||
|
||||
install_requirements = False
|
||||
|
||||
try:
|
||||
run('diff {}/requirements.txt {}'.format(CURRENT_VERSION_PATH, new_requirements_file))  # raises CalledProcessError when the files differ
|
||||
except subprocess.CalledProcessError as e:
|
||||
if e.returncode != 0:
|
||||
install_requirements = True
|
||||
|
||||
if install_requirements:
|
||||
run('sudo pip install -r {}'.format(new_requirements_file))
|
||||
|
||||
|
||||
def apply_migrations(release):
|
||||
green("Running migrations (if needed)...")
|
||||
if not release.v1_or_newer():
|
||||
return apply_migrations_pre_v1(release.version_name)
|
||||
|
||||
run("sudo -u redash bin/run ./manage.py db upgrade", cwd=version_path(release.version_name))
|
||||
|
||||
|
||||
def find_migrations(version_name):
|
||||
current_migrations = set([f for f in os.listdir("{}/migrations".format(CURRENT_VERSION_PATH)) if fnmatch(f, '*_*.py')])
|
||||
new_migrations = sorted([f for f in os.listdir("{}/migrations".format(version_path(version_name))) if fnmatch(f, '*_*.py')])
|
||||
|
||||
return [m for m in new_migrations if m not in current_migrations]
|
||||
|
||||
|
||||
def apply_migrations_pre_v1(version_name):
|
||||
new_migrations = find_migrations(version_name)
|
||||
|
||||
if new_migrations:
|
||||
green("New migrations to run: ")
|
||||
print(', '.join(new_migrations))
|
||||
else:
|
||||
print("No new migrations in this version.")
|
||||
|
||||
if new_migrations and confirm("Apply new migrations? (make sure you have backup)"):
|
||||
for migration in new_migrations:
|
||||
print("Applying {}...".format(migration))
|
||||
run("sudo sudo -u redash PYTHONPATH=. bin/run python migrations/{}".format(migration), cwd=version_path(version_name))
|
||||
|
||||
|
||||
def download_and_unpack(release):
|
||||
directory_name = release.version_name
|
||||
|
||||
green("Downloading release tarball...")
|
||||
run('sudo wget --header="Accept: application/octet-stream" -O {} {}'.format(release.filename, release.download_url))
|
||||
green("Unpacking to: {}...".format(directory_name))
|
||||
run('sudo mkdir -p {}'.format(directory_name))
|
||||
run('sudo tar -C {} -xvf {}'.format(directory_name, release.filename))
|
||||
|
||||
green("Changing ownership to redash...")
|
||||
run('sudo chown redash {}'.format(directory_name))
|
||||
|
||||
green("Linking .env file...")
|
||||
run('sudo ln -nfs {}/.env {}/.env'.format(REDASH_HOME, version_path(directory_name)))
|
||||
|
||||
|
||||
def current_version():
|
||||
real_current_path = os.path.realpath(CURRENT_VERSION_PATH).replace('.b', '+b')
|
||||
return real_current_path.replace(REDASH_HOME + '/', '').replace('redash.', '')
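In other words, current_version() turns the directory that the 'current' symlink points at back into a semver string; with a hypothetical install it behaves like this:

# Illustrative values only:
path = "/opt/redash/redash.8.0.0.b32245"  # hypothetical target of the 'current' symlink
version = path.replace(".b", "+b").replace("/opt/redash/", "").replace("redash.", "")
# version == "8.0.0+b32245" -- the "+b32245" part is semver build metadata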
|
||||
|
||||
|
||||
def verify_minimum_version():
|
||||
green("Current version: " + current_version())
|
||||
if semver.compare(current_version(), '0.12.0') < 0:
|
||||
red("You need to have Redash v0.12.0 or newer to upgrade to post v1.0.0 releases.")
|
||||
green("To upgrade to v0.12.0, run the upgrade script set to the legacy channel (--channel legacy).")
|
||||
exit(1)
|
||||
|
||||
|
||||
def show_description_and_confirm(description):
|
||||
if description:
|
||||
print(description)
|
||||
|
||||
if not confirm("Continue with upgrade?"):
|
||||
red("Cancelling upgrade.")
|
||||
exit(1)
|
||||
|
||||
|
||||
def verify_newer_version(release):
|
||||
if not release.is_newer(current_version()):
|
||||
red("The found release is not newer than your current deployed release ({}).".format(current_version()))
|
||||
if not confirm("Continue with upgrade?"):
|
||||
red("Cancelling upgrade.")
|
||||
exit(1)
|
||||
|
||||
|
||||
def deploy_release(channel):
|
||||
h1("Starting Redash upgrade:")
|
||||
|
||||
release = get_release(channel)
|
||||
green("Found version: {}".format(release.version))
|
||||
|
||||
if release.v1_or_newer():
|
||||
verify_minimum_version()
|
||||
|
||||
verify_newer_version(release)
|
||||
show_description_and_confirm(release.description)
|
||||
|
||||
try:
|
||||
download_and_unpack(release)
|
||||
update_requirements(release.version_name)
|
||||
apply_migrations(release)
|
||||
link_to_current(release.version_name)
|
||||
restart_services()
|
||||
green("Done! Enjoy.")
|
||||
except subprocess.CalledProcessError as e:
|
||||
red("Failed running: {}".format(e.cmd))
|
||||
red("Exit status: {}\nOutput:\n{}".format(e.returncode, e.output))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("--channel", help="The channel to get release from (default: stable).", default='stable')
|
||||
args = parser.parse_args()
|
||||
|
||||
deploy_release(args.channel)
|
||||
|
Before Width: | Height: | Size: 97 KiB |
|
Before Width: | Height: | Size: 3.2 KiB |
BIN
client/app/assets/images/db-logos/dynamodb_sql.png
Normal file
|
After Width: | Height: | Size: 12 KiB |
|
Before Width: | Height: | Size: 5.2 KiB |
|
Before Width: | Height: | Size: 16 KiB |
|
Before Width: | Height: | Size: 16 KiB |
|
Before Width: | Height: | Size: 16 KiB |
|
Before Width: | Height: | Size: 14 KiB |
|
Before Width: | Height: | Size: 14 KiB |
|
Before Width: | Height: | Size: 18 KiB |
|
Before Width: | Height: | Size: 18 KiB |
|
Before Width: | Height: | Size: 1.2 KiB |
|
Before Width: | Height: | Size: 29 KiB |
BIN
client/app/assets/images/db-logos/qubole.png
Normal file
|
After Width: | Height: | Size: 2.4 KiB |
|
Before Width: | Height: | Size: 9.7 KiB |
|
Before Width: | Height: | Size: 18 KiB |
|
Before Width: | Height: | Size: 8.5 KiB |
|
Before Width: | Height: | Size: 12 KiB |
|
Before Width: | Height: | Size: 45 KiB |
|
Before Width: | Height: | Size: 7.0 KiB |
BIN
client/app/assets/images/destinations/hipchat.png
Normal file
|
After Width: | Height: | Size: 12 KiB |
|
Before Width: | Height: | Size: 4.1 KiB |
|
Before Width: | Height: | Size: 22 KiB |
@@ -90,23 +90,6 @@ body.fixed-layout {
|
||||
.embed__vis {
|
||||
display: flex;
|
||||
flex-flow: column;
|
||||
height: calc(~'100vh - 25px');
|
||||
|
||||
> .embed-heading {
|
||||
flex: 0 0 auto;
|
||||
}
|
||||
|
||||
> .query__vis {
|
||||
flex: 1 1 auto;
|
||||
|
||||
.chart-visualization-container, .visualization-renderer-wrapper, .visualization-renderer {
|
||||
height: 100%
|
||||
}
|
||||
}
|
||||
|
||||
> .tile__bottom-control {
|
||||
flex: 0 0 auto;
|
||||
}
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
@@ -223,7 +206,6 @@ body.fixed-layout {
|
||||
}
|
||||
|
||||
.editor__left__schema {
|
||||
min-height: 120px;
|
||||
flex-grow: 1;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
|
||||
@@ -17,13 +17,6 @@ export default function ApplicationArea() {
|
||||
useEffect(() => {
|
||||
function globalErrorHandler(event) {
|
||||
event.preventDefault();
|
||||
if (event.message === "Uncaught SyntaxError: Unexpected token '<'") {
|
||||
// if we see a javascript error on unexpected token where the unexpected token is '<', this usually means that a fallback html file (like index.html)
|
||||
// was served as content of script rather than the expected script, give a friendlier message in the console on what could be going on
|
||||
console.error(
|
||||
`[Uncaught SyntaxError: Unexpected token '<'] usually means that a fallback html file was returned from server rather than the expected script. Check that the server is properly serving the file ${event.filename}.`
|
||||
);
|
||||
}
|
||||
setUnhandledError(event.error);
|
||||
}
|
||||
|
||||
|
||||
@@ -22,7 +22,7 @@ function BeaconConsent() {
|
||||
setHide(true);
|
||||
};
|
||||
|
||||
const confirmConsent = (confirm) => {
|
||||
const confirmConsent = confirm => {
|
||||
let message = "🙏 Thank you.";
|
||||
|
||||
if (!confirm) {
|
||||
@@ -47,8 +47,7 @@ function BeaconConsent() {
|
||||
<HelpTrigger type="USAGE_DATA_SHARING" />
|
||||
</>
|
||||
}
|
||||
bordered={false}
|
||||
>
|
||||
bordered={false}>
|
||||
<Text>Help Redash improve by automatically sending anonymous usage data:</Text>
|
||||
<div className="m-t-5">
|
||||
<ul>
|
||||
@@ -67,7 +66,8 @@ function BeaconConsent() {
|
||||
</div>
|
||||
<div className="m-t-15">
|
||||
<Text type="secondary">
|
||||
You can change this setting anytime from the <Link href="settings/general">Settings</Link> page.
|
||||
You can change this setting anytime from the{" "}
|
||||
<Link href="settings/organization">Organization Settings</Link> page.
|
||||
</Text>
|
||||
</div>
|
||||
</Card>
|
||||
|
||||
@@ -12,7 +12,6 @@ import { wrap as wrapDialog, DialogPropType } from "@/components/DialogWrapper";
|
||||
import QuerySelector from "@/components/QuerySelector";
|
||||
import { Query } from "@/services/query";
|
||||
import { useUniqueId } from "@/lib/hooks/useUniqueId";
|
||||
import "./EditParameterSettingsDialog.less";
|
||||
|
||||
const { Option } = Select;
|
||||
const formItemProps = { labelCol: { span: 6 }, wrapperCol: { span: 16 } };
|
||||
@@ -27,7 +26,7 @@ function isTypeDateRange(type) {
|
||||
|
||||
function joinExampleList(multiValuesOptions) {
|
||||
const { prefix, suffix } = multiValuesOptions;
|
||||
return ["value1", "value2", "value3"].map((value) => `${prefix}${value}${suffix}`).join(",");
|
||||
return ["value1", "value2", "value3"].map(value => `${prefix}${value}${suffix}`).join(",");
|
||||
}
|
||||
|
||||
function NameInput({ name, type, onChange, existingNames, setValidation }) {
|
||||
@@ -55,7 +54,7 @@ function NameInput({ name, type, onChange, existingNames, setValidation }) {
|
||||
|
||||
return (
|
||||
<Form.Item required label="Keyword" help={helpText} validateStatus={validateStatus} {...formItemProps}>
|
||||
<Input onChange={(e) => onChange(e.target.value)} autoFocus />
|
||||
<Input onChange={e => onChange(e.target.value)} autoFocus />
|
||||
</Form.Item>
|
||||
);
|
||||
}
|
||||
@@ -72,8 +71,6 @@ function EditParameterSettingsDialog(props) {
|
||||
const [param, setParam] = useState(clone(props.parameter));
|
||||
const [isNameValid, setIsNameValid] = useState(true);
|
||||
const [initialQuery, setInitialQuery] = useState();
|
||||
const [userInput, setUserInput] = useState(param.regex || "");
|
||||
const [isValidRegex, setIsValidRegex] = useState(true);
|
||||
|
||||
const isNew = !props.parameter.name;
|
||||
|
||||
@@ -117,17 +114,6 @@ function EditParameterSettingsDialog(props) {
|
||||
|
||||
const paramFormId = useUniqueId("paramForm");
|
||||
|
||||
const handleRegexChange = (e) => {
|
||||
setUserInput(e.target.value);
|
||||
try {
|
||||
new RegExp(e.target.value);
|
||||
setParam({ ...param, regex: e.target.value });
|
||||
setIsValidRegex(true);
|
||||
} catch (error) {
|
||||
setIsValidRegex(false);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<Modal
|
||||
{...props.dialog.props}
|
||||
@@ -143,17 +129,15 @@ function EditParameterSettingsDialog(props) {
|
||||
disabled={!isFulfilled()}
|
||||
type="primary"
|
||||
form={paramFormId}
|
||||
data-test="SaveParameterSettings"
|
||||
>
|
||||
data-test="SaveParameterSettings">
|
||||
{isNew ? "Add Parameter" : "OK"}
|
||||
</Button>,
|
||||
]}
|
||||
>
|
||||
]}>
|
||||
<Form layout="horizontal" onFinish={onConfirm} id={paramFormId}>
|
||||
{isNew && (
|
||||
<NameInput
|
||||
name={param.name}
|
||||
onChange={(name) => setParam({ ...param, name })}
|
||||
onChange={name => setParam({ ...param, name })}
|
||||
setValidation={setIsNameValid}
|
||||
existingNames={props.existingParams}
|
||||
type={param.type}
|
||||
@@ -162,16 +146,15 @@ function EditParameterSettingsDialog(props) {
|
||||
<Form.Item required label="Title" {...formItemProps}>
|
||||
<Input
|
||||
value={isNull(param.title) ? getDefaultTitle(param.name) : param.title}
|
||||
onChange={(e) => setParam({ ...param, title: e.target.value })}
|
||||
onChange={e => setParam({ ...param, title: e.target.value })}
|
||||
data-test="ParameterTitleInput"
|
||||
/>
|
||||
</Form.Item>
|
||||
<Form.Item label="Type" {...formItemProps}>
|
||||
<Select value={param.type} onChange={(type) => setParam({ ...param, type })} data-test="ParameterTypeSelect">
|
||||
<Select value={param.type} onChange={type => setParam({ ...param, type })} data-test="ParameterTypeSelect">
|
||||
<Option value="text" data-test="TextParameterTypeOption">
|
||||
Text
|
||||
</Option>
|
||||
<Option value="text-pattern">Text Pattern</Option>
|
||||
<Option value="number" data-test="NumberParameterTypeOption">
|
||||
Number
|
||||
</Option>
|
||||
@@ -197,26 +180,12 @@ function EditParameterSettingsDialog(props) {
|
||||
<Option value="datetime-range-with-seconds">Date and Time Range (with seconds)</Option>
|
||||
</Select>
|
||||
</Form.Item>
|
||||
{param.type === "text-pattern" && (
|
||||
<Form.Item
|
||||
label="Regex"
|
||||
help={!isValidRegex ? "Invalid Regex Pattern" : "Valid Regex Pattern"}
|
||||
{...formItemProps}
|
||||
>
|
||||
<Input
|
||||
value={userInput}
|
||||
onChange={handleRegexChange}
|
||||
className={!isValidRegex ? "input-error" : ""}
|
||||
data-test="RegexPatternInput"
|
||||
/>
|
||||
</Form.Item>
|
||||
)}
|
||||
{param.type === "enum" && (
|
||||
<Form.Item label="Values" help="Dropdown list values (newline delimited)" {...formItemProps}>
|
||||
<Input.TextArea
|
||||
rows={3}
|
||||
value={param.enumOptions}
|
||||
onChange={(e) => setParam({ ...param, enumOptions: e.target.value })}
|
||||
onChange={e => setParam({ ...param, enumOptions: e.target.value })}
|
||||
/>
|
||||
</Form.Item>
|
||||
)}
|
||||
@@ -224,7 +193,7 @@ function EditParameterSettingsDialog(props) {
|
||||
<Form.Item label="Query" help="Select query to load dropdown values from" {...formItemProps}>
|
||||
<QuerySelector
|
||||
selectedQuery={initialQuery}
|
||||
onChange={(q) => setParam({ ...param, queryId: q && q.id })}
|
||||
onChange={q => setParam({ ...param, queryId: q && q.id })}
|
||||
type="select"
|
||||
/>
|
||||
</Form.Item>
|
||||
@@ -233,7 +202,7 @@ function EditParameterSettingsDialog(props) {
|
||||
<Form.Item className="m-b-0" label=" " colon={false} {...formItemProps}>
|
||||
<Checkbox
|
||||
defaultChecked={!!param.multiValuesOptions}
|
||||
onChange={(e) =>
|
||||
onChange={e =>
|
||||
setParam({
|
||||
...param,
|
||||
multiValuesOptions: e.target.checked
|
||||
@@ -245,8 +214,7 @@ function EditParameterSettingsDialog(props) {
|
||||
: null,
|
||||
})
|
||||
}
|
||||
data-test="AllowMultipleValuesCheckbox"
|
||||
>
|
||||
data-test="AllowMultipleValuesCheckbox">
|
||||
Allow multiple values
|
||||
</Checkbox>
|
||||
</Form.Item>
|
||||
@@ -259,11 +227,10 @@ function EditParameterSettingsDialog(props) {
|
||||
Placed in query as: <code>{joinExampleList(param.multiValuesOptions)}</code>
|
||||
</React.Fragment>
|
||||
}
|
||||
{...formItemProps}
|
||||
>
|
||||
{...formItemProps}>
|
||||
<Select
|
||||
value={param.multiValuesOptions.prefix}
|
||||
onChange={(quoteOption) =>
|
||||
onChange={quoteOption =>
|
||||
setParam({
|
||||
...param,
|
||||
multiValuesOptions: {
|
||||
@@ -273,8 +240,7 @@ function EditParameterSettingsDialog(props) {
|
||||
},
|
||||
})
|
||||
}
|
||||
data-test="QuotationSelect"
|
||||
>
|
||||
data-test="QuotationSelect">
|
||||
<Option value="">None (default)</Option>
|
||||
<Option value="'">Single Quotation Mark</Option>
|
||||
<Option value={'"'} data-test="DoubleQuotationMarkOption">
|
||||
|
||||
@@ -1,3 +0,0 @@
|
||||
.input-error {
|
||||
border-color: red !important;
|
||||
}
|
||||
@@ -101,7 +101,7 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
|
||||
clearTimeout(this.iframeLoadingTimeout);
|
||||
}
|
||||
|
||||
loadIframe = (url) => {
|
||||
loadIframe = url => {
|
||||
clearTimeout(this.iframeLoadingTimeout);
|
||||
this.setState({ loading: true, error: false });
|
||||
|
||||
@@ -116,8 +116,8 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
|
||||
clearTimeout(this.iframeLoadingTimeout);
|
||||
};
|
||||
|
||||
onPostMessageReceived = (event) => {
|
||||
if (!some(allowedDomains, (domain) => startsWith(event.origin, domain))) {
|
||||
onPostMessageReceived = event => {
|
||||
if (!some(allowedDomains, domain => startsWith(event.origin, domain))) {
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -134,7 +134,7 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
|
||||
return helpTriggerType ? helpTriggerType[0] : this.props.href;
|
||||
};
|
||||
|
||||
openDrawer = (e) => {
|
||||
openDrawer = e => {
|
||||
// keep "open in new tab" behavior
|
||||
if (!e.shiftKey && !e.ctrlKey && !e.metaKey) {
|
||||
e.preventDefault();
|
||||
@@ -144,7 +144,7 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
|
||||
}
|
||||
};
|
||||
|
||||
closeDrawer = (event) => {
|
||||
closeDrawer = event => {
|
||||
if (event) {
|
||||
event.preventDefault();
|
||||
}
|
||||
@@ -161,7 +161,7 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
|
||||
const tooltip = get(types, `${this.props.type}[1]`, this.props.title);
|
||||
const className = cx("help-trigger", this.props.className);
|
||||
const url = this.state.currentUrl;
|
||||
const isAllowedDomain = some(allowedDomains, (domain) => startsWith(url || targetUrl, domain));
|
||||
const isAllowedDomain = some(allowedDomains, domain => startsWith(url || targetUrl, domain));
|
||||
const shouldRenderAsLink = this.props.renderAsLink || !isAllowedDomain;
|
||||
|
||||
return (
|
||||
@@ -180,15 +180,13 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
|
||||
)}
|
||||
</>
|
||||
) : null
|
||||
}
|
||||
>
|
||||
}>
|
||||
<Link
|
||||
href={url || this.getUrl()}
|
||||
className={className}
|
||||
rel="noopener noreferrer"
|
||||
target="_blank"
|
||||
onClick={shouldRenderAsLink ? () => {} : this.openDrawer}
|
||||
>
|
||||
onClick={shouldRenderAsLink ? () => {} : this.openDrawer}>
|
||||
{this.props.children}
|
||||
</Link>
|
||||
</Tooltip>
|
||||
@@ -199,8 +197,7 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
|
||||
visible={this.state.visible}
|
||||
className={cx("help-drawer", drawerClassName)}
|
||||
destroyOnClose
|
||||
width={400}
|
||||
>
|
||||
width={400}>
|
||||
<div className="drawer-wrapper">
|
||||
<div className="drawer-menu">
|
||||
{url && (
|
||||
|
||||
@@ -33,10 +33,10 @@ export const MappingType = {
|
||||
};
|
||||
|
||||
export function parameterMappingsToEditableMappings(mappings, parameters, existingParameterNames = []) {
|
||||
return map(mappings, (mapping) => {
|
||||
return map(mappings, mapping => {
|
||||
const result = extend({}, mapping);
|
||||
const alreadyExists = includes(existingParameterNames, mapping.mapTo);
|
||||
result.param = find(parameters, (p) => p.name === mapping.name);
|
||||
result.param = find(parameters, p => p.name === mapping.name);
|
||||
switch (mapping.type) {
|
||||
case ParameterMappingType.DashboardLevel:
|
||||
result.type = alreadyExists ? MappingType.DashboardMapToExisting : MappingType.DashboardAddNew;
|
||||
@@ -62,7 +62,7 @@ export function editableMappingsToParameterMappings(mappings) {
|
||||
map(
|
||||
// convert to map
|
||||
mappings,
|
||||
(mapping) => {
|
||||
mapping => {
|
||||
const result = extend({}, mapping);
|
||||
switch (mapping.type) {
|
||||
case MappingType.DashboardAddNew:
|
||||
@@ -95,11 +95,11 @@ export function editableMappingsToParameterMappings(mappings) {
|
||||
export function synchronizeWidgetTitles(sourceMappings, widgets) {
|
||||
const affectedWidgets = [];
|
||||
|
||||
each(sourceMappings, (sourceMapping) => {
|
||||
each(sourceMappings, sourceMapping => {
|
||||
if (sourceMapping.type === ParameterMappingType.DashboardLevel) {
|
||||
each(widgets, (widget) => {
|
||||
each(widgets, widget => {
|
||||
const widgetMappings = widget.options.parameterMappings;
|
||||
each(widgetMappings, (widgetMapping) => {
|
||||
each(widgetMappings, widgetMapping => {
|
||||
// check if mapped to the same dashboard-level parameter
|
||||
if (
|
||||
widgetMapping.type === ParameterMappingType.DashboardLevel &&
|
||||
@@ -140,7 +140,7 @@ export class ParameterMappingInput extends React.Component {
|
||||
className: "form-item",
|
||||
};
|
||||
|
||||
updateSourceType = (type) => {
|
||||
updateSourceType = type => {
|
||||
let {
|
||||
mapping: { mapTo },
|
||||
} = this.props;
|
||||
@@ -155,7 +155,7 @@ export class ParameterMappingInput extends React.Component {
|
||||
this.updateParamMapping({ type, mapTo });
|
||||
};
|
||||
|
||||
updateParamMapping = (update) => {
|
||||
updateParamMapping = update => {
|
||||
const { onChange, mapping } = this.props;
|
||||
const newMapping = extend({}, mapping, update);
|
||||
if (newMapping.value !== mapping.value) {
|
||||
@@ -175,7 +175,7 @@ export class ParameterMappingInput extends React.Component {
|
||||
renderMappingTypeSelector() {
|
||||
const noExisting = isEmpty(this.props.existingParamNames);
|
||||
return (
|
||||
<Radio.Group value={this.props.mapping.type} onChange={(e) => this.updateSourceType(e.target.value)}>
|
||||
<Radio.Group value={this.props.mapping.type} onChange={e => this.updateSourceType(e.target.value)}>
|
||||
<Radio className="radio" value={MappingType.DashboardAddNew} data-test="NewDashboardParameterOption">
|
||||
New dashboard parameter
|
||||
</Radio>
|
||||
@@ -205,16 +205,16 @@ export class ParameterMappingInput extends React.Component {
|
||||
<Input
|
||||
value={mapTo}
|
||||
aria-label="Parameter name (key)"
|
||||
onChange={(e) => this.updateParamMapping({ mapTo: e.target.value })}
|
||||
onChange={e => this.updateParamMapping({ mapTo: e.target.value })}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
renderDashboardMapToExisting() {
|
||||
const { mapping, existingParamNames } = this.props;
|
||||
const options = map(existingParamNames, (paramName) => ({ label: paramName, value: paramName }));
|
||||
const options = map(existingParamNames, paramName => ({ label: paramName, value: paramName }));
|
||||
|
||||
return <Select value={mapping.mapTo} onChange={(mapTo) => this.updateParamMapping({ mapTo })} options={options} />;
|
||||
return <Select value={mapping.mapTo} onChange={mapTo => this.updateParamMapping({ mapTo })} options={options} />;
|
||||
}
|
||||
|
||||
renderStaticValue() {
|
||||
@@ -226,8 +226,7 @@ export class ParameterMappingInput extends React.Component {
|
||||
enumOptions={mapping.param.enumOptions}
|
||||
queryId={mapping.param.queryId}
|
||||
parameter={mapping.param}
|
||||
onSelect={(value) => this.updateParamMapping({ value })}
|
||||
regex={mapping.param.regex}
|
||||
onSelect={value => this.updateParamMapping({ value })}
|
||||
/>
|
||||
);
|
||||
}
|
||||
@@ -285,12 +284,12 @@ class MappingEditor extends React.Component {
|
||||
};
|
||||
}
|
||||
|
||||
onVisibleChange = (visible) => {
|
||||
onVisibleChange = visible => {
|
||||
if (visible) this.show();
|
||||
else this.hide();
|
||||
};
|
||||
|
||||
onChange = (mapping) => {
|
||||
onChange = mapping => {
|
||||
let inputError = null;
|
||||
|
||||
if (mapping.type === MappingType.DashboardAddNew) {
|
||||
@@ -352,8 +351,7 @@ class MappingEditor extends React.Component {
|
||||
trigger="click"
|
||||
content={this.renderContent()}
|
||||
visible={visible}
|
||||
onVisibleChange={this.onVisibleChange}
|
||||
>
|
||||
onVisibleChange={this.onVisibleChange}>
|
||||
<Button size="small" type="dashed" data-test={`EditParamMappingButton-${mapping.param.name}`}>
|
||||
<EditOutlinedIcon />
|
||||
</Button>
|
||||
@@ -378,14 +376,14 @@ class TitleEditor extends React.Component {
|
||||
title: "", // will be set on editing
|
||||
};
|
||||
|
||||
onPopupVisibleChange = (showPopup) => {
|
||||
onPopupVisibleChange = showPopup => {
|
||||
this.setState({
|
||||
showPopup,
|
||||
title: showPopup ? this.getMappingTitle() : "",
|
||||
});
|
||||
};
|
||||
|
||||
onEditingTitleChange = (event) => {
|
||||
onEditingTitleChange = event => {
|
||||
this.setState({ title: event.target.value });
|
||||
};
|
||||
|
||||
@@ -462,8 +460,7 @@ class TitleEditor extends React.Component {
|
||||
trigger="click"
|
||||
content={this.renderPopover()}
|
||||
visible={this.state.showPopup}
|
||||
onVisibleChange={this.onPopupVisibleChange}
|
||||
>
|
||||
onVisibleChange={this.onPopupVisibleChange}>
|
||||
<Button size="small" type="dashed">
|
||||
<EditOutlinedIcon />
|
||||
</Button>
|
||||
@@ -511,7 +508,7 @@ export class ParameterMappingListInput extends React.Component {
|
||||
|
||||
// just to be safe, array or object
|
||||
if (typeof value === "object") {
|
||||
return map(value, (v) => this.getStringValue(v)).join(", ");
|
||||
return map(value, v => this.getStringValue(v)).join(", ");
|
||||
}
|
||||
|
||||
// rest
|
||||
@@ -577,7 +574,7 @@ export class ParameterMappingListInput extends React.Component {
|
||||
|
||||
render() {
|
||||
const { existingParams } = this.props; // eslint-disable-line react/prop-types
|
||||
const dataSource = this.props.mappings.map((mapping) => ({ mapping }));
|
||||
const dataSource = this.props.mappings.map(mapping => ({ mapping }));
|
||||
|
||||
return (
|
||||
<div className="parameters-mapping-list">
|
||||
@@ -586,11 +583,11 @@ export class ParameterMappingListInput extends React.Component {
|
||||
title="Title"
|
||||
dataIndex="mapping"
|
||||
key="title"
|
||||
render={(mapping) => (
|
||||
render={mapping => (
|
||||
<TitleEditor
|
||||
existingParams={existingParams}
|
||||
mapping={mapping}
|
||||
onChange={(newMapping) => this.updateParamMapping(mapping, newMapping)}
|
||||
onChange={newMapping => this.updateParamMapping(mapping, newMapping)}
|
||||
/>
|
||||
)}
|
||||
/>
|
||||
@@ -599,19 +596,19 @@ export class ParameterMappingListInput extends React.Component {
|
||||
dataIndex="mapping"
|
||||
key="keyword"
|
||||
className="keyword"
|
||||
render={(mapping) => <code>{`{{ ${mapping.name} }}`}</code>}
|
||||
render={mapping => <code>{`{{ ${mapping.name} }}`}</code>}
|
||||
/>
|
||||
<Table.Column
|
||||
title="Default Value"
|
||||
dataIndex="mapping"
|
||||
key="value"
|
||||
render={(mapping) => this.constructor.getDefaultValue(mapping, this.props.existingParams)}
|
||||
render={mapping => this.constructor.getDefaultValue(mapping, this.props.existingParams)}
|
||||
/>
|
||||
<Table.Column
|
||||
title="Value Source"
|
||||
dataIndex="mapping"
|
||||
key="source"
|
||||
render={(mapping) => {
|
||||
render={mapping => {
|
||||
const existingParamsNames = existingParams
|
||||
.filter(({ type }) => type === mapping.param.type) // exclude mismatching param types
|
||||
.map(({ name }) => name); // keep names only
|
||||
|
||||
@@ -9,12 +9,11 @@ import DateRangeParameter from "@/components/dynamic-parameters/DateRangeParamet
|
||||
import QueryBasedParameterInput from "./QueryBasedParameterInput";
|
||||
|
||||
import "./ParameterValueInput.less";
|
||||
import Tooltip from "./Tooltip";
|
||||
|
||||
const multipleValuesProps = {
|
||||
maxTagCount: 3,
|
||||
maxTagTextLength: 10,
|
||||
maxTagPlaceholder: (num) => `+${num.length} more`,
|
||||
maxTagPlaceholder: num => `+${num.length} more`,
|
||||
};
|
||||
|
||||
class ParameterValueInput extends React.Component {
|
||||
@@ -26,7 +25,6 @@ class ParameterValueInput extends React.Component {
|
||||
parameter: PropTypes.any, // eslint-disable-line react/forbid-prop-types
|
||||
onSelect: PropTypes.func,
|
||||
className: PropTypes.string,
|
||||
regex: PropTypes.string,
|
||||
};
|
||||
|
||||
static defaultProps = {
|
||||
@@ -37,7 +35,6 @@ class ParameterValueInput extends React.Component {
|
||||
parameter: null,
|
||||
onSelect: () => {},
|
||||
className: "",
|
||||
regex: "",
|
||||
};
|
||||
|
||||
constructor(props) {
|
||||
@@ -48,7 +45,7 @@ class ParameterValueInput extends React.Component {
|
||||
};
|
||||
}
|
||||
|
||||
componentDidUpdate = (prevProps) => {
|
||||
componentDidUpdate = prevProps => {
|
||||
const { value, parameter } = this.props;
|
||||
// if value prop updated, reset dirty state
|
||||
if (prevProps.value !== value || prevProps.parameter !== parameter) {
|
||||
@@ -59,7 +56,7 @@ class ParameterValueInput extends React.Component {
|
||||
}
|
||||
};
|
||||
|
||||
onSelect = (value) => {
|
||||
onSelect = value => {
|
||||
const isDirty = !isEqual(value, this.props.value);
|
||||
this.setState({ value, isDirty });
|
||||
this.props.onSelect(value, isDirty);
|
||||
@@ -96,9 +93,9 @@ class ParameterValueInput extends React.Component {
|
||||
renderEnumInput() {
|
||||
const { enumOptions, parameter } = this.props;
|
||||
const { value } = this.state;
|
||||
const enumOptionsArray = enumOptions.split("\n").filter((v) => v !== "");
|
||||
const enumOptionsArray = enumOptions.split("\n").filter(v => v !== "");
|
||||
// Antd Select doesn't handle null in multiple mode
|
||||
const normalize = (val) => (parameter.multiValuesOptions && val === null ? [] : val);
|
||||
const normalize = val => (parameter.multiValuesOptions && val === null ? [] : val);
|
||||
|
||||
return (
|
||||
<SelectWithVirtualScroll
|
||||
@@ -106,7 +103,7 @@ class ParameterValueInput extends React.Component {
|
||||
mode={parameter.multiValuesOptions ? "multiple" : "default"}
|
||||
value={normalize(value)}
|
||||
onChange={this.onSelect}
|
||||
options={map(enumOptionsArray, (opt) => ({ label: String(opt), value: opt }))}
|
||||
options={map(enumOptionsArray, opt => ({ label: String(opt), value: opt }))}
|
||||
showSearch
|
||||
showArrow
|
||||
notFoundContent={isEmpty(enumOptionsArray) ? "No options available" : null}
|
||||
@@ -136,36 +133,18 @@ class ParameterValueInput extends React.Component {
|
||||
const { className } = this.props;
|
||||
const { value } = this.state;
|
||||
|
||||
const normalize = (val) => (isNaN(val) ? undefined : val);
|
||||
const normalize = val => (isNaN(val) ? undefined : val);
|
||||
|
||||
return (
|
||||
<InputNumber
|
||||
className={className}
|
||||
value={normalize(value)}
|
||||
aria-label="Parameter number value"
|
||||
onChange={(val) => this.onSelect(normalize(val))}
|
||||
onChange={val => this.onSelect(normalize(val))}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
renderTextPatternInput() {
|
||||
const { className } = this.props;
|
||||
const { value } = this.state;
|
||||
|
||||
return (
|
||||
<React.Fragment>
|
||||
<Tooltip title={`Regex to match: ${this.props.regex}`} placement="right">
|
||||
<Input
|
||||
className={className}
|
||||
value={value}
|
||||
aria-label="Parameter text pattern value"
|
||||
onChange={(e) => this.onSelect(e.target.value)}
|
||||
/>
|
||||
</Tooltip>
|
||||
</React.Fragment>
|
||||
);
|
||||
}
|
||||
|
||||
renderTextInput() {
|
||||
const { className } = this.props;
|
||||
const { value } = this.state;
|
||||
@@ -176,7 +155,7 @@ class ParameterValueInput extends React.Component {
|
||||
value={value}
|
||||
aria-label="Parameter text value"
|
||||
data-test="TextParamInput"
|
||||
onChange={(e) => this.onSelect(e.target.value)}
|
||||
onChange={e => this.onSelect(e.target.value)}
|
||||
/>
|
||||
);
|
||||
}
|
||||
@@ -198,8 +177,6 @@ class ParameterValueInput extends React.Component {
|
||||
return this.renderQueryBasedInput();
|
||||
case "number":
|
||||
return this.renderNumberInput();
|
||||
case "text-pattern":
|
||||
return this.renderTextPatternInput();
|
||||
default:
|
||||
return this.renderTextInput();
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { size, filter, forEach, extend, isEmpty } from "lodash";
|
||||
import { size, filter, forEach, extend } from "lodash";
|
||||
import React from "react";
|
||||
import PropTypes from "prop-types";
|
||||
import { SortableContainer, SortableElement, DragHandle } from "@redash/viz/lib/components/sortable";
|
||||
@@ -14,7 +14,7 @@ import "./Parameters.less";
|
||||
|
||||
function updateUrl(parameters) {
|
||||
const params = extend({}, location.search);
|
||||
parameters.forEach((param) => {
|
||||
parameters.forEach(param => {
|
||||
extend(params, param.toUrlParams());
|
||||
});
|
||||
location.setSearch(params, true);
|
||||
@@ -43,26 +43,16 @@ export default class Parameters extends React.Component {
|
||||
appendSortableToParent: true,
|
||||
};
|
||||
|
||||
toCamelCase = (str) => {
|
||||
if (isEmpty(str)) {
|
||||
return "";
|
||||
}
|
||||
return str.replace(/\s+/g, "").toLowerCase();
|
||||
};
|
||||
|
||||
constructor(props) {
|
||||
super(props);
|
||||
const { parameters, disableUrlUpdate } = props;
|
||||
const { parameters } = props;
|
||||
this.state = { parameters };
|
||||
if (!disableUrlUpdate) {
|
||||
if (!props.disableUrlUpdate) {
|
||||
updateUrl(parameters);
|
||||
}
|
||||
const hideRegex = /hide_filter=([^&]+)/g;
|
||||
const matches = window.location.search.matchAll(hideRegex);
|
||||
this.hideValues = Array.from(matches, (match) => match[1]);
|
||||
}
|
||||
|
||||
componentDidUpdate = (prevProps) => {
|
||||
componentDidUpdate = prevProps => {
|
||||
const { parameters, disableUrlUpdate } = this.props;
|
||||
const parametersChanged = prevProps.parameters !== parameters;
|
||||
const disableUrlUpdateChanged = prevProps.disableUrlUpdate !== disableUrlUpdate;
|
||||
@@ -74,7 +64,7 @@ export default class Parameters extends React.Component {
|
||||
}
|
||||
};
|
||||
|
||||
handleKeyDown = (e) => {
|
||||
handleKeyDown = e => {
|
||||
// Cmd/Ctrl/Alt + Enter
|
||||
if (e.keyCode === 13 && (e.ctrlKey || e.metaKey || e.altKey)) {
|
||||
e.stopPropagation();
|
||||
@@ -109,8 +99,8 @@ export default class Parameters extends React.Component {
|
||||
applyChanges = () => {
|
||||
const { onValuesChange, disableUrlUpdate } = this.props;
|
||||
this.setState(({ parameters }) => {
|
||||
const parametersWithPendingValues = parameters.filter((p) => p.hasPendingValue);
|
||||
forEach(parameters, (p) => p.applyPendingValue());
|
||||
const parametersWithPendingValues = parameters.filter(p => p.hasPendingValue);
|
||||
forEach(parameters, p => p.applyPendingValue());
|
||||
if (!disableUrlUpdate) {
|
||||
updateUrl(parameters);
|
||||
}
|
||||
@@ -121,7 +111,7 @@ export default class Parameters extends React.Component {
|
||||
|
||||
showParameterSettings = (parameter, index) => {
|
||||
const { onParametersEdit } = this.props;
|
||||
EditParameterSettingsDialog.showModal({ parameter }).onClose((updated) => {
|
||||
EditParameterSettingsDialog.showModal({ parameter }).onClose(updated => {
|
||||
this.setState(({ parameters }) => {
|
||||
const updatedParameter = extend(parameter, updated);
|
||||
parameters[index] = createParameter(updatedParameter, updatedParameter.parentQueryId);
|
||||
@@ -132,13 +122,7 @@ export default class Parameters extends React.Component {
|
||||
};
|
||||
|
||||
renderParameter(param, index) {
|
||||
if (this.hideValues.some((value) => this.toCamelCase(value) === this.toCamelCase(param.name))) {
|
||||
return null;
|
||||
}
|
||||
const { editable } = this.props;
|
||||
if (param.hidden) {
|
||||
return null;
|
||||
}
|
||||
return (
|
||||
<div key={param.name} className="di-block" data-test={`ParameterName-${param.name}`}>
|
||||
<div className="parameter-heading">
|
||||
@@ -149,13 +133,11 @@ export default class Parameters extends React.Component {
|
||||
aria-label="Edit"
|
||||
onClick={() => this.showParameterSettings(param, index)}
|
||||
data-test={`ParameterSettings-${param.name}`}
|
||||
type="button"
|
||||
>
|
||||
type="button">
|
||||
<i className="fa fa-cog" aria-hidden="true" />
|
||||
</PlainButton>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<ParameterValueInput
|
||||
type={param.type}
|
||||
value={param.normalizedValue}
|
||||
@@ -163,7 +145,6 @@ export default class Parameters extends React.Component {
|
||||
enumOptions={param.enumOptions}
|
||||
queryId={param.queryId}
|
||||
onSelect={(value, isDirty) => this.setPendingValue(param, value, isDirty)}
|
||||
regex={param.regex}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
@@ -173,6 +154,7 @@ export default class Parameters extends React.Component {
|
||||
const { parameters } = this.state;
|
||||
const { sortable, appendSortableToParent } = this.props;
|
||||
const dirtyParamCount = size(filter(parameters, "hasPendingValue"));
|
||||
|
||||
return (
|
||||
<SortableContainer
|
||||
disabled={!sortable}
|
||||
@@ -180,27 +162,24 @@ export default class Parameters extends React.Component {
|
||||
useDragHandle
|
||||
lockToContainerEdges
|
||||
helperClass="parameter-dragged"
|
||||
helperContainer={(containerEl) => (appendSortableToParent ? containerEl : document.body)}
|
||||
helperContainer={containerEl => (appendSortableToParent ? containerEl : document.body)}
|
||||
updateBeforeSortStart={this.onBeforeSortStart}
|
||||
onSortEnd={this.moveParameter}
|
||||
containerProps={{
|
||||
className: "parameter-container",
|
||||
onKeyDown: dirtyParamCount ? this.handleKeyDown : null,
|
||||
}}
|
||||
>
|
||||
{parameters &&
|
||||
parameters.map((param, index) => (
|
||||
<SortableElement key={param.name} index={index}>
|
||||
<div
|
||||
className="parameter-block"
|
||||
data-editable={sortable || null}
|
||||
data-test={`ParameterBlock-${param.name}`}
|
||||
>
|
||||
{sortable && <DragHandle data-test={`DragHandle-${param.name}`} />}
|
||||
{this.renderParameter(param, index)}
|
||||
</div>
|
||||
</SortableElement>
|
||||
))}
|
||||
}}>
|
||||
{parameters.map((param, index) => (
|
||||
<SortableElement key={param.name} index={index}>
|
||||
<div
|
||||
className="parameter-block"
|
||||
data-editable={sortable || null}
|
||||
data-test={`ParameterBlock-${param.name}`}>
|
||||
{sortable && <DragHandle data-test={`DragHandle-${param.name}`} />}
|
||||
{this.renderParameter(param, index)}
|
||||
</div>
|
||||
</SortableElement>
|
||||
))}
|
||||
<ParameterApplyButton onClick={this.applyChanges} paramCount={dirtyParamCount} />
|
||||
</SortableContainer>
|
||||
);
|
||||
|
||||
@@ -69,7 +69,7 @@ UserPreviewCard.defaultProps = {
// DataSourcePreviewCard

export function DataSourcePreviewCard({ dataSource, withLink, children, ...props }) {
const imageUrl = `/static/images/db-logos/${dataSource.type}.png`;
const imageUrl = `static/images/db-logos/${dataSource.type}.png`;
const title = withLink ? <Link href={"data_sources/" + dataSource.id}>{dataSource.name}</Link> : dataSource.name;
return (
<PreviewCard {...props} imageUrl={imageUrl} title={title}>
@@ -36,7 +36,6 @@ function SelectWithVirtualScroll({ options, ...props }: VirtualScrollSelectProps
<AntdSelect<string>
dropdownMatchSelectWidth={dropdownMatchSelectWidth}
options={options}
allowClear={true}
optionFilterProp="label" // as this component expects "options" prop
{...props}
/>
@@ -51,7 +51,7 @@
right: 0;
background: linear-gradient(to bottom, transparent, transparent 2px, #f6f8f9 2px, #f6f8f9 5px),
linear-gradient(to left, #b3babf, #b3babf 1px, transparent 1px, transparent);
background-size: calc((100% + 15px) / 12) 5px;
background-size: calc((100% + 15px) / 6) 5px;
background-position: -7px 1px;
}
}
@@ -151,7 +151,6 @@ export default function DynamicForm({
|
||||
onSubmit,
|
||||
}) {
|
||||
const [isSubmitting, setIsSubmitting] = useState(false);
|
||||
const [isTouched, setIsTouched] = useState(false);
|
||||
const [showExtraFields, setShowExtraFields] = useState(defaultShowExtraFields);
|
||||
const [form] = Form.useForm();
|
||||
const extraFields = filter(fields, { extra: true });
|
||||
@@ -164,8 +163,9 @@ export default function DynamicForm({
|
||||
onSubmit(
|
||||
values,
|
||||
msg => {
|
||||
const { setFieldsValue, getFieldsValue } = form;
|
||||
setIsSubmitting(false);
|
||||
setIsTouched(false); // reset form touched state
|
||||
setFieldsValue(getFieldsValue()); // reset form touched state
|
||||
notification.success(msg);
|
||||
},
|
||||
msg => {
|
||||
@@ -174,7 +174,7 @@ export default function DynamicForm({
|
||||
}
|
||||
);
|
||||
},
|
||||
[fields, onSubmit]
|
||||
[form, fields, onSubmit]
|
||||
);
|
||||
|
||||
const handleFinishFailed = useCallback(
|
||||
@@ -187,9 +187,6 @@ export default function DynamicForm({
|
||||
return (
|
||||
<Form
|
||||
form={form}
|
||||
onFieldsChange={() => {
|
||||
setIsTouched(true);
|
||||
}}
|
||||
id={id}
|
||||
className="dynamic-form"
|
||||
layout="vertical"
|
||||
@@ -219,7 +216,7 @@ export default function DynamicForm({
|
||||
{saveText}
|
||||
</Button>
|
||||
)}
|
||||
<DynamicFormActions actions={actions} isFormDirty={isTouched} />
|
||||
<DynamicFormActions actions={actions} isFormDirty={form.isFieldsTouched()} />
|
||||
</Form>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -96,7 +96,7 @@ function EmptyState({
}, []);

// Show if `onboardingMode=false` or any requested step not completed
const shouldShow = !onboardingMode || some(keys(isAvailable), (step) => isAvailable[step] && !isCompleted[step]);
const shouldShow = !onboardingMode || some(keys(isAvailable), step => isAvailable[step] && !isCompleted[step]);

if (!shouldShow) {
return null;
@@ -181,7 +181,7 @@ function EmptyState({
];

const stepsItems = getStepsItems ? getStepsItems(defaultStepsItems) : defaultStepsItems;
const imageSource = illustrationPath ? illustrationPath : "/static/images/illustrations/" + illustration + ".svg";
const imageSource = illustrationPath ? illustrationPath : "static/images/illustrations/" + illustration + ".svg";

return (
<div className="empty-state-wrapper">
@@ -196,7 +196,7 @@ function EmptyState({
</div>
<div className="empty-state__steps">
<h4>Let's get started</h4>
<ol>{stepsItems.map((item) => item.node)}</ol>
<ol>{stepsItems.map(item => item.node)}</ol>
{helpMessage}
</div>
</div>
@@ -28,7 +28,6 @@ export interface Controller<I, P = any> {
|
||||
orderByField?: string;
|
||||
orderByReverse: boolean;
|
||||
toggleSorting: (orderByField: string) => void;
|
||||
setSorting: (orderByField: string, orderByReverse: boolean) => void;
|
||||
|
||||
// pagination
|
||||
page: number;
|
||||
@@ -140,11 +139,10 @@ export function wrap<I, P = any>(
|
||||
this.props.onError!(error);
|
||||
|
||||
const initialState = this.getState({ ...itemsSource.getState(), isLoaded: false });
|
||||
const { updatePagination, toggleSorting, setSorting, updateSearch, updateSelectedTags, update, handleError } = itemsSource;
|
||||
const { updatePagination, toggleSorting, updateSearch, updateSelectedTags, update, handleError } = itemsSource;
|
||||
this.state = {
|
||||
...initialState,
|
||||
toggleSorting, // eslint-disable-line react/no-unused-state
|
||||
setSorting, // eslint-disable-line react/no-unused-state
|
||||
updateSearch: debounce(updateSearch, 200), // eslint-disable-line react/no-unused-state
|
||||
updateSelectedTags, // eslint-disable-line react/no-unused-state
|
||||
updatePagination, // eslint-disable-line react/no-unused-state
|
||||
|
||||
@@ -39,12 +39,14 @@ export class ItemsSource {
|
||||
const customParams = {};
|
||||
const context = {
|
||||
...this.getCallbackContext(),
|
||||
setCustomParams: (params) => {
|
||||
setCustomParams: params => {
|
||||
extend(customParams, params);
|
||||
},
|
||||
};
|
||||
return this._beforeUpdate().then(() => {
|
||||
const fetchToken = Math.random().toString(36).substr(2);
|
||||
const fetchToken = Math.random()
|
||||
.toString(36)
|
||||
.substr(2);
|
||||
this._currentFetchToken = fetchToken;
|
||||
return this._fetcher
|
||||
.fetch(changes, state, context)
|
||||
@@ -57,7 +59,7 @@ export class ItemsSource {
|
||||
return this._afterUpdate();
|
||||
}
|
||||
})
|
||||
.catch((error) => this.handleError(error));
|
||||
.catch(error => this.handleError(error));
|
||||
});
|
||||
}
|
||||
|
||||
@@ -122,20 +124,13 @@ export class ItemsSource {
|
||||
});
|
||||
};
|
||||
|
||||
toggleSorting = (orderByField) => {
|
||||
toggleSorting = orderByField => {
|
||||
this._sorter.toggleField(orderByField);
|
||||
this._savedOrderByField = this._sorter.field;
|
||||
this._changed({ sorting: true });
|
||||
};
|
||||
|
||||
setSorting = (orderByField, orderByReverse) => {
|
||||
this._sorter.setField(orderByField);
|
||||
this._sorter.setReverse(orderByReverse);
|
||||
this._savedOrderByField = this._sorter.field;
|
||||
this._changed({ sorting: true });
|
||||
};
|
||||
|
||||
updateSearch = (searchTerm) => {
|
||||
updateSearch = searchTerm => {
|
||||
// here we update state directly, but later `fetchData` will update it properly
|
||||
this._searchTerm = searchTerm;
|
||||
// in search mode ignore the ordering and use the ranking order
|
||||
@@ -150,7 +145,7 @@ export class ItemsSource {
|
||||
this._changed({ search: true, pagination: { page: true } });
|
||||
};
|
||||
|
||||
updateSelectedTags = (selectedTags) => {
|
||||
updateSelectedTags = selectedTags => {
|
||||
this._selectedTags = selectedTags;
|
||||
this._paginator.setPage(1);
|
||||
this._changed({ tags: true, pagination: { page: true } });
|
||||
@@ -158,7 +153,7 @@ export class ItemsSource {
|
||||
|
||||
update = () => this._changed();
|
||||
|
||||
handleError = (error) => {
|
||||
handleError = error => {
|
||||
if (isFunction(this.onError)) {
|
||||
this.onError(error);
|
||||
}
|
||||
@@ -177,7 +172,7 @@ export class ResourceItemsSource extends ItemsSource {
|
||||
processResults: (results, context) => {
|
||||
let processItem = getItemProcessor(context);
|
||||
processItem = isFunction(processItem) ? processItem : identity;
|
||||
return map(results, (item) => processItem(item, context));
|
||||
return map(results, item => processItem(item, context));
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
@@ -44,7 +44,7 @@ export const Columns = {
|
||||
date(overrides) {
|
||||
return extend(
|
||||
{
|
||||
render: (text) => formatDate(text),
|
||||
render: text => formatDate(text),
|
||||
},
|
||||
overrides
|
||||
);
|
||||
@@ -52,7 +52,7 @@ export const Columns = {
|
||||
dateTime(overrides) {
|
||||
return extend(
|
||||
{
|
||||
render: (text) => formatDateTime(text),
|
||||
render: text => formatDateTime(text),
|
||||
},
|
||||
overrides
|
||||
);
|
||||
@@ -62,7 +62,7 @@ export const Columns = {
|
||||
{
|
||||
width: "1%",
|
||||
className: "text-nowrap",
|
||||
render: (text) => durationHumanize(text),
|
||||
render: text => durationHumanize(text),
|
||||
},
|
||||
overrides
|
||||
);
|
||||
@@ -70,7 +70,7 @@ export const Columns = {
|
||||
timeAgo(overrides, timeAgoCustomProps = undefined) {
|
||||
return extend(
|
||||
{
|
||||
render: (value) => <TimeAgo date={value} {...timeAgoCustomProps} />,
|
||||
render: value => <TimeAgo date={value} {...timeAgoCustomProps} />,
|
||||
},
|
||||
overrides
|
||||
);
|
||||
@@ -110,7 +110,6 @@ export default class ItemsTable extends React.Component {
|
||||
orderByField: PropTypes.string,
|
||||
orderByReverse: PropTypes.bool,
|
||||
toggleSorting: PropTypes.func,
|
||||
setSorting: PropTypes.func,
|
||||
"data-test": PropTypes.string,
|
||||
rowKey: PropTypes.oneOfType([PropTypes.string, PropTypes.func]),
|
||||
};
|
||||
@@ -128,15 +127,18 @@ export default class ItemsTable extends React.Component {
|
||||
};
|
||||
|
||||
prepareColumns() {
|
||||
const { orderByField, orderByReverse } = this.props;
|
||||
const { orderByField, orderByReverse, toggleSorting } = this.props;
|
||||
const orderByDirection = orderByReverse ? "descend" : "ascend";
|
||||
|
||||
return map(
|
||||
map(
|
||||
filter(this.props.columns, (column) => (isFunction(column.isAvailable) ? column.isAvailable() : true)),
|
||||
(column) => extend(column, { orderByField: column.orderByField || column.field })
|
||||
filter(this.props.columns, column => (isFunction(column.isAvailable) ? column.isAvailable() : true)),
|
||||
column => extend(column, { orderByField: column.orderByField || column.field })
|
||||
),
|
||||
(column, index) => {
|
||||
// Bind click events only to sortable columns
|
||||
const onHeaderCell = column.sorter ? () => ({ onClick: () => toggleSorting(column.orderByField) }) : null;
|
||||
|
||||
// Wrap render function to pass correct arguments
|
||||
const render = isFunction(column.render) ? (text, row) => column.render(text, row.item) : identity;
|
||||
|
||||
@@ -144,13 +146,14 @@ export default class ItemsTable extends React.Component {
|
||||
key: "column" + index,
|
||||
dataIndex: ["item", column.field],
|
||||
defaultSortOrder: column.orderByField === orderByField ? orderByDirection : null,
|
||||
onHeaderCell,
|
||||
render,
|
||||
});
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
getRowKey = (record) => {
|
||||
getRowKey = record => {
|
||||
const { rowKey } = this.props;
|
||||
if (rowKey) {
|
||||
if (isFunction(rowKey)) {
|
||||
@@ -169,43 +172,22 @@ export default class ItemsTable extends React.Component {
|
||||
|
||||
// Bind events only if `onRowClick` specified
|
||||
const onTableRow = isFunction(this.props.onRowClick)
|
||||
? (row) => ({
|
||||
onClick: (event) => {
|
||||
? row => ({
|
||||
onClick: event => {
|
||||
this.props.onRowClick(event, row.item);
|
||||
},
|
||||
})
|
||||
: null;
|
||||
|
||||
const onChange = (pagination, filters, sorter, extra) => {
|
||||
const action = extra?.action;
|
||||
if (action === "sort") {
|
||||
const propsColumn = this.props.columns.find((column) => column.field === sorter.field[1]);
|
||||
if (!propsColumn.sorter) {
|
||||
return;
|
||||
}
|
||||
let orderByField = propsColumn.orderByField;
|
||||
const orderByReverse = sorter.order === "descend";
|
||||
|
||||
if (orderByReverse === undefined) {
|
||||
orderByField = null;
|
||||
}
|
||||
if (this.props.setSorting) {
|
||||
this.props.setSorting(orderByField, orderByReverse);
|
||||
} else {
|
||||
this.props.toggleSorting(orderByField);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const { showHeader } = this.props;
|
||||
if (this.props.loading) {
|
||||
if (isEmpty(tableDataProps.dataSource)) {
|
||||
tableDataProps.columns = tableDataProps.columns.map((column) => ({
|
||||
tableDataProps.columns = tableDataProps.columns.map(column => ({
|
||||
...column,
|
||||
sorter: false,
|
||||
render: () => <Skeleton active paragraph={false} />,
|
||||
}));
|
||||
tableDataProps.dataSource = range(10).map((key) => ({ key: `${key}` }));
|
||||
tableDataProps.dataSource = range(10).map(key => ({ key: `${key}` }));
|
||||
} else {
|
||||
tableDataProps.loading = { indicator: null };
|
||||
}
|
||||
@@ -218,7 +200,6 @@ export default class ItemsTable extends React.Component {
|
||||
rowKey={this.getRowKey}
|
||||
pagination={false}
|
||||
onRow={onTableRow}
|
||||
onChange={onChange}
|
||||
data-test={this.props["data-test"]}
|
||||
{...tableDataProps}
|
||||
/>
|
||||
|
||||
@@ -11,17 +11,17 @@
|
||||
> .layout-content {
|
||||
flex: 1 0 auto;
|
||||
width: 75%;
|
||||
order: 1;
|
||||
order: 0;
|
||||
margin: 0;
|
||||
padding: 0 0 0 @spacing
|
||||
}
|
||||
|
||||
> .layout-sidebar {
|
||||
flex: 0 0 auto;
|
||||
width: 25%;
|
||||
max-width: 350px;
|
||||
order: 0;
|
||||
order: 1;
|
||||
margin: 0;
|
||||
padding: 0 0 0 @spacing;
|
||||
}
|
||||
|
||||
@media (max-width: 990px) {
|
||||
@@ -31,7 +31,6 @@
|
||||
width: 100%;
|
||||
order: 1;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
> .layout-sidebar {
|
||||
@@ -39,6 +38,7 @@
|
||||
max-width: none;
|
||||
order: 0;
|
||||
margin: 0 0 @spacing 0;
|
||||
padding: 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -65,7 +65,6 @@ export const Query = PropTypes.shape({

export const AlertOptions = PropTypes.shape({
column: PropTypes.string,
selector: PropTypes.oneOf(["first", "min", "max"]),
op: PropTypes.oneOf([">", ">=", "<", "<=", "==", "!="]),
value: PropTypes.oneOfType([PropTypes.string, PropTypes.number]),
custom_subject: PropTypes.string,
@@ -84,7 +83,6 @@ export const Alert = PropTypes.shape({
query: Query,
options: PropTypes.shape({
column: PropTypes.string,
selector: PropTypes.string,
op: PropTypes.string,
value: PropTypes.oneOfType([PropTypes.string, PropTypes.number]),
}).isRequired,
@@ -27,10 +27,6 @@ defineDummySnippets("sql");
defineDummySnippets("json");
defineDummySnippets("yaml");

// without this line, ace will try to load a non-existent mode-custom.js file
// for data sources with syntax = "custom"
ace.define("ace/mode/custom", [], () => {});

function buildTableColumnKeywords(table) {
const keywords = [];
table.columns.forEach(column => {
@@ -148,9 +148,7 @@ function EditVisualizationDialog({ dialog, visualization, query, queryResult })

function dismiss() {
const optionsChanged = !isEqual(options, defaultState.originalOptions);
confirmDialogClose(nameChanged || optionsChanged)
.then(dialog.dismiss)
.catch(() => {});
confirmDialogClose(nameChanged || optionsChanged).then(dialog.dismiss);
}

// When editing existing visualization chart type selector is disabled, so add only existing visualization's
@@ -59,7 +59,6 @@ function wrapComponentWithSettings(WrappedComponent) {
"dateTimeFormat",
"integerFormat",
"floatFormat",
"nullValue",
"booleanValues",
"tableCellMaxJSONSize",
"allowCustomJSVisualizations",
@@ -1,13 +1,13 @@
export default {
columns: 12, // grid columns count
columns: 6, // grid columns count
rowHeight: 50, // grid row height (incl. bottom padding)
margins: 15, // widget margins
mobileBreakPoint: 800,
// defaults for widgets
defaultSizeX: 6,
defaultSizeX: 3,
defaultSizeY: 3,
minSizeX: 2,
maxSizeX: 12,
minSizeY: 2,
minSizeX: 1,
maxSizeX: 6,
minSizeY: 1,
maxSizeY: 1000,
};
@@ -1,5 +1,5 @@
<!DOCTYPE html>
<html lang="en" translate="no">
<html lang="en">
<head>
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<meta charset="UTF-8" />
@@ -5,7 +5,7 @@
<meta charset="UTF-8" />
<base href="{{base_href}}" />
<title><%= htmlWebpackPlugin.options.title %></title>
<script src="<%= htmlWebpackPlugin.options.staticPath %>unsupportedRedirect.js" async></script>
<script src="/static/unsupportedRedirect.js" async></script>

<link rel="icon" type="image/png" sizes="32x32" href="/static/images/favicon-32x32.png" />
<link rel="icon" type="image/png" sizes="96x96" href="/static/images/favicon-96x96.png" />
@@ -16,7 +16,6 @@ import MenuButton from "./components/MenuButton";
|
||||
import AlertView from "./AlertView";
|
||||
import AlertEdit from "./AlertEdit";
|
||||
import AlertNew from "./AlertNew";
|
||||
import notifications from "@/services/notifications";
|
||||
|
||||
const MODES = {
|
||||
NEW: 0,
|
||||
@@ -65,7 +64,6 @@ class Alert extends React.Component {
|
||||
this.setState({
|
||||
alert: {
|
||||
options: {
|
||||
selector: "first",
|
||||
op: ">",
|
||||
value: 1,
|
||||
muted: false,
|
||||
@@ -77,7 +75,7 @@ class Alert extends React.Component {
|
||||
} else {
|
||||
const { alertId } = this.props;
|
||||
AlertService.get({ id: alertId })
|
||||
.then((alert) => {
|
||||
.then(alert => {
|
||||
if (this._isMounted) {
|
||||
const canEdit = currentUser.canEdit(alert);
|
||||
|
||||
@@ -95,7 +93,7 @@ class Alert extends React.Component {
|
||||
this.onQuerySelected(alert.query);
|
||||
}
|
||||
})
|
||||
.catch((error) => {
|
||||
.catch(error => {
|
||||
if (this._isMounted) {
|
||||
this.props.onError(error);
|
||||
}
|
||||
@@ -114,7 +112,7 @@ class Alert extends React.Component {
|
||||
alert.rearm = pendingRearm || null;
|
||||
|
||||
return AlertService.save(alert)
|
||||
.then((alert) => {
|
||||
.then(alert => {
|
||||
notification.success("Saved.");
|
||||
navigateTo(`alerts/${alert.id}`, true);
|
||||
this.setState({ alert, mode: MODES.VIEW });
|
||||
@@ -124,7 +122,7 @@ class Alert extends React.Component {
|
||||
});
|
||||
};
|
||||
|
||||
onQuerySelected = (query) => {
|
||||
onQuerySelected = query => {
|
||||
this.setState(({ alert }) => ({
|
||||
alert: Object.assign(alert, { query }),
|
||||
queryResult: null,
|
||||
@@ -132,7 +130,7 @@ class Alert extends React.Component {
|
||||
|
||||
if (query) {
|
||||
// get cached result for column names and values
|
||||
new QueryService(query).getQueryResultPromise().then((queryResult) => {
|
||||
new QueryService(query).getQueryResultPromise().then(queryResult => {
|
||||
if (this._isMounted) {
|
||||
this.setState({ queryResult });
|
||||
let { column } = this.state.alert.options;
|
||||
@@ -148,18 +146,18 @@ class Alert extends React.Component {
|
||||
}
|
||||
};
|
||||
|
||||
onNameChange = (name) => {
|
||||
onNameChange = name => {
|
||||
const { alert } = this.state;
|
||||
this.setState({
|
||||
alert: Object.assign(alert, { name }),
|
||||
});
|
||||
};
|
||||
|
||||
onRearmChange = (pendingRearm) => {
|
||||
onRearmChange = pendingRearm => {
|
||||
this.setState({ pendingRearm });
|
||||
};
|
||||
|
||||
setAlertOptions = (obj) => {
|
||||
setAlertOptions = obj => {
|
||||
const { alert } = this.state;
|
||||
const options = { ...alert.options, ...obj };
|
||||
this.setState({
|
||||
@@ -179,17 +177,6 @@ class Alert extends React.Component {
|
||||
});
|
||||
};
|
||||
|
||||
evaluate = () => {
|
||||
const { alert } = this.state;
|
||||
return AlertService.evaluate(alert)
|
||||
.then(() => {
|
||||
notification.success("Alert evaluated. Refresh page for updated status.");
|
||||
})
|
||||
.catch(() => {
|
||||
notifications.error("Failed to evaluate alert.");
|
||||
});
|
||||
};
|
||||
|
||||
mute = () => {
|
||||
const { alert } = this.state;
|
||||
return AlertService.mute(alert)
|
||||
@@ -236,14 +223,7 @@ class Alert extends React.Component {
|
||||
const { queryResult, mode, canEdit, pendingRearm } = this.state;
|
||||
|
||||
const menuButton = (
|
||||
<MenuButton
|
||||
doDelete={this.delete}
|
||||
muted={muted}
|
||||
mute={this.mute}
|
||||
unmute={this.unmute}
|
||||
canEdit={canEdit}
|
||||
evaluate={this.evaluate}
|
||||
/>
|
||||
<MenuButton doDelete={this.delete} muted={muted} mute={this.mute} unmute={this.unmute} canEdit={canEdit} />
|
||||
);
|
||||
|
||||
const commonProps = {
|
||||
@@ -278,7 +258,7 @@ routes.register(
|
||||
routeWithUserSession({
|
||||
path: "/alerts/new",
|
||||
title: "New Alert",
|
||||
render: (pageProps) => <Alert {...pageProps} mode={MODES.NEW} />,
|
||||
render: pageProps => <Alert {...pageProps} mode={MODES.NEW} />,
|
||||
})
|
||||
);
|
||||
routes.register(
|
||||
@@ -286,7 +266,7 @@ routes.register(
|
||||
routeWithUserSession({
|
||||
path: "/alerts/:alertId",
|
||||
title: "Alert",
|
||||
render: (pageProps) => <Alert {...pageProps} mode={MODES.VIEW} />,
|
||||
render: pageProps => <Alert {...pageProps} mode={MODES.VIEW} />,
|
||||
})
|
||||
);
|
||||
routes.register(
|
||||
@@ -294,6 +274,6 @@ routes.register(
|
||||
routeWithUserSession({
|
||||
path: "/alerts/:alertId/edit",
|
||||
title: "Alert",
|
||||
render: (pageProps) => <Alert {...pageProps} mode={MODES.EDIT} />,
|
||||
render: pageProps => <Alert {...pageProps} mode={MODES.EDIT} />,
|
||||
})
|
||||
);
|
||||
|
||||
@@ -68,23 +68,13 @@ export default class AlertView extends React.Component {
|
||||
<>
|
||||
<Title name={name} alert={alert}>
|
||||
<DynamicComponent name="AlertView.HeaderExtra" alert={alert} />
|
||||
{canEdit ? (
|
||||
<>
|
||||
<Button type="default" onClick={canEdit ? onEdit : null} className={cx({ disabled: !canEdit })}>
|
||||
<i className="fa fa-edit m-r-5" aria-hidden="true" />
|
||||
Edit
|
||||
</Button>
|
||||
{menuButton}
|
||||
</>
|
||||
) : (
|
||||
<Tooltip title="You do not have sufficient permissions to edit this alert">
|
||||
<Button type="default" onClick={canEdit ? onEdit : null} className={cx({ disabled: !canEdit })}>
|
||||
<i className="fa fa-edit m-r-5" aria-hidden="true" />
|
||||
Edit
|
||||
</Button>
|
||||
{menuButton}
|
||||
</Tooltip>
|
||||
)}
|
||||
<Tooltip title={canEdit ? "" : "You do not have sufficient permissions to edit this alert"}>
|
||||
<Button type="default" onClick={canEdit ? onEdit : null} className={cx({ disabled: !canEdit })}>
|
||||
<i className="fa fa-edit m-r-5" aria-hidden="true" />
|
||||
Edit
|
||||
</Button>
|
||||
{menuButton}
|
||||
</Tooltip>
|
||||
</Title>
|
||||
<div className="bg-white tiled p-20">
|
||||
<Grid.Row type="flex" gutter={16}>
|
||||
|
||||
@@ -54,74 +54,23 @@ export default function Criteria({ columnNames, resultValues, alertOptions, onCh
|
||||
return null;
|
||||
})();
|
||||
|
||||
let columnHint;
|
||||
|
||||
if (alertOptions.selector === "first") {
|
||||
columnHint = (
|
||||
<small className="alert-criteria-hint">
|
||||
Top row value is <code className="p-0">{toString(columnValue) || "unknown"}</code>
|
||||
</small>
|
||||
);
|
||||
} else if (alertOptions.selector === "max") {
|
||||
columnHint = (
|
||||
<small className="alert-criteria-hint">
|
||||
Max column value is{" "}
|
||||
<code className="p-0">
|
||||
{toString(
|
||||
Math.max(...resultValues.map((o) => Number(o[alertOptions.column])).filter((value) => !isNaN(value)))
|
||||
) || "unknown"}
|
||||
</code>
|
||||
</small>
|
||||
);
|
||||
} else if (alertOptions.selector === "min") {
|
||||
columnHint = (
|
||||
<small className="alert-criteria-hint">
|
||||
Min column value is{" "}
|
||||
<code className="p-0">
|
||||
{toString(
|
||||
Math.min(...resultValues.map((o) => Number(o[alertOptions.column])).filter((value) => !isNaN(value)))
|
||||
) || "unknown"}
|
||||
</code>
|
||||
</small>
|
||||
);
|
||||
}
|
||||
const columnHint = (
|
||||
<small className="alert-criteria-hint">
|
||||
Top row value is <code className="p-0">{toString(columnValue) || "unknown"}</code>
|
||||
</small>
|
||||
);
|
||||
|
||||
return (
|
||||
<div data-test="Criteria">
|
||||
<div className="input-title">
|
||||
<span className="input-label">Selector</span>
|
||||
{editMode ? (
|
||||
<Select
|
||||
value={alertOptions.selector}
|
||||
onChange={(selector) => onChange({ selector })}
|
||||
optionLabelProp="label"
|
||||
dropdownMatchSelectWidth={false}
|
||||
style={{ width: 80 }}
|
||||
>
|
||||
<Select.Option value="first" label="first">
|
||||
first
|
||||
</Select.Option>
|
||||
<Select.Option value="min" label="min">
|
||||
min
|
||||
</Select.Option>
|
||||
<Select.Option value="max" label="max">
|
||||
max
|
||||
</Select.Option>
|
||||
</Select>
|
||||
) : (
|
||||
<DisabledInput minWidth={60}>{alertOptions.selector}</DisabledInput>
|
||||
)}
|
||||
</div>
|
||||
<div className="input-title">
|
||||
<span className="input-label">Value column</span>
|
||||
{editMode ? (
|
||||
<Select
|
||||
value={alertOptions.column}
|
||||
onChange={(column) => onChange({ column })}
|
||||
onChange={column => onChange({ column })}
|
||||
dropdownMatchSelectWidth={false}
|
||||
style={{ minWidth: 100 }}
|
||||
>
|
||||
{columnNames.map((name) => (
|
||||
style={{ minWidth: 100 }}>
|
||||
{columnNames.map(name => (
|
||||
<Select.Option key={name}>{name}</Select.Option>
|
||||
))}
|
||||
</Select>
|
||||
@@ -134,11 +83,10 @@ export default function Criteria({ columnNames, resultValues, alertOptions, onCh
|
||||
{editMode ? (
|
||||
<Select
|
||||
value={alertOptions.op}
|
||||
onChange={(op) => onChange({ op })}
|
||||
onChange={op => onChange({ op })}
|
||||
optionLabelProp="label"
|
||||
dropdownMatchSelectWidth={false}
|
||||
style={{ width: 55 }}
|
||||
>
|
||||
style={{ width: 55 }}>
|
||||
<Select.Option value=">" label={CONDITIONS[">"]}>
|
||||
{CONDITIONS[">"]} greater than
|
||||
</Select.Option>
|
||||
@@ -177,7 +125,7 @@ export default function Criteria({ columnNames, resultValues, alertOptions, onCh
|
||||
id="threshold-criterion"
|
||||
style={{ width: 90 }}
|
||||
value={alertOptions.value}
|
||||
onChange={(e) => onChange({ value: e.target.value })}
|
||||
onChange={e => onChange({ value: e.target.value })}
|
||||
/>
|
||||
) : (
|
||||
<DisabledInput minWidth={50}>{alertOptions.value}</DisabledInput>
|
||||
|
||||
@@ -11,7 +11,7 @@ import LoadingOutlinedIcon from "@ant-design/icons/LoadingOutlined";
import EllipsisOutlinedIcon from "@ant-design/icons/EllipsisOutlined";
import PlainButton from "@/components/PlainButton";

export default function MenuButton({ doDelete, canEdit, mute, unmute, evaluate, muted }) {
export default function MenuButton({ doDelete, canEdit, mute, unmute, muted }) {
const [loading, setLoading] = useState(false);

const execute = useCallback(action => {
@@ -55,9 +55,6 @@ export default function MenuButton({ doDelete, canEdit, mute, unmute, evaluate,
<Menu.Item>
<PlainButton onClick={confirmDelete}>Delete</PlainButton>
</Menu.Item>
<Menu.Item>
<PlainButton onClick={() => execute(evaluate)}>Evaluate</PlainButton>
</Menu.Item>
</Menu>
}>
<Button aria-label="More actions">
@@ -72,7 +69,6 @@ MenuButton.propTypes = {
canEdit: PropTypes.bool.isRequired,
mute: PropTypes.func.isRequired,
unmute: PropTypes.func.isRequired,
evaluate: PropTypes.func.isRequired,
muted: PropTypes.bool,
};
@@ -15,7 +15,6 @@ import { DashboardTagsControl } from "@/components/tags-control/TagsControl";
|
||||
import getTags from "@/services/getTags";
|
||||
import { clientConfig } from "@/services/auth";
|
||||
import { policy } from "@/services/policy";
|
||||
import recordEvent from "@/services/recordEvent";
|
||||
import { durationHumanize } from "@/lib/utils";
|
||||
import { DashboardStatusEnum } from "../hooks/useDashboard";
|
||||
|
||||
@@ -119,8 +118,6 @@ function DashboardMoreOptionsButton({ dashboardConfiguration }) {
|
||||
managePermissions,
|
||||
gridDisabled,
|
||||
isDashboardOwnerOrAdmin,
|
||||
isDuplicating,
|
||||
duplicateDashboard,
|
||||
} = dashboardConfiguration;
|
||||
|
||||
const archive = () => {
|
||||
@@ -144,14 +141,6 @@ function DashboardMoreOptionsButton({ dashboardConfiguration }) {
|
||||
<Menu.Item className={cx({ hidden: gridDisabled })}>
|
||||
<PlainButton onClick={() => setEditingLayout(true)}>Edit</PlainButton>
|
||||
</Menu.Item>
|
||||
{!isDuplicating && dashboard.canEdit() && (
|
||||
<Menu.Item>
|
||||
<PlainButton onClick={duplicateDashboard}>
|
||||
Fork <i className="fa fa-external-link m-l-5" aria-hidden="true" />
|
||||
<span className="sr-only">(opens in a new tab)</span>
|
||||
</PlainButton>
|
||||
</Menu.Item>
|
||||
)}
|
||||
{clientConfig.showPermissionsControl && isDashboardOwnerOrAdmin && (
|
||||
<Menu.Item>
|
||||
<PlainButton onClick={managePermissions}>Manage Permissions</PlainButton>
|
||||
@@ -186,7 +175,6 @@ function DashboardControl({ dashboardConfiguration, headerExtra }) {
|
||||
fullscreen,
|
||||
toggleFullscreen,
|
||||
showShareDashboardDialog,
|
||||
updateDashboard,
|
||||
} = dashboardConfiguration;
|
||||
const showPublishButton = dashboard.is_draft;
|
||||
const showRefreshButton = true;
|
||||
@@ -194,14 +182,8 @@ function DashboardControl({ dashboardConfiguration, headerExtra }) {
|
||||
const canShareDashboard = canEditDashboard && !dashboard.is_draft;
|
||||
const showShareButton = !clientConfig.disablePublicUrls && (dashboard.publicAccessEnabled || canShareDashboard);
|
||||
const showMoreOptionsButton = canEditDashboard;
|
||||
|
||||
const unarchiveDashboard = () => {
|
||||
recordEvent("unarchive", "dashboard", dashboard.id);
|
||||
updateDashboard({ is_archived: false }, false);
|
||||
};
|
||||
return (
|
||||
<div className="dashboard-control">
|
||||
{dashboard.can_edit && dashboard.is_archived && <Button onClick={unarchiveDashboard}>Unarchive</Button>}
|
||||
{!dashboard.is_archived && (
|
||||
<span className="hidden-print">
|
||||
{showPublishButton && (
|
||||
|
||||
@@ -94,12 +94,12 @@ class ShareDashboardDialog extends React.Component {
};

render() {
const { dialog, dashboard, hasOnlySafeQueries } = this.props;
const headerContent = this.constructor.headerContent;
const { dialog, dashboard } = this.props;

return (
<Modal {...dialog.props} title={headerContent} footer={null}>
<Modal {...dialog.props} title={this.constructor.headerContent} footer={null}>
<Form layout="horizontal">
{!hasOnlySafeQueries && (
{!this.props.hasOnlySafeQueries && (
<Form.Item>
<Alert
message="For your security, sharing is currently not supported for dashboards containing queries with text parameters. Consider changing the text parameters in your query to a different type."
@@ -107,7 +107,6 @@ class ShareDashboardDialog extends React.Component {
/>
</Form.Item>
)}

<Form.Item label="Allow public access" {...this.formItemProps}>
<Switch
checked={dashboard.publicAccessEnabled}
@@ -6,6 +6,11 @@ div.tags-list {
-ms-user-select: none; /* IE10+ */
}

.page-dashboard-list .page-header-actions {
width: 25%; /* same as sidebar */
max-width: 350px; /* same as sidebar */
}

/* same rule as for sidebar */
@media (max-width: 990px) {
.page-dashboard-list .page-header-actions {
@@ -15,7 +15,6 @@ import ShareDashboardDialog from "../components/ShareDashboardDialog";
import useFullscreenHandler from "../../../lib/hooks/useFullscreenHandler";
import useRefreshRateHandler from "./useRefreshRateHandler";
import useEditModeHandler from "./useEditModeHandler";
import useDuplicateDashboard from "./useDuplicateDashboard";
import { policy } from "@/services/policy";

export { DashboardStatusEnum } from "./useEditModeHandler";
@@ -54,8 +53,6 @@ function useDashboard(dashboardData) {
[dashboard]
);

const [isDuplicating, duplicateDashboard] = useDuplicateDashboard(dashboard);

const managePermissions = useCallback(() => {
const aclUrl = `api/dashboards/${dashboard.id}/acl`;
PermissionsEditorDialog.showModal({
@@ -246,8 +243,6 @@ function useDashboard(dashboardData) {
showAddTextboxDialog,
showAddWidgetDialog,
managePermissions,
isDuplicating,
duplicateDashboard,
};
}
@@ -1,40 +0,0 @@
import { noop, extend, pick } from "lodash";
import { useCallback, useState } from "react";
import url from "url";
import qs from "query-string";
import { Dashboard } from "@/services/dashboard";

function keepCurrentUrlParams(targetUrl) {
const currentUrlParams = qs.parse(window.location.search);
targetUrl = url.parse(targetUrl);
const targetUrlParams = qs.parse(targetUrl.search);
return url.format(
extend(pick(targetUrl, ["protocol", "auth", "host", "pathname"]), {
search: qs.stringify(extend(currentUrlParams, targetUrlParams)),
})
);
}

export default function useDuplicateDashboard(dashboard) {
const [isDuplicating, setIsDuplicating] = useState(false);

const duplicateDashboard = useCallback(() => {
// To prevent opening the same tab, name must be unique for each browser
const tabName = `duplicatedDashboardTab/${Math.random().toString()}`;

// We should open tab here because this moment is a part of user interaction;
// later browser will block such attempts
const tab = window.open("", tabName);

setIsDuplicating(true);
Dashboard.fork({ id: dashboard.id })
.then(newDashboard => {
tab.location = keepCurrentUrlParams(newDashboard.getUrl());
})
.finally(() => {
setIsDuplicating(false);
});
}, [dashboard.id]);

return [isDuplicating, isDuplicating ? noop : duplicateDashboard];
}
@@ -31,8 +31,7 @@ function DeprecatedEmbedFeatureAlert() {
|
||||
<Link
|
||||
href="https://discuss.redash.io/t/support-for-parameters-in-embedded-visualizations/3337"
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
>
|
||||
rel="noopener noreferrer">
|
||||
Read more
|
||||
</Link>
|
||||
.
|
||||
@@ -44,7 +43,7 @@ function DeprecatedEmbedFeatureAlert() {
|
||||
|
||||
function EmailNotVerifiedAlert() {
|
||||
const verifyEmail = () => {
|
||||
axios.post("verification_email/").then((data) => {
|
||||
axios.post("verification_email/").then(data => {
|
||||
notification.success(data.message);
|
||||
});
|
||||
};
|
||||
@@ -101,6 +100,6 @@ routes.register(
|
||||
routeWithUserSession({
|
||||
path: "/",
|
||||
title: "Redash",
|
||||
render: (pageProps) => <Home {...pageProps} />,
|
||||
render: pageProps => <Home {...pageProps} />,
|
||||
})
|
||||
);
|
||||
|
||||
@@ -160,15 +160,14 @@ function QueriesList({ controller }) {
|
||||
orderByField={controller.orderByField}
|
||||
orderByReverse={controller.orderByReverse}
|
||||
toggleSorting={controller.toggleSorting}
|
||||
setSorting={controller.setSorting}
|
||||
/>
|
||||
<Paginator
|
||||
showPageSizeSelect
|
||||
totalCount={controller.totalItemsCount}
|
||||
pageSize={controller.itemsPerPage}
|
||||
onPageSizeChange={(itemsPerPage) => controller.updatePagination({ itemsPerPage })}
|
||||
onPageSizeChange={itemsPerPage => controller.updatePagination({ itemsPerPage })}
|
||||
page={controller.page}
|
||||
onChange={(page) => controller.updatePagination({ page })}
|
||||
onChange={page => controller.updatePagination({ page })}
|
||||
/>
|
||||
</div>
|
||||
</React.Fragment>
|
||||
@@ -197,7 +196,7 @@ const QueriesListPage = itemsList(
|
||||
}[currentPage];
|
||||
},
|
||||
getItemProcessor() {
|
||||
return (item) => new Query(item);
|
||||
return item => new Query(item);
|
||||
},
|
||||
}),
|
||||
() => new UrlStateStorage({ orderByField: "created_at", orderByReverse: true })
|
||||
@@ -208,7 +207,7 @@ routes.register(
|
||||
routeWithUserSession({
|
||||
path: "/queries",
|
||||
title: "Queries",
|
||||
render: (pageProps) => <QueriesListPage {...pageProps} currentPage="all" />,
|
||||
render: pageProps => <QueriesListPage {...pageProps} currentPage="all" />,
|
||||
})
|
||||
);
|
||||
routes.register(
|
||||
@@ -216,7 +215,7 @@ routes.register(
|
||||
routeWithUserSession({
|
||||
path: "/queries/favorites",
|
||||
title: "Favorite Queries",
|
||||
render: (pageProps) => <QueriesListPage {...pageProps} currentPage="favorites" />,
|
||||
render: pageProps => <QueriesListPage {...pageProps} currentPage="favorites" />,
|
||||
})
|
||||
);
|
||||
routes.register(
|
||||
@@ -224,7 +223,7 @@ routes.register(
|
||||
routeWithUserSession({
|
||||
path: "/queries/archive",
|
||||
title: "Archived Queries",
|
||||
render: (pageProps) => <QueriesListPage {...pageProps} currentPage="archive" />,
|
||||
render: pageProps => <QueriesListPage {...pageProps} currentPage="archive" />,
|
||||
})
|
||||
);
|
||||
routes.register(
|
||||
@@ -232,6 +231,6 @@ routes.register(
|
||||
routeWithUserSession({
|
||||
path: "/queries/my",
|
||||
title: "My Queries",
|
||||
render: (pageProps) => <QueriesListPage {...pageProps} currentPage="my" />,
|
||||
render: pageProps => <QueriesListPage {...pageProps} currentPage="my" />,
|
||||
})
|
||||
);
|
||||
|
||||
@@ -3,6 +3,10 @@
height: 35px;
}

.page-queries-list .page-header-actions {
width: 25%; /* same as sidebar */
max-width: 350px; /* same as sidebar */
}

/* same rule as for sidebar */
@media (max-width: 990px) {
@@ -134,10 +134,11 @@ function QuerySource(props) {
// choose data source id for new queries
if (dataSourcesLoaded && queryFlags.isNew) {
const firstDataSourceId = dataSources.length > 0 ? dataSources[0].id : null;
const selectedDataSourceId = parseInt(localStorage.getItem("lastSelectedDataSourceId")) || null;

handleDataSourceChange(
chooseDataSourceId([query.data_source_id, selectedDataSourceId, firstDataSourceId], dataSources)
chooseDataSourceId(
[query.data_source_id, localStorage.getItem("lastSelectedDataSourceId"), firstDataSourceId],
dataSources
)
);
}
}, [query.data_source_id, queryFlags.isNew, dataSourcesLoaded, dataSources, handleDataSourceChange]);
@@ -37,10 +37,9 @@

&.active {
overflow: visible;
max-height: unset !important;
.ant-input {
resize: vertical;
height: 30vh;
max-height: 150px - 15px * 2;
}
}
}
@@ -9,7 +9,6 @@ import QueryControlDropdown from "@/components/EditVisualizationButton/QueryCont
import EditVisualizationButton from "@/components/EditVisualizationButton";
import useQueryResultData from "@/lib/useQueryResultData";
import { durationHumanize, pluralize, prettySize } from "@/lib/utils";
import { isUndefined } from "lodash";

import "./QueryExecutionMetadata.less";

@@ -52,8 +51,7 @@ export default function QueryExecutionMetadata({
"Result truncated to " +
queryResultData.rows.length +
" rows. Databricks may truncate query results that are unstably large."
|
||||
}
>
}>
<WarningTwoTone twoToneColor="#FF9800" />
</Tooltip>
</span>
@@ -69,9 +67,10 @@ export default function QueryExecutionMetadata({
)}
{isQueryExecuting && <span>Running…</span>}
</span>
{!isUndefined(queryResultData.metadata.data_scanned) && !isQueryExecuting && (
{queryResultData.metadata.data_scanned && (
<span className="m-l-5">
Data Scanned <strong>{prettySize(queryResultData.metadata.data_scanned)}</strong>
Data Scanned
<strong>{prettySize(queryResultData.metadata.data_scanned)}</strong>
</span>
)}
</span>
@@ -2,7 +2,7 @@ import PropTypes from "prop-types";
import React from "react";

export function QuerySourceTypeIcon(props) {
return <img src={`/static/images/db-logos/${props.type}.png`} width="20" alt={props.alt} />;
return <img src={`static/images/db-logos/${props.type}.png`} width="20" alt={props.alt} />;
}

QuerySourceTypeIcon.propTypes = {