Mirror of https://github.com/getredash/redash.git, synced 2025-12-25 01:03:20 -05:00

Compare commits: 24.05.0-dev ... 24.08.1-dev (51 commits)
| SHA1 |
|---|
| c1a60bf6d2 |
| 72203655ec |
| 5257e39282 |
| ec70ff4408 |
| ed8c05f634 |
| 86b75db82e |
| 660d04b0f1 |
| fc1e1f7a01 |
| 8725fa4737 |
| ea0b3cbe3a |
| 714b950fde |
| a9c9f085af |
| a69f7fb2fe |
| c244e75352 |
| 80f7ba1b91 |
| d2745e5acc |
| 4114227471 |
| 8fc4ce1494 |
| ebb0e2c9ad |
| 57a79bc96b |
| 77f108dd09 |
| dd1a9b96da |
| d9282b2688 |
| 28c39219af |
| a37ef3b235 |
| 0056aa68f8 |
| 76b5a30fd9 |
| db4fdd003e |
| 4cb32fc1c3 |
| a6c728b99c |
| 01e036d0a9 |
| 17fe69f551 |
| bceaab0496 |
| 70dd05916f |
| 60a12e906e |
| ec051a8939 |
| 60d3c66a8b |
| bd4ba96c43 |
| 10a46fd33c |
| c874eb6b11 |
| f3a323695f |
| 408ba78bd0 |
| 58cc49bc88 |
| 753ea846ff |
| 1b946b59ec |
| 4569191113 |
| 62890c3ec4 |
| bd115e7f5f |
| bd17662005 |
| b7f22b1896 |
| 897c683980 |
.ci/compose.ci.yaml (new file, 25 lines)

@@ -0,0 +1,25 @@
services:
  redash:
    build: ../
    command: manage version
    depends_on:
      - postgres
      - redis
    ports:
      - "5000:5000"
    environment:
      PYTHONUNBUFFERED: 0
      REDASH_LOG_LEVEL: "INFO"
      REDASH_REDIS_URL: "redis://redis:6379/0"
      POSTGRES_PASSWORD: "FmTKs5vX52ufKR1rd8tn4MoSP7zvCJwb"
      REDASH_DATABASE_URL: "postgresql://postgres:FmTKs5vX52ufKR1rd8tn4MoSP7zvCJwb@postgres/postgres"
      REDASH_COOKIE_SECRET: "2H9gNG9obnAQ9qnR9BDTQUph6CbXKCzF"
  redis:
    image: redis:7-alpine
    restart: unless-stopped
  postgres:
    image: pgautoupgrade/pgautoupgrade:latest
    command: "postgres -c fsync=off -c full_page_writes=off -c synchronous_commit=OFF"
    restart: unless-stopped
    environment:
      POSTGRES_HOST_AUTH_METHOD: "trust"
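A rough local sketch of how the backend-unit-tests job below drives this file; Docker Compose v2 is assumed, and the env values mirror the workflow's own settings:

```bash
# Local approximation of the backend-unit-tests job (see ci.yml below).
export COMPOSE_FILE=.ci/compose.ci.yaml
export COMPOSE_PROJECT_NAME=redash

touch .env   # the build expects an env file to exist
docker compose build --build-arg install_groups="main,all_ds,dev" \
                     --build-arg skip_frontend_build=true
docker compose up -d
docker compose -p redash run --rm postgres psql -h postgres -U postgres \
    -c "create database tests;"   # test database, as in the workflow
docker compose -p redash run --rm redash manage ds list_types
```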
.ci/compose.cypress.yaml (new file, 73 lines)

@@ -0,0 +1,73 @@
x-redash-service: &redash-service
  build:
    context: ../
    args:
      install_groups: "main"
      code_coverage: ${CODE_COVERAGE}
x-redash-environment: &redash-environment
  REDASH_LOG_LEVEL: "INFO"
  REDASH_REDIS_URL: "redis://redis:6379/0"
  POSTGRES_PASSWORD: "FmTKs5vX52ufKR1rd8tn4MoSP7zvCJwb"
  REDASH_DATABASE_URL: "postgresql://postgres:FmTKs5vX52ufKR1rd8tn4MoSP7zvCJwb@postgres/postgres"
  REDASH_RATELIMIT_ENABLED: "false"
  REDASH_ENFORCE_CSRF: "true"
  REDASH_COOKIE_SECRET: "2H9gNG9obnAQ9qnR9BDTQUph6CbXKCzF"
services:
  server:
    <<: *redash-service
    command: server
    depends_on:
      - postgres
      - redis
    ports:
      - "5000:5000"
    environment:
      <<: *redash-environment
      PYTHONUNBUFFERED: 0
  scheduler:
    <<: *redash-service
    command: scheduler
    depends_on:
      - server
    environment:
      <<: *redash-environment
  worker:
    <<: *redash-service
    command: worker
    depends_on:
      - server
    environment:
      <<: *redash-environment
      PYTHONUNBUFFERED: 0
  cypress:
    ipc: host
    build:
      context: ../
      dockerfile: .ci/Dockerfile.cypress
    depends_on:
      - server
      - worker
      - scheduler
    environment:
      CYPRESS_baseUrl: "http://server:5000"
      CYPRESS_coverage: ${CODE_COVERAGE}
      PERCY_TOKEN: ${PERCY_TOKEN}
      PERCY_BRANCH: ${CIRCLE_BRANCH}
      PERCY_COMMIT: ${CIRCLE_SHA1}
      PERCY_PULL_REQUEST: ${CIRCLE_PR_NUMBER}
      COMMIT_INFO_BRANCH: ${CIRCLE_BRANCH}
      COMMIT_INFO_MESSAGE: ${COMMIT_INFO_MESSAGE}
      COMMIT_INFO_AUTHOR: ${CIRCLE_USERNAME}
      COMMIT_INFO_SHA: ${CIRCLE_SHA1}
      COMMIT_INFO_REMOTE: ${CIRCLE_REPOSITORY_URL}
      CYPRESS_PROJECT_ID: ${CYPRESS_PROJECT_ID}
      CYPRESS_RECORD_KEY: ${CYPRESS_RECORD_KEY}
  redis:
    image: redis:7-alpine
    restart: unless-stopped
  postgres:
    image: pgautoupgrade/pgautoupgrade:latest
    command: "postgres -c fsync=off -c full_page_writes=off -c synchronous_commit=OFF"
    restart: unless-stopped
    environment:
      POSTGRES_HOST_AUTH_METHOD: "trust"
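The `x-redash-service` and `x-redash-environment` extension fields are YAML anchors merged into `server`, `scheduler`, and `worker` via `<<:`, so the three services share one build and environment definition. A rough sketch of how the e2e job drives this stack locally; the exported values mirror the workflow env and are assumptions for local use:

```bash
# Local approximation of the frontend-e2e-tests setup steps (see ci.yml below).
export COMPOSE_FILE=.ci/compose.cypress.yaml
export COMPOSE_PROJECT_NAME=cypress

touch .env
yarn build                              # build frontend assets on the host
yarn cypress build                      # build the compose services
yarn cypress start -- --skip-db-seed    # bring the stack up
docker compose run cypress yarn cypress db-seed
```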
.ci/docker_build (new executable file, 39 lines)

@@ -0,0 +1,39 @@
#!/bin/bash

# This script only needs to run on the main Redash repo

if [ "${GITHUB_REPOSITORY}" != "getredash/redash" ]; then
  echo "Skipping image build for Docker Hub, as this isn't the main Redash repository"
  exit 0
fi

if [ "${GITHUB_REF_NAME}" != "master" ] && [ "${GITHUB_REF_NAME}" != "preview-image" ]; then
  echo "Skipping image build for Docker Hub, as this isn't the 'master' nor 'preview-image' branch"
  exit 0
fi

if [ "x${DOCKER_USER}" = "x" ] || [ "x${DOCKER_PASS}" = "x" ]; then
  echo "Skipping image build for Docker Hub, as the login details aren't available"
  exit 0
fi

set -e
VERSION=$(jq -r .version package.json)
VERSION_TAG="$VERSION.b${GITHUB_RUN_ID}.${GITHUB_RUN_NUMBER}"

export DOCKER_BUILDKIT=1
export COMPOSE_DOCKER_CLI_BUILD=1

docker login -u "${DOCKER_USER}" -p "${DOCKER_PASS}"

DOCKERHUB_REPO="redash/redash"
DOCKER_TAGS="-t redash/redash:preview -t redash/preview:${VERSION_TAG}"

# Build the docker container
docker build --build-arg install_groups="main,all_ds,dev" ${DOCKER_TAGS} .

# Push the container to the preview build locations
docker push "${DOCKERHUB_REPO}:preview"
docker push "redash/preview:${VERSION_TAG}"

echo "Built: ${VERSION_TAG}"
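An illustrative sketch of the preview tag the script composes from package.json and the GitHub Actions run identifiers; all concrete values below are made up:

```bash
# Illustrative only: how the preview tag is assembled (values are made up).
VERSION="24.08.1-dev"; GITHUB_RUN_ID=987654321; GITHUB_RUN_NUMBER=42
VERSION_TAG="$VERSION.b${GITHUB_RUN_ID}.${GITHUB_RUN_NUMBER}"
echo "redash/preview:${VERSION_TAG}"   # -> redash/preview:24.08.1-dev.b987654321.42
```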
.ci/pack (new executable file, 9 lines)

@@ -0,0 +1,9 @@
#!/bin/bash
NAME=redash
VERSION=$(jq -r .version package.json)
FULL_VERSION=$VERSION+b$CIRCLE_BUILD_NUM
FILENAME=$NAME.$FULL_VERSION.tar.gz

mkdir -p /tmp/artifacts/

tar -zcv -f /tmp/artifacts/$FILENAME --exclude=".git" --exclude="optipng*" --exclude="cypress" --exclude="*.pyc" --exclude="*.pyo" --exclude="venv" *
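An illustrative sketch of the artifact name the script produces; `CIRCLE_BUILD_NUM` is supplied by CircleCI, and the values below are made up:

```bash
# Illustrative only: the tarball name .ci/pack generates (values are made up).
NAME=redash; VERSION="24.08.1-dev"; CIRCLE_BUILD_NUM=1234
echo "$NAME.$VERSION+b$CIRCLE_BUILD_NUM.tar.gz"   # -> redash.24.08.1-dev+b1234.tar.gz
```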
.ci/update_version (new executable file, 6 lines)

@@ -0,0 +1,6 @@
#!/bin/bash
VERSION=$(jq -r .version package.json)
FULL_VERSION=${VERSION}+b${GITHUB_RUN_ID}.${GITHUB_RUN_NUMBER}

sed -ri "s/^__version__ = '([A-Za-z0-9.-]*)'/__version__ = '${FULL_VERSION}'/" redash/__init__.py
sed -i "s/dev/${GITHUB_SHA}/" client/app/version.json
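An illustrative sketch of the first sed's effect on redash/__init__.py; the version strings are made up:

```bash
# Illustrative only: what the version rewrite does (GNU sed assumed).
echo "__version__ = '24.08.1-dev'" \
  | sed -r "s/^__version__ = '([A-Za-z0-9.-]*)'/__version__ = '24.08.1-dev+b987654321.42'/"
# -> __version__ = '24.08.1-dev+b987654321.42'
```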
.github/workflows/ci.yml (vendored, 79 lines changed)

@@ -3,29 +3,12 @@ on:
  push:
    branches:
      - master
    tags:
      - '*'
  pull_request_target:
    branches:
      - master
env:
  CYPRESS_COVERAGE: "true"
  NODE_VERSION: 18
  YARN_VERSION: 1.22.22
  REDASH_COOKIE_SECRET: 2H9gNG9obnAQ9qnR9BDTQUph6CbXKCzF
  REDASH_SECRET_KEY: 2H9gNG9obnAQ9qnR9BDTQUph6CbXKCzF
  COMPOSE_DOCKER_CLI_BUILD: 1
  DOCKER_BUILDKIT: 1
  FRONTEND_BUILD_MODE: 1
  INSTALL_GROUPS: main,all_ds,dev
  PERCY_BRANCH: ${{github.head_ref || github.ref_name}}
  PERCY_COMMIT: ${{github.sha}}
  PERCY_PULL_REQUEST: ${{github.event.number}}
  COMMIT_INFO_BRANCH: ${{github.head_ref || github.ref_name}}
  COMMIT_INFO_MESSAGE: ${{github.event.head_commit.message}}
  COMMIT_INFO_AUTHOR: ${{github.event.pull_request.user.login}}
  COMMIT_INFO_SHA: ${{github.sha}}
  COMMIT_INFO_REMOTE: ${{github.server_url}}/${{github.repository}}
jobs:
  backend-lint:
    runs-on: ubuntu-22.04

@@ -40,7 +23,7 @@ jobs:
      - uses: actions/setup-python@v5
        with:
          python-version: '3.8'
      - run: sudo pip install black==24.3.0 ruff==0.1.9
      - run: sudo pip install black==23.1.0 ruff==0.0.287
      - run: ruff check .
      - run: black --check .

@@ -48,7 +31,10 @@
    runs-on: ubuntu-22.04
    needs: backend-lint
    env:
      FRONTEND_BUILD_MODE: 0
      COMPOSE_FILE: .ci/compose.ci.yaml
      COMPOSE_PROJECT_NAME: redash
      COMPOSE_DOCKER_CLI_BUILD: 1
      DOCKER_BUILDKIT: 1
    steps:
      - if: github.event.pull_request.mergeable == 'false'
        name: Exit if PR is not mergeable

@@ -60,16 +46,15 @@
      - name: Build Docker Images
        run: |
          set -x
          touch .env
          docker compose build
          docker compose build --build-arg install_groups="main,all_ds,dev" --build-arg skip_frontend_build=true
          docker compose up -d
          sleep 10
      - name: Create Test Database
        run: docker compose run --rm postgres psql -h postgres -U postgres -c "create database tests;"
        run: docker compose -p redash run --rm postgres psql -h postgres -U postgres -c "create database tests;"
      - name: List Enabled Query Runners
        run: docker compose run --rm server manage ds list_types
        run: docker compose -p redash run --rm redash manage ds list_types
      - name: Run Tests
        run: docker compose run --name tests server tests --junitxml=junit.xml --cov-report=xml --cov=redash --cov-config=.coveragerc tests/
        run: docker compose -p redash run --name tests redash tests --junitxml=junit.xml --cov-report=xml --cov=redash --cov-config=.coveragerc tests/
      - name: Copy Test Results
        run: |
          mkdir -p /tmp/test-results/unit-tests

@@ -77,17 +62,15 @@
          docker cp tests:/app/junit.xml /tmp/test-results/unit-tests/results.xml
      - name: Upload coverage reports to Codecov
        uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
      - name: Store Test Results
        uses: actions/upload-artifact@v4
        uses: actions/upload-artifact@v3
        with:
          name: backend-test-results
          name: test-results
          path: /tmp/test-results
      - name: Store Coverage Results
        uses: actions/upload-artifact@v4
        uses: actions/upload-artifact@v3
        with:
          name: backend-coverage
          name: coverage
          path: coverage.xml

  frontend-lint:

@@ -107,14 +90,13 @@
      - name: Install Dependencies
        run: |
          npm install --global --force yarn@$YARN_VERSION
          yarn cache clean
          yarn --frozen-lockfile --network-concurrency 1
          yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
      - name: Run Lint
        run: yarn lint:ci
      - name: Store Test Results
        uses: actions/upload-artifact@v4
        uses: actions/upload-artifact@v3
        with:
          name: frontend-test-results
          name: test-results
          path: /tmp/test-results

  frontend-unit-tests:

@@ -135,24 +117,21 @@
      - name: Install Dependencies
        run: |
          npm install --global --force yarn@$YARN_VERSION
          yarn cache clean
          yarn --frozen-lockfile --network-concurrency 1
          yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
      - name: Run App Tests
        run: yarn test
      - name: Run Visualizations Tests
        run: |
          cd viz-lib
          yarn test
        run: cd viz-lib && yarn test
      - run: yarn lint

  frontend-e2e-tests:
    runs-on: ubuntu-22.04
    needs: frontend-lint
    env:
      COMPOSE_FILE: .ci/compose.cypress.yaml
      COMPOSE_PROJECT_NAME: cypress
      CYPRESS_INSTALL_BINARY: 0
      PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: 1
      INSTALL_GROUPS: main
      COMPOSE_PROFILES: e2e
      PERCY_TOKEN: ${{ secrets.PERCY_TOKEN }}
      CYPRESS_PROJECT_ID: ${{ secrets.CYPRESS_PROJECT_ID }}
      CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}

@@ -168,16 +147,17 @@
        with:
          node-version: ${{ env.NODE_VERSION }}
          cache: 'yarn'
      - name: Enable Code Coverage Report For Master Branch
        if: endsWith(github.ref, '/master')
        run: |
          echo "CODE_COVERAGE=true" >> "$GITHUB_ENV"
      - name: Install Dependencies
        run: |
          npm install --global --force yarn@$YARN_VERSION
          yarn cache clean
          yarn --frozen-lockfile --network-concurrency 1
          yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
      - name: Setup Redash Server
        run: |
          set -x
          touch .env
          yarn build
          yarn cypress build
          yarn cypress start -- --skip-db-seed
          docker compose run cypress yarn cypress db-seed

@@ -189,12 +169,7 @@
      - name: Copy Code Coverage Results
        run: docker cp cypress:/usr/src/app/coverage ./coverage || true
      - name: Store Coverage Results
        uses: actions/upload-artifact@v4
        uses: actions/upload-artifact@v3
        with:
          name: frontend-coverage
          name: coverage
          path: coverage
      - uses: actions/upload-artifact@v4
        with:
          name: frontend
          path: client/dist
          retention-days: 1
.github/workflows/periodic-snapshot.yml (vendored, 22 lines changed)

@@ -3,7 +3,7 @@ name: Periodic Snapshot
# 10 minutes after midnight on the first of every month
on:
  schedule:
    - cron: "10 0 1 * *"
    - cron: '10 0 1 * *'

permissions:
  contents: write

@@ -16,13 +16,15 @@
        with:
          ssh-key: ${{secrets.ACTION_PUSH_KEY}}
      - run: |
          date="$(date +%y.%m).0-dev"
          gawk -i inplace -F: -v q=\" -v tag=$date '/^  "version": / { print $1 FS, q tag q ","; next} { print }' package.json
          gawk -i inplace -F= -v q=\" -v tag=$date '/^__version__ =/ { print $1 FS, q tag q; next} { print }' redash/__init__.py
          gawk -i inplace -F= -v q=\" -v tag=$date '/^version =/ { print $1 FS, q tag q; next} { print }' pyproject.toml
          git config user.name github-actions
          git config user.email github-actions@github.com
          # https://api.github.com/users/github-actions[bot]
          git config user.name 'github-actions[bot]'
          git config user.email '41898282+github-actions[bot]@users.noreply.github.com'

          TAG_NAME="$(date +%y.%m).0-dev"
          gawk -i inplace -F: -v q=\" -v tag=${TAG_NAME} '/^  "version": / { print $1 FS, q tag q ","; next} { print }' package.json
          gawk -i inplace -F= -v q=\" -v tag=${TAG_NAME} '/^__version__ =/ { print $1 FS, q tag q; next} { print }' redash/__init__.py
          gawk -i inplace -F= -v q=\" -v tag=${TAG_NAME} '/^version =/ { print $1 FS, q tag q; next} { print }' pyproject.toml
          git add package.json redash/__init__.py pyproject.toml
          git commit -m "Snapshot: ${date}"
          git tag $date
          git push --atomic origin master refs/tags/$date
          git commit -m "Snapshot: ${TAG_NAME}"
          git tag ${TAG_NAME}
          git push --atomic origin master refs/tags/${TAG_NAME}
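An illustrative sketch of what the gawk rewrite does to package.json; the tag value is made up, and package.json's two-space indent is assumed:

```bash
# Illustrative only: the in-place version bump (requires GNU awk for -i inplace).
printf '{\n  "version": "24.08.0-dev",\n}\n' > /tmp/package.json
TAG_NAME="24.09.0-dev"
gawk -i inplace -F: -v q=\" -v tag=${TAG_NAME} \
  '/^  "version": / { print $1 FS, q tag q ","; next } { print }' /tmp/package.json
cat /tmp/package.json   # "version": "24.09.0-dev",
```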
.github/workflows/preview-image.yml (vendored, 135 lines changed)

@@ -1,20 +1,15 @@
name: Preview Image
on:
  workflow_run:
    workflows:
      - Tests
    types:
      - completed
    branches:
      - master
  push:
    tags:
      - '*-dev'

env:
  DOCKER_REPO: redash
  NODE_VERSION: 18

jobs:
  build-skip-check:
    runs-on: ubuntu-22.04
    if: ${{ github.event.workflow_run.conclusion == 'success' }}
    outputs:
      skip: ${{ steps.skip-check.outputs.skip }}
    steps:

@@ -37,118 +32,62 @@
    runs-on: ubuntu-22.04
    needs:
      - build-skip-check
    outputs:
      version: ${{ steps.version.outputs.VERSION_TAG }}
      repo: ${{ steps.version.outputs.DOCKER_REPO }}
    if: needs.build-skip-check.outputs.skip == 'false'
    strategy:
      fail-fast: false
      matrix:
        platform:
          - linux/amd64
          - linux/arm64
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 1
          ref: ${{ github.event.push.after }}
      - uses: dawidd6/action-download-artifact@v3

      - uses: actions/setup-node@v4
        with:
          name: frontend
          workflow: ci.yml
          github_token: ${{ secrets.GITHUB_TOKEN }}
          run_id: ${{ github.event.workflow_run.id }}
          path: client/dist
          node-version: ${{ env.NODE_VERSION }}
          cache: 'yarn'

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
        with:
          platforms: arm64

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Login to DockerHub
        uses: docker/login-action@v3
        with:
          username: ${{ vars.DOCKER_USER }}
          password: ${{ secrets.DOCKER_PASS }}

      - name: Install Dependencies
        run: |
          npm install --global --force yarn@1.22.22
          yarn cache clean && yarn --frozen-lockfile --network-concurrency 1

      - name: Set version
        id: version
        run: |
          set -x
          VERSION=$(jq -r .version package.json)
          FULL_VERSION=${VERSION}-b${GITHUB_RUN_ID}.${GITHUB_RUN_NUMBER}
          sed -ri "s/^__version__ = '([A-Za-z0-9.-]*)'/__version__ = '${FULL_VERSION}'/" redash/__init__.py
          sed -i "s/dev/${GITHUB_SHA}/" client/app/version.json
          echo "VERSION_TAG=$FULL_VERSION" >> "$GITHUB_OUTPUT"
          platform=${{ matrix.platform }}
          echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
          echo "SCOPE=${platform//\//-}" >> $GITHUB_ENV
          if [[ "${{ vars.DOCKER_REPO }}" != "" ]]; then
            echo "DOCKER_REPO=${{ vars.DOCKER_REPO }}" >> $GITHUB_ENV
            echo "DOCKER_REPO=${{ vars.DOCKER_REPO }}" >> $GITHUB_OUTPUT
          else
            echo "DOCKER_REPO=${DOCKER_REPO}" >> $GITHUB_ENV
            echo "DOCKER_REPO=${DOCKER_REPO}" >> $GITHUB_OUTPUT
          fi
          .ci/update_version
          VERSION_TAG=$(jq -r .version package.json)
          echo "VERSION_TAG=$VERSION_TAG" >> "$GITHUB_OUTPUT"

      # TODO: We can use GitHub Actions's matrix option to reduce the build time.
      - name: Build and push preview image to Docker Hub
        uses: docker/build-push-action@v5
        id: build
        uses: docker/build-push-action@v4
        with:
          push: true
          tags: |
            redash/redash:preview
            redash/preview:${{ steps.version.outputs.VERSION_TAG }}
          context: .
          cache-from: type=gha,scope=${{ env.SCOPE }}
          cache-to: type=gha,mode=max,scope=${{ env.SCOPE }}
          platforms: ${{ matrix.platform }}
          outputs: type=image,name=${{ env.DOCKER_REPO }}/redash,push-by-digest=true,name-canonical=true,push=true
          build-args: |
            FRONTEND_BUILD_MODE=1
            test_all_deps=true
          cache-from: type=gha,scope=multi-platform
          cache-to: type=gha,mode=max,scope=multi-platform
          platforms: linux/amd64,linux/arm64
        env:
          DOCKER_CONTENT_TRUST: true
      - name: Export digest
        run: |
          mkdir -p /tmp/digests
          digest="${{ steps.build.outputs.digest }}"
          touch "/tmp/digests/${digest#sha256:}"
      - name: Upload digest
        uses: actions/upload-artifact@v4
        with:
          name: digests-${{ env.PLATFORM_PAIR }}
          path: /tmp/digests/*
          if-no-files-found: error
          retention-days: 1

  publish-docker-manifest:
    runs-on: ubuntu-22.04
    needs:
      - build-skip-check
      - build-docker-image
    if: needs.build-skip-check.outputs.skip == 'false'
    steps:
      - name: Download digests
        uses: actions/download-artifact@v4
        with:
          pattern: digests-*
          path: /tmp/digests
          merge-multiple: true
      - name: Setup Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ needs.build-docker-image.outputs.repo }}/redash
          tags: preview
      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ vars.DOCKER_USER }}
          password: ${{ secrets.DOCKER_PASS }}
      - name: Create manifest list and push
        working-directory: /tmp/digests
        run: |
          docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
            $(printf '${{ needs.build-docker-image.outputs.repo }}/redash@sha256:%s ' *)
      - name: Inspect image
        run: |
          REDASH_IMAGE="${{ needs.build-docker-image.outputs.repo }}/redash:${{ steps.meta.outputs.version }}"
          docker buildx imagetools inspect $REDASH_IMAGE
      - name: Push image ${{ needs.build-docker-image.outputs.repo }}/preview image
        run: |
          REDASH_IMAGE="${{ needs.build-docker-image.outputs.repo }}/redash:preview"
          PREVIEW_IMAGE="${{ needs.build-docker-image.outputs.repo }}/preview:${{ needs.build-docker-image.outputs.version }}"
          docker buildx imagetools create --tag $PREVIEW_IMAGE $REDASH_IMAGE
      - name: "Failure: output container logs to console"
        if: failure()
        run: docker compose logs
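The per-platform builds push by digest only; the publish job then stitches those digests into one multi-arch tag. An illustrative sketch of that merge, using the same `imagetools` commands the workflow runs (the digests below are made up):

```bash
# Illustrative only: merging per-platform digests into a single multi-arch tag.
docker buildx imagetools create -t redash/redash:preview \
    redash/redash@sha256:aaaa0000000000000000000000000000000000000000000000000000000000aa \
    redash/redash@sha256:bbbb0000000000000000000000000000000000000000000000000000000000bb
docker buildx imagetools inspect redash/redash:preview   # shows both platforms
```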
.gitignore (vendored, 1 line changed)

@@ -17,6 +17,7 @@ client/dist
_build
.vscode
.env
.tool-versions

dump.rdb

@@ -38,7 +38,9 @@ request_review: author
#
# These can be used to tell other automation to avoid our PRs.
#
labels: ["Skip CI"]
labels:
  - restyled
  - "Skip CI"

# Labels to ignore
#

@@ -50,13 +52,13 @@ labels: ["Skip CI"]
# Restylers to run, and how
restylers:
  - name: black
    image: restyled/restyler-black:v19.10b0
    image: restyled/restyler-black:v24.4.2
    include:
      - redash
      - tests
      - migrations/versions
  - name: prettier
    image: restyled/restyler-prettier:v1.19.1-2
    image: restyled/restyler-prettier:v3.3.2-2
    command:
      - prettier
      - --write
Dockerfile (60 lines changed)

@@ -1,37 +1,35 @@
# Controls whether to build the frontend assets
ARG FRONTEND_BUILD_MODE=0
FROM node:18-bookworm AS frontend-builder

# MODE 0: create empty files. useful for backend tests
FROM alpine:3.19 as frontend-builder-0
RUN \
  mkdir -p /frontend/client/dist && \
  touch /frontend/client/dist/multi_org.html && \
  touch /frontend/client/dist/index.html

# MODE 1: copy static frontend from host, useful for CI to ignore building static content multiple times
FROM alpine:3.19 as frontend-builder-1
COPY client/dist /frontend/client/dist

# MODE 2: build static content in docker, can be used for a local development
FROM node:18-bookworm as frontend-builder-2
RUN npm install --global --force yarn@1.22.22

# Controls whether to build the frontend assets
ARG skip_frontend_build

ENV CYPRESS_INSTALL_BINARY=0
ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1

RUN useradd -m -d /frontend redash
USER redash

WORKDIR /frontend
COPY --chown=redash package.json yarn.lock .yarnrc /frontend/
COPY --chown=redash viz-lib /frontend/viz-lib
COPY --chown=redash scripts /frontend/scripts

RUN yarn --frozen-lockfile --network-concurrency 1;
# Controls whether to instrument code for coverage information
ARG code_coverage
ENV BABEL_ENV=${code_coverage:+test}

# Avoid issues caused by lags in disk and network I/O speeds when working on top of QEMU emulation for multi-platform image building.
RUN yarn config set network-timeout 300000

RUN if [ "x$skip_frontend_build" = "x" ] ; then yarn --frozen-lockfile --network-concurrency 1; fi

COPY --chown=redash client /frontend/client
COPY --chown=redash webpack.config.js /frontend/
RUN yarn build
RUN if [ "x$skip_frontend_build" = "x" ] ; then yarn build; else mkdir -p /frontend/client/dist && touch /frontend/client/dist/multi_org.html && touch /frontend/client/dist/index.html; fi

FROM frontend-builder-${FRONTEND_BUILD_MODE} as frontend-builder

FROM python:3.8-slim-bookworm
FROM python:3.10-slim-bookworm

EXPOSE 5000

@@ -66,18 +64,17 @@ RUN apt-get update && \
  apt-get clean && \
  rm -rf /var/lib/apt/lists/*

RUN \
  curl https://packages.microsoft.com/config/debian/12/prod.list > /etc/apt/sources.list.d/mssql-release.list && \
  curl https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor -o /usr/share/keyrings/microsoft-prod.gpg && \
  apt update && \
  ACCEPT_EULA=Y apt install -y --no-install-recommends msodbcsql18 && \
  apt clean && \
  rm -rf /var/lib/apt/lists/*

ARG TARGETPLATFORM
ARG databricks_odbc_driver_url=https://databricks-bi-artifacts.s3.us-east-2.amazonaws.com/simbaspark-drivers/odbc/2.6.26/SimbaSparkODBC-2.6.26.1045-Debian-64bit.zip
RUN if [ "$TARGETPLATFORM" = "linux/amd64" ]; then \
  curl "$databricks_odbc_driver_url" --location --output /tmp/simba_odbc.zip \
  curl https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor -o /usr/share/keyrings/microsoft-prod.gpg \
  && curl https://packages.microsoft.com/config/debian/12/prod.list > /etc/apt/sources.list.d/mssql-release.list \
  && apt-get update \
  && ACCEPT_EULA=Y apt-get install -y --no-install-recommends msodbcsql18 \
  && apt-get clean \
  && rm -rf /var/lib/apt/lists/* \
  && curl "$databricks_odbc_driver_url" --location --output /tmp/simba_odbc.zip \
  && chmod 600 /tmp/simba_odbc.zip \
  && unzip /tmp/simba_odbc.zip -d /tmp/simba \
  && dpkg -i /tmp/simba/*.deb \

@@ -92,13 +89,16 @@ ENV POETRY_HOME=/etc/poetry
ENV POETRY_VIRTUALENVS_CREATE=false
RUN curl -sSL https://install.python-poetry.org | python3 -

# Avoid crashes, including corrupted cache artifacts, when building multi-platform images with GitHub Actions.
RUN /etc/poetry/bin/poetry cache clear pypi --all

COPY pyproject.toml poetry.lock ./

ARG POETRY_OPTIONS="--no-root --no-interaction --no-ansi"
# for LDAP authentication, install with `ldap3` group
# disabled by default due to GPL license conflict
ARG INSTALL_GROUPS="main,all_ds,dev"
RUN /etc/poetry/bin/poetry install --only $INSTALL_GROUPS $POETRY_OPTIONS
ARG install_groups="main,all_ds,dev"
RUN /etc/poetry/bin/poetry install --only $install_groups $POETRY_OPTIONS

COPY --chown=redash . /app
COPY --from=frontend-builder --chown=redash /frontend/client/dist /app/client/dist
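The numbered `frontend-builder-N` stages are selected at build time through the `FRONTEND_BUILD_MODE` build argument, via `FROM frontend-builder-${FRONTEND_BUILD_MODE} as frontend-builder`. An illustrative sketch (the image tag is made up):

```bash
# Illustrative only: pick a frontend-builder stage at build time.
#   0 = empty stub files (backend tests)
#   1 = copy client/dist from the host (CI reuses a prebuilt bundle)
#   2 = build the frontend inside Docker (local development)
docker build --build-arg FRONTEND_BUILD_MODE=2 -t redash:local .
```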
Makefile (24 lines changed)

@@ -1,18 +1,10 @@
.PHONY: compose_build up test_db create_database create_db clean clean-all down tests lint backend-unit-tests frontend-unit-tests pydeps test build watch start redis-cli bash

export COMPOSE_DOCKER_CLI_BUILD=1
export DOCKER_BUILDKIT=1
export COMPOSE_PROFILES=local
.PHONY: compose_build up test_db create_database clean clean-all down tests lint backend-unit-tests frontend-unit-tests test build watch start redis-cli bash

compose_build: .env
	docker compose build
	COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker compose build

up:
	docker compose up -d redis postgres
	docker compose exec -u postgres postgres psql postgres --csv \
		-1tqc "SELECT table_name FROM information_schema.tables WHERE table_name = 'organizations'" 2> /dev/null \
		| grep -q "organizations" || make create_database
	docker compose up -d --build
	COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker compose up -d --build

test_db:
	@for i in `seq 1 5`; do \

@@ -21,11 +13,9 @@ test_db:
	done
	docker compose exec postgres sh -c 'psql -U postgres -c "drop database if exists tests;" && psql -U postgres -c "create database tests;"'

create_db: .env
create_database: .env
	docker compose run server create_db

create_database: create_db

clean:
	docker compose down
	docker compose --project-name cypress down

@@ -54,12 +44,6 @@ env: .env
format:
	pre-commit run --all-files

pydeps:
	pip3 install wheel
	pip3 install --upgrade black ruff launchpadlib pip setuptools
	pip3 install poetry
	poetry install --only main,all_ds,dev

tests:
	docker compose run server tests
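A rough sketch of the local workflow these targets support, under the assumption of a working Docker setup:

```bash
# Sketch: common local flow through the Makefile targets above.
make compose_build     # build the images
make up                # start services; the recipe creates the database if missing
make test_db           # (re)create the "tests" database
make tests             # run the backend suite in the server container
```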
@@ -1,48 +1,25 @@
#!/bin/bash
set -e

if [ -z $REDASH_REDIS_URL ]; then
  export REDASH_REDIS_URL=redis://:${REDASH_REDIS_PASSWORD}@${REDASH_REDIS_HOSTNAME}:${REDASH_REDIS_PORT}/${REDASH_REDIS_NAME}
fi

if [ -z $REDASH_DATABASE_URL ]; then
  export REDASH_DATABASE_URL=postgresql://${REDASH_DATABASE_USER}:${REDASH_DATABASE_PASSWORD}@${REDASH_DATABASE_HOSTNAME}:${REDASH_DATABASE_PORT}/${REDASH_DATABASE_NAME}
fi

scheduler() {
  echo "Starting RQ scheduler..."

  case $REDASH_PRODUCTION in
    true)
      echo "Starting RQ scheduler in production mode"
      exec ./manage.py rq scheduler
      ;;
    *)
      echo "Starting RQ scheduler in dev mode"
      exec watchmedo auto-restart \
        --directory=./redash/ \
        --pattern=*.py \
        --recursive -- ./manage.py rq scheduler $QUEUES
      ;;
  esac
  exec /app/manage.py rq scheduler
}

dev_scheduler() {
  echo "Starting dev RQ scheduler..."

  exec watchmedo auto-restart --directory=./redash/ --pattern=*.py --recursive -- ./manage.py rq scheduler
}

worker() {
  echo "Starting RQ worker..."

  export WORKERS_COUNT=${WORKERS_COUNT:-2}
  export QUEUES=${QUEUES:-}
  case $REDASH_PRODUCTION in
    true)
      echo "Starting RQ worker in production mode"
      exec supervisord -c worker.conf
      ;;
    *)
      echo "Starting RQ worker in dev mode"
      exec watchmedo auto-restart \
        --directory=./redash/ \
        --pattern=*.py \
        --recursive -- ./manage.py rq worker $QUEUES
      ;;
  esac

  exec supervisord -c worker.conf
}

workers_healthcheck() {

@@ -58,63 +35,22 @@ workers_healthcheck() {
  fi
}

dev_worker() {
  echo "Starting dev RQ worker..."

  exec watchmedo auto-restart --directory=./redash/ --pattern=*.py --recursive -- ./manage.py rq worker $QUEUES
}

server() {
  # Recycle gunicorn workers every n-th request. See http://docs.gunicorn.org/en/stable/settings.html#max-requests for more details.
  case $REDASH_PRODUCTION in
    true)
      echo "Starting Redash Server in production mode"
      MAX_REQUESTS=${MAX_REQUESTS:-1000}
      MAX_REQUESTS_JITTER=${MAX_REQUESTS_JITTER:-100}
      TIMEOUT=${REDASH_GUNICORN_TIMEOUT:-60}
      exec /usr/local/bin/gunicorn \
        -b 0.0.0.0:5000 \
        --name redash \
        -w${REDASH_WEB_WORKERS:-4} redash.wsgi:app \
        --max-requests $MAX_REQUESTS \
        --max-requests-jitter $MAX_REQUESTS_JITTER \
        --timeout $TIMEOUT
      ;;
    *)
      echo "Starting Redash Server in a dev mode"
      export FLASK_DEBUG=1
      exec /app/manage.py runserver --debugger --reload -h 0.0.0.0
      ;;
  esac
  MAX_REQUESTS=${MAX_REQUESTS:-1000}
  MAX_REQUESTS_JITTER=${MAX_REQUESTS_JITTER:-100}
  TIMEOUT=${REDASH_GUNICORN_TIMEOUT:-60}
  exec /usr/local/bin/gunicorn -b 0.0.0.0:5000 --name redash -w${REDASH_WEB_WORKERS:-4} redash.wsgi:app --max-requests $MAX_REQUESTS --max-requests-jitter $MAX_REQUESTS_JITTER --timeout $TIMEOUT
}

create_db() {
  REDASH_DATABASE_MIGRATE_TIMEOUT=${REDASH_DATABASE_UPGRADE_TIMEOUT:-600}
  REDASH_DATABASE_MIGRATE_MAX_ATTEMPTS=${REDASH_DATABASE_MIGRATE_MAX_ATTEMPTS:-5}
  REDASH_DATABASE_MIGRATE_RETRY_WAIT=${REDASH_DATABASE_MIGRATE_RETRY_WAIT:-10}
  ATTEMPTS=1
  while ((ATTEMPTS <= REDASH_DATABASE_MIGRATE_MAX_ATTEMPTS)); do
    echo "Creating or updating Redash database, attempt ${ATTEMPTS} of ${REDASH_DATABASE_MIGRATE_MAX_ATTEMPTS}"
    ATTEMPTS=$((ATTEMPTS+1))
    timeout $REDASH_DATABASE_MIGRATE_TIMEOUT /app/manage.py database create_tables
    timeout $REDASH_DATABASE_MIGRATE_TIMEOUT /app/manage.py db upgrade
    STATUS=$(timeout $REDASH_DATABASE_MIGRATE_TIMEOUT /app/manage.py status 2>&1)
    RETCODE=$?
    case "$RETCODE" in
      0)
        exit 0
        ;;
      124)
        echo "Status command timed out after ${REDASH_DATABASE_MIGRATE_TIMEOUT} seconds."
        ;;
    esac
    case "$STATUS" in
      *sqlalchemy.exc.OperationalError*)
        echo "Database not yet functional, waiting."
        ;;
      *sqlalchemy.exc.ProgrammingError*)
        echo "Database does not appear to be installed."
        ;;
    esac
    echo "Waiting ${REDASH_DATABASE_MIGRATE_RETRY_WAIT} seconds before retrying."
    sleep ${REDASH_DATABASE_MIGRATE_RETRY_WAIT}
  done
  echo "Reached ${REDASH_DATABASE_MIGRATE_MAX_ATTEMPTS} attempts, giving up."
  exit 1
  exec /app/manage.py database create_tables
}

help() {

@@ -125,16 +61,21 @@
  echo "server -- start Redash server (with gunicorn)"
  echo "worker -- start a single RQ worker"
  echo "dev_worker -- start a single RQ worker with code reloading"
  echo "scheduler -- start an rq-scheduler instance"
  echo "dev_scheduler -- start an rq-scheduler instance with code reloading"
  echo ""
  echo "shell -- open shell"
  echo "dev_server -- start Flask development server with debugger and auto reload"
  echo "debug -- start Flask development server with remote debugger via ptvsd"
  echo "create_db -- create database tables and run migrations"
  echo "create_db -- create database tables"
  echo "manage -- CLI to manage redash"
  echo "tests -- run tests"
}

tests() {
  export REDASH_DATABASE_URL="postgresql://postgres@postgres/tests"

  if [ $# -eq 0 ]; then
    TEST_ARGS=tests/
  else

@@ -160,10 +101,22 @@ case "$1" in
    shift
    scheduler
    ;;
  dev_scheduler)
    shift
    dev_scheduler
    ;;
  dev_worker)
    shift
    dev_worker
    ;;
  celery_healthcheck)
    shift
    echo "DEPRECATED: Celery has been replaced with RQ and now performs healthchecks autonomously as part of the 'worker' entrypoint."
    ;;
  dev_server)
    export FLASK_DEBUG=1
    exec /app/manage.py runserver --debugger --reload -h 0.0.0.0
    ;;
  debug)
    export FLASK_DEBUG=1
    export REMOTE_DEBUG=1
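The entrypoint dispatches on its first argument, so one image serves every role. A rough sketch of invoking it through Compose; the service names follow the compose files above:

```bash
# Sketch: commands handled by the entrypoint's dispatch.
docker compose run --rm server create_db             # create tables / run migrations
docker compose run --rm server manage ds list_types  # the 'manage' CLI
docker compose run --rm server tests tests/          # run the test suite
```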
@@ -12,6 +12,7 @@ import { wrap as wrapDialog, DialogPropType } from "@/components/DialogWrapper";
import QuerySelector from "@/components/QuerySelector";
import { Query } from "@/services/query";
import { useUniqueId } from "@/lib/hooks/useUniqueId";
import "./EditParameterSettingsDialog.less";

const { Option } = Select;
const formItemProps = { labelCol: { span: 6 }, wrapperCol: { span: 16 } };

@@ -26,7 +27,7 @@ function isTypeDateRange(type) {

function joinExampleList(multiValuesOptions) {
  const { prefix, suffix } = multiValuesOptions;
  return ["value1", "value2", "value3"].map(value => `${prefix}${value}${suffix}`).join(",");
  return ["value1", "value2", "value3"].map((value) => `${prefix}${value}${suffix}`).join(",");
}

function NameInput({ name, type, onChange, existingNames, setValidation }) {

@@ -54,7 +55,7 @@

  return (
    <Form.Item required label="Keyword" help={helpText} validateStatus={validateStatus} {...formItemProps}>
      <Input onChange={e => onChange(e.target.value)} autoFocus />
      <Input onChange={(e) => onChange(e.target.value)} autoFocus />
    </Form.Item>
  );
}

@@ -71,6 +72,8 @@
  const [param, setParam] = useState(clone(props.parameter));
  const [isNameValid, setIsNameValid] = useState(true);
  const [initialQuery, setInitialQuery] = useState();
  const [userInput, setUserInput] = useState(param.regex || "");
  const [isValidRegex, setIsValidRegex] = useState(true);

  const isNew = !props.parameter.name;

@@ -114,6 +117,17 @@

  const paramFormId = useUniqueId("paramForm");

  const handleRegexChange = (e) => {
    setUserInput(e.target.value);
    try {
      new RegExp(e.target.value);
      setParam({ ...param, regex: e.target.value });
      setIsValidRegex(true);
    } catch (error) {
      setIsValidRegex(false);
    }
  };

  return (
    <Modal
      {...props.dialog.props}

@@ -129,15 +143,17 @@
          disabled={!isFulfilled()}
          type="primary"
          form={paramFormId}
          data-test="SaveParameterSettings">
          data-test="SaveParameterSettings"
        >
          {isNew ? "Add Parameter" : "OK"}
        </Button>,
      ]}>
      ]}
    >
      <Form layout="horizontal" onFinish={onConfirm} id={paramFormId}>
        {isNew && (
          <NameInput
            name={param.name}
            onChange={name => setParam({ ...param, name })}
            onChange={(name) => setParam({ ...param, name })}
            setValidation={setIsNameValid}
            existingNames={props.existingParams}
            type={param.type}

@@ -146,15 +162,16 @@
        <Form.Item required label="Title" {...formItemProps}>
          <Input
            value={isNull(param.title) ? getDefaultTitle(param.name) : param.title}
            onChange={e => setParam({ ...param, title: e.target.value })}
            onChange={(e) => setParam({ ...param, title: e.target.value })}
            data-test="ParameterTitleInput"
          />
        </Form.Item>
        <Form.Item label="Type" {...formItemProps}>
          <Select value={param.type} onChange={type => setParam({ ...param, type })} data-test="ParameterTypeSelect">
          <Select value={param.type} onChange={(type) => setParam({ ...param, type })} data-test="ParameterTypeSelect">
            <Option value="text" data-test="TextParameterTypeOption">
              Text
            </Option>
            <Option value="text-pattern">Text Pattern</Option>
            <Option value="number" data-test="NumberParameterTypeOption">
              Number
            </Option>

@@ -180,12 +197,26 @@
            <Option value="datetime-range-with-seconds">Date and Time Range (with seconds)</Option>
          </Select>
        </Form.Item>
        {param.type === "text-pattern" && (
          <Form.Item
            label="Regex"
            help={!isValidRegex ? "Invalid Regex Pattern" : "Valid Regex Pattern"}
            {...formItemProps}
          >
            <Input
              value={userInput}
              onChange={handleRegexChange}
              className={!isValidRegex ? "input-error" : ""}
              data-test="RegexPatternInput"
            />
          </Form.Item>
        )}
        {param.type === "enum" && (
          <Form.Item label="Values" help="Dropdown list values (newline delimited)" {...formItemProps}>
            <Input.TextArea
              rows={3}
              value={param.enumOptions}
              onChange={e => setParam({ ...param, enumOptions: e.target.value })}
              onChange={(e) => setParam({ ...param, enumOptions: e.target.value })}
            />
          </Form.Item>
        )}

@@ -193,7 +224,7 @@
          <Form.Item label="Query" help="Select query to load dropdown values from" {...formItemProps}>
            <QuerySelector
              selectedQuery={initialQuery}
              onChange={q => setParam({ ...param, queryId: q && q.id })}
              onChange={(q) => setParam({ ...param, queryId: q && q.id })}
              type="select"
            />
          </Form.Item>

@@ -202,7 +233,7 @@
          <Form.Item className="m-b-0" label=" " colon={false} {...formItemProps}>
            <Checkbox
              defaultChecked={!!param.multiValuesOptions}
              onChange={e =>
              onChange={(e) =>
                setParam({
                  ...param,
                  multiValuesOptions: e.target.checked

@@ -214,7 +245,8 @@
                    : null,
                })
              }
              data-test="AllowMultipleValuesCheckbox">
              data-test="AllowMultipleValuesCheckbox"
            >
              Allow multiple values
            </Checkbox>
          </Form.Item>

@@ -227,10 +259,11 @@
                  Placed in query as: <code>{joinExampleList(param.multiValuesOptions)}</code>
                </React.Fragment>
              }
              {...formItemProps}>
              {...formItemProps}
            >
              <Select
                value={param.multiValuesOptions.prefix}
                onChange={quoteOption =>
                onChange={(quoteOption) =>
                  setParam({
                    ...param,
                    multiValuesOptions: {

@@ -240,7 +273,8 @@
                    },
                  })
                }
                data-test="QuotationSelect">
                data-test="QuotationSelect"
              >
                <Option value="">None (default)</Option>
                <Option value="'">Single Quotation Mark</Option>
                <Option value={'"'} data-test="DoubleQuotationMarkOption">
client/app/components/EditParameterSettingsDialog.less (new file, 3 lines)

@@ -0,0 +1,3 @@
.input-error {
  border-color: red !important;
}
@@ -33,10 +33,10 @@ export const MappingType = {
};

export function parameterMappingsToEditableMappings(mappings, parameters, existingParameterNames = []) {
  return map(mappings, mapping => {
  return map(mappings, (mapping) => {
    const result = extend({}, mapping);
    const alreadyExists = includes(existingParameterNames, mapping.mapTo);
    result.param = find(parameters, p => p.name === mapping.name);
    result.param = find(parameters, (p) => p.name === mapping.name);
    switch (mapping.type) {
      case ParameterMappingType.DashboardLevel:
        result.type = alreadyExists ? MappingType.DashboardMapToExisting : MappingType.DashboardAddNew;

@@ -62,7 +62,7 @@ export function editableMappingsToParameterMappings(mappings) {
    map(
      // convert to map
      mappings,
      mapping => {
      (mapping) => {
        const result = extend({}, mapping);
        switch (mapping.type) {
          case MappingType.DashboardAddNew:

@@ -95,11 +95,11 @@
export function synchronizeWidgetTitles(sourceMappings, widgets) {
  const affectedWidgets = [];

  each(sourceMappings, sourceMapping => {
  each(sourceMappings, (sourceMapping) => {
    if (sourceMapping.type === ParameterMappingType.DashboardLevel) {
      each(widgets, widget => {
      each(widgets, (widget) => {
        const widgetMappings = widget.options.parameterMappings;
        each(widgetMappings, widgetMapping => {
        each(widgetMappings, (widgetMapping) => {
          // check if mapped to the same dashboard-level parameter
          if (
            widgetMapping.type === ParameterMappingType.DashboardLevel &&

@@ -140,7 +140,7 @@ export class ParameterMappingInput extends React.Component {
    className: "form-item",
  };

  updateSourceType = type => {
  updateSourceType = (type) => {
    let {
      mapping: { mapTo },
    } = this.props;

@@ -155,7 +155,7 @@
    this.updateParamMapping({ type, mapTo });
  };

  updateParamMapping = update => {
  updateParamMapping = (update) => {
    const { onChange, mapping } = this.props;
    const newMapping = extend({}, mapping, update);
    if (newMapping.value !== mapping.value) {

@@ -175,7 +175,7 @@
  renderMappingTypeSelector() {
    const noExisting = isEmpty(this.props.existingParamNames);
    return (
      <Radio.Group value={this.props.mapping.type} onChange={e => this.updateSourceType(e.target.value)}>
      <Radio.Group value={this.props.mapping.type} onChange={(e) => this.updateSourceType(e.target.value)}>
        <Radio className="radio" value={MappingType.DashboardAddNew} data-test="NewDashboardParameterOption">
          New dashboard parameter
        </Radio>

@@ -205,16 +205,16 @@
      <Input
        value={mapTo}
        aria-label="Parameter name (key)"
        onChange={e => this.updateParamMapping({ mapTo: e.target.value })}
        onChange={(e) => this.updateParamMapping({ mapTo: e.target.value })}
      />
    );
  }

  renderDashboardMapToExisting() {
    const { mapping, existingParamNames } = this.props;
    const options = map(existingParamNames, paramName => ({ label: paramName, value: paramName }));
    const options = map(existingParamNames, (paramName) => ({ label: paramName, value: paramName }));

    return <Select value={mapping.mapTo} onChange={mapTo => this.updateParamMapping({ mapTo })} options={options} />;
    return <Select value={mapping.mapTo} onChange={(mapTo) => this.updateParamMapping({ mapTo })} options={options} />;
  }

  renderStaticValue() {

@@ -226,7 +226,8 @@
        enumOptions={mapping.param.enumOptions}
        queryId={mapping.param.queryId}
        parameter={mapping.param}
        onSelect={value => this.updateParamMapping({ value })}
        onSelect={(value) => this.updateParamMapping({ value })}
        regex={mapping.param.regex}
      />
    );
  }

@@ -284,12 +285,12 @@ class MappingEditor extends React.Component {
    };
  }

  onVisibleChange = visible => {
  onVisibleChange = (visible) => {
    if (visible) this.show();
    else this.hide();
  };

  onChange = mapping => {
  onChange = (mapping) => {
    let inputError = null;

    if (mapping.type === MappingType.DashboardAddNew) {

@@ -351,7 +352,8 @@
        trigger="click"
        content={this.renderContent()}
        visible={visible}
        onVisibleChange={this.onVisibleChange}>
        onVisibleChange={this.onVisibleChange}
      >
        <Button size="small" type="dashed" data-test={`EditParamMappingButton-${mapping.param.name}`}>
          <EditOutlinedIcon />
        </Button>

@@ -376,14 +378,14 @@ class TitleEditor extends React.Component {
    title: "", // will be set on editing
  };

  onPopupVisibleChange = showPopup => {
  onPopupVisibleChange = (showPopup) => {
    this.setState({
      showPopup,
      title: showPopup ? this.getMappingTitle() : "",
    });
  };

  onEditingTitleChange = event => {
  onEditingTitleChange = (event) => {
    this.setState({ title: event.target.value });
  };

@@ -460,7 +462,8 @@
        trigger="click"
        content={this.renderPopover()}
        visible={this.state.showPopup}
        onVisibleChange={this.onPopupVisibleChange}>
        onVisibleChange={this.onPopupVisibleChange}
      >
        <Button size="small" type="dashed">
          <EditOutlinedIcon />
        </Button>

@@ -508,7 +511,7 @@ export class ParameterMappingListInput extends React.Component {

    // just to be safe, array or object
    if (typeof value === "object") {
      return map(value, v => this.getStringValue(v)).join(", ");
      return map(value, (v) => this.getStringValue(v)).join(", ");
    }

    // rest

@@ -574,7 +577,7 @@

  render() {
    const { existingParams } = this.props; // eslint-disable-line react/prop-types
    const dataSource = this.props.mappings.map(mapping => ({ mapping }));
    const dataSource = this.props.mappings.map((mapping) => ({ mapping }));

    return (
      <div className="parameters-mapping-list">

@@ -583,11 +586,11 @@
          title="Title"
          dataIndex="mapping"
          key="title"
          render={mapping => (
          render={(mapping) => (
            <TitleEditor
              existingParams={existingParams}
              mapping={mapping}
              onChange={newMapping => this.updateParamMapping(mapping, newMapping)}
              onChange={(newMapping) => this.updateParamMapping(mapping, newMapping)}
            />
          )}
        />

@@ -596,19 +599,19 @@
          dataIndex="mapping"
          key="keyword"
          className="keyword"
          render={mapping => <code>{`{{ ${mapping.name} }}`}</code>}
          render={(mapping) => <code>{`{{ ${mapping.name} }}`}</code>}
        />
        <Table.Column
          title="Default Value"
          dataIndex="mapping"
          key="value"
          render={mapping => this.constructor.getDefaultValue(mapping, this.props.existingParams)}
          render={(mapping) => this.constructor.getDefaultValue(mapping, this.props.existingParams)}
        />
        <Table.Column
          title="Value Source"
          dataIndex="mapping"
          key="source"
          render={mapping => {
          render={(mapping) => {
            const existingParamsNames = existingParams
              .filter(({ type }) => type === mapping.param.type) // exclude mismatching param types
              .map(({ name }) => name); // keep names only
@@ -9,11 +9,12 @@ import DateRangeParameter from "@/components/dynamic-parameters/DateRangeParameter";
import QueryBasedParameterInput from "./QueryBasedParameterInput";

import "./ParameterValueInput.less";
import Tooltip from "./Tooltip";

const multipleValuesProps = {
  maxTagCount: 3,
  maxTagTextLength: 10,
  maxTagPlaceholder: num => `+${num.length} more`,
  maxTagPlaceholder: (num) => `+${num.length} more`,
};

class ParameterValueInput extends React.Component {

@@ -25,6 +26,7 @@
    parameter: PropTypes.any, // eslint-disable-line react/forbid-prop-types
    onSelect: PropTypes.func,
    className: PropTypes.string,
    regex: PropTypes.string,
  };

  static defaultProps = {

@@ -35,6 +37,7 @@
    parameter: null,
    onSelect: () => {},
    className: "",
    regex: "",
  };

  constructor(props) {

@@ -45,7 +48,7 @@
    };
  }

  componentDidUpdate = prevProps => {
  componentDidUpdate = (prevProps) => {
    const { value, parameter } = this.props;
    // if value prop updated, reset dirty state
    if (prevProps.value !== value || prevProps.parameter !== parameter) {

@@ -56,7 +59,7 @@
    }
  };

  onSelect = value => {
  onSelect = (value) => {
    const isDirty = !isEqual(value, this.props.value);
    this.setState({ value, isDirty });
    this.props.onSelect(value, isDirty);

@@ -93,9 +96,9 @@
  renderEnumInput() {
    const { enumOptions, parameter } = this.props;
    const { value } = this.state;
    const enumOptionsArray = enumOptions.split("\n").filter(v => v !== "");
    const enumOptionsArray = enumOptions.split("\n").filter((v) => v !== "");
    // Antd Select doesn't handle null in multiple mode
    const normalize = val => (parameter.multiValuesOptions && val === null ? [] : val);
    const normalize = (val) => (parameter.multiValuesOptions && val === null ? [] : val);

    return (
      <SelectWithVirtualScroll

@@ -103,7 +106,7 @@
        mode={parameter.multiValuesOptions ? "multiple" : "default"}
        value={normalize(value)}
        onChange={this.onSelect}
        options={map(enumOptionsArray, opt => ({ label: String(opt), value: opt }))}
        options={map(enumOptionsArray, (opt) => ({ label: String(opt), value: opt }))}
        showSearch
        showArrow
        notFoundContent={isEmpty(enumOptionsArray) ? "No options available" : null}

@@ -133,18 +136,36 @@
    const { className } = this.props;
    const { value } = this.state;

    const normalize = val => (isNaN(val) ? undefined : val);
    const normalize = (val) => (isNaN(val) ? undefined : val);

    return (
      <InputNumber
        className={className}
        value={normalize(value)}
        aria-label="Parameter number value"
        onChange={val => this.onSelect(normalize(val))}
        onChange={(val) => this.onSelect(normalize(val))}
      />
    );
  }

  renderTextPatternInput() {
    const { className } = this.props;
    const { value } = this.state;

    return (
      <React.Fragment>
        <Tooltip title={`Regex to match: ${this.props.regex}`} placement="right">
          <Input
            className={className}
            value={value}
            aria-label="Parameter text pattern value"
            onChange={(e) => this.onSelect(e.target.value)}
          />
        </Tooltip>
      </React.Fragment>
    );
  }

  renderTextInput() {
    const { className } = this.props;
    const { value } = this.state;

@@ -155,7 +176,7 @@
        value={value}
        aria-label="Parameter text value"
        data-test="TextParamInput"
        onChange={e => this.onSelect(e.target.value)}
        onChange={(e) => this.onSelect(e.target.value)}
      />
    );
  }

@@ -177,6 +198,8 @@
        return this.renderQueryBasedInput();
      case "number":
        return this.renderNumberInput();
      case "text-pattern":
        return this.renderTextPatternInput();
      default:
        return this.renderTextInput();
    }
@@ -14,7 +14,7 @@ import "./Parameters.less";
|
||||
|
||||
function updateUrl(parameters) {
|
||||
const params = extend({}, location.search);
|
||||
parameters.forEach(param => {
|
||||
parameters.forEach((param) => {
|
||||
extend(params, param.toUrlParams());
|
||||
});
|
||||
location.setSearch(params, true);
|
||||
@@ -43,7 +43,7 @@ export default class Parameters extends React.Component {
|
||||
appendSortableToParent: true,
|
||||
};
|
||||
|
||||
toCamelCase = str => {
|
||||
toCamelCase = (str) => {
|
||||
if (isEmpty(str)) {
|
||||
return "";
|
||||
}
|
||||
@@ -59,10 +59,10 @@ export default class Parameters extends React.Component {
|
||||
}
|
||||
const hideRegex = /hide_filter=([^&]+)/g;
|
||||
const matches = window.location.search.matchAll(hideRegex);
|
||||
this.hideValues = Array.from(matches, match => match[1]);
|
||||
this.hideValues = Array.from(matches, (match) => match[1]);
|
||||
}
|
||||
|
||||
componentDidUpdate = prevProps => {
|
||||
componentDidUpdate = (prevProps) => {
|
||||
const { parameters, disableUrlUpdate } = this.props;
|
||||
const parametersChanged = prevProps.parameters !== parameters;
|
||||
const disableUrlUpdateChanged = prevProps.disableUrlUpdate !== disableUrlUpdate;
|
||||
@@ -74,7 +74,7 @@ export default class Parameters extends React.Component {
|
||||
}
|
||||
};
|
||||
|
||||
handleKeyDown = e => {
|
||||
handleKeyDown = (e) => {
|
||||
// Cmd/Ctrl/Alt + Enter
|
||||
if (e.keyCode === 13 && (e.ctrlKey || e.metaKey || e.altKey)) {
|
||||
e.stopPropagation();
|
||||
@@ -109,8 +109,8 @@ export default class Parameters extends React.Component {
|
||||
applyChanges = () => {
|
||||
const { onValuesChange, disableUrlUpdate } = this.props;
|
||||
this.setState(({ parameters }) => {
|
||||
const parametersWithPendingValues = parameters.filter(p => p.hasPendingValue);
|
||||
forEach(parameters, p => p.applyPendingValue());
|
||||
const parametersWithPendingValues = parameters.filter((p) => p.hasPendingValue);
|
||||
forEach(parameters, (p) => p.applyPendingValue());
|
||||
if (!disableUrlUpdate) {
|
||||
updateUrl(parameters);
|
||||
}
|
||||
@@ -121,7 +121,7 @@ export default class Parameters extends React.Component {
|
||||
|
||||
showParameterSettings = (parameter, index) => {
|
||||
const { onParametersEdit } = this.props;
|
||||
EditParameterSettingsDialog.showModal({ parameter }).onClose(updated => {
|
||||
EditParameterSettingsDialog.showModal({ parameter }).onClose((updated) => {
|
||||
this.setState(({ parameters }) => {
|
||||
const updatedParameter = extend(parameter, updated);
|
||||
parameters[index] = createParameter(updatedParameter, updatedParameter.parentQueryId);
|
||||
@@ -132,7 +132,7 @@ export default class Parameters extends React.Component {
|
||||
};
|
||||
|
||||
renderParameter(param, index) {
|
||||
if (this.hideValues.some(value => this.toCamelCase(value) === this.toCamelCase(param.name))) {
|
||||
if (this.hideValues.some((value) => this.toCamelCase(value) === this.toCamelCase(param.name))) {
|
||||
return null;
|
||||
}
|
||||
const { editable } = this.props;
|
||||
@@ -149,7 +149,8 @@ export default class Parameters extends React.Component {
|
||||
aria-label="Edit"
|
||||
onClick={() => this.showParameterSettings(param, index)}
|
||||
data-test={`ParameterSettings-${param.name}`}
|
||||
type="button">
|
||||
type="button"
|
||||
>
|
||||
<i className="fa fa-cog" aria-hidden="true" />
|
||||
</PlainButton>
|
||||
)}
|
||||
@@ -162,6 +163,7 @@ export default class Parameters extends React.Component {
|
||||
enumOptions={param.enumOptions}
|
||||
queryId={param.queryId}
|
||||
onSelect={(value, isDirty) => this.setPendingValue(param, value, isDirty)}
|
||||
regex={param.regex}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
@@ -178,20 +180,22 @@ export default class Parameters extends React.Component {
|
||||
useDragHandle
|
||||
lockToContainerEdges
|
||||
helperClass="parameter-dragged"
|
||||
helperContainer={containerEl => (appendSortableToParent ? containerEl : document.body)}
|
||||
helperContainer={(containerEl) => (appendSortableToParent ? containerEl : document.body)}
|
||||
updateBeforeSortStart={this.onBeforeSortStart}
|
||||
onSortEnd={this.moveParameter}
|
||||
containerProps={{
|
||||
className: "parameter-container",
|
||||
onKeyDown: dirtyParamCount ? this.handleKeyDown : null,
|
||||
}}>
|
||||
}}
|
||||
>
|
||||
{parameters &&
|
||||
parameters.map((param, index) => (
|
||||
<SortableElement key={param.name} index={index}>
|
||||
<div
|
||||
className="parameter-block"
|
||||
data-editable={sortable || null}
|
||||
data-test={`ParameterBlock-${param.name}`}>
|
||||
data-test={`ParameterBlock-${param.name}`}
|
||||
>
|
||||
{sortable && <DragHandle data-test={`DragHandle-${param.name}`} />}
|
||||
{this.renderParameter(param, index)}
|
||||
</div>
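Earlier in this file, hide_filter values are pulled from the query string and compared against parameter names via toCamelCase. A standalone sketch of the extraction step, with an assumed URL:

// Assumed example URL, showing how hideValues is built
const search = "?hide_filter=start_date&hide_filter=region";
const matches = search.matchAll(/hide_filter=([^&]+)/g);
console.log(Array.from(matches, (match) => match[1])); // ["start_date", "region"]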

@@ -19,7 +19,6 @@ import PlainButton from "@/components/PlainButton";
import ExpandedWidgetDialog from "@/components/dashboards/ExpandedWidgetDialog";
import EditParameterMappingsDialog from "@/components/dashboards/EditParameterMappingsDialog";
import VisualizationRenderer from "@/components/visualizations/VisualizationRenderer";
import { ExecutionStatus } from "@/services/query-result";

import Widget from "./Widget";

@@ -279,7 +278,7 @@ class VisualizationWidget extends React.Component {
const widgetQueryResult = widget.getQueryResult();
const widgetStatus = widgetQueryResult && widgetQueryResult.getStatus();
switch (widgetStatus) {
case ExecutionStatus.FAILED:
case "failed":
return (
<div className="body-row-auto scrollbox">
{widgetQueryResult.getError() && (
@@ -289,7 +288,7 @@ class VisualizationWidget extends React.Component {
)}
</div>
);
case ExecutionStatus.FINISHED:
case "done":
return (
<div className="body-row-auto scrollbox">
<VisualizationRenderer

@@ -65,6 +65,7 @@ export const Query = PropTypes.shape({

export const AlertOptions = PropTypes.shape({
column: PropTypes.string,
selector: PropTypes.oneOf(["first", "min", "max"]),
op: PropTypes.oneOf([">", ">=", "<", "<=", "==", "!="]),
value: PropTypes.oneOfType([PropTypes.string, PropTypes.number]),
custom_subject: PropTypes.string,
@@ -83,6 +84,7 @@ export const Alert = PropTypes.shape({
query: Query,
options: PropTypes.shape({
column: PropTypes.string,
selector: PropTypes.string,
op: PropTypes.string,
value: PropTypes.oneOfType([PropTypes.string, PropTypes.number]),
}).isRequired,

@@ -16,7 +16,6 @@ import LoadingState from "../items-list/components/LoadingState";
const SchemaItemColumnType = PropTypes.shape({
name: PropTypes.string.isRequired,
type: PropTypes.string,
comment: PropTypes.string,
});

export const SchemaItemType = PropTypes.shape({
@@ -48,30 +47,13 @@ function SchemaItem({ item, expanded, onToggle, onSelect, ...props }) {
return (
<div {...props}>
<div className="schema-list-item">
{item.description ? (
<Tooltip
title={item.description}
mouseEnterDelay={0}
mouseLeaveDelay={0}
placement="right"
arrowPointAtCenter>
<PlainButton className="table-name" onClick={onToggle}>
<i className="fa fa-table m-r-5" aria-hidden="true" />
<strong>
<span title={item.name}>{tableDisplayName}</span>
{!isNil(item.size) && <span> ({item.size})</span>}
</strong>
</PlainButton>
</Tooltip>
) : (
<PlainButton className="table-name" onClick={onToggle}>
<i className="fa fa-table m-r-5" aria-hidden="true" />
<strong>
<span title={item.name}>{tableDisplayName}</span>
{!isNil(item.size) && <span> ({item.size})</span>}
</strong>
</PlainButton>
)}
<PlainButton className="table-name" onClick={onToggle}>
<i className="fa fa-table m-r-5" aria-hidden="true" />
<strong>
<span title={item.name}>{tableDisplayName}</span>
{!isNil(item.size) && <span> ({item.size})</span>}
</strong>
</PlainButton>
<Tooltip
title="Insert table name into query text"
mouseEnterDelay={0}
@@ -91,34 +73,22 @@ function SchemaItem({ item, expanded, onToggle, onSelect, ...props }) {
map(item.columns, column => {
const columnName = get(column, "name");
const columnType = get(column, "type");
const columnComment = get(column, "comment");
if (columnComment) {
return (
<Tooltip title={columnComment} mouseEnterDelay={0} mouseLeaveDelay={0} placement="rightTop">
<PlainButton
key={columnName}
className="table-open-item"
onClick={e => handleSelect(e, columnName)}>
<div>
{columnName} {columnType && <span className="column-type">{columnType}</span>}
</div>

<div className="copy-to-editor">
<i className="fa fa-angle-double-right" aria-hidden="true" />
</div>
</PlainButton>
</Tooltip>
);
}
return (
<PlainButton key={columnName} className="table-open-item" onClick={e => handleSelect(e, columnName)}>
<div>
{columnName} {columnType && <span className="column-type">{columnType}</span>}
</div>
<div className="copy-to-editor">
<i className="fa fa-angle-double-right" aria-hidden="true" />
</div>
</PlainButton>
<Tooltip
title="Insert column name into query text"
mouseEnterDelay={0}
mouseLeaveDelay={0}
placement="rightTop">
<PlainButton key={columnName} className="table-open-item" onClick={e => handleSelect(e, columnName)}>
<div>
{columnName} {columnType && <span className="column-type">{columnType}</span>}
</div>

<div className="copy-to-editor">
<i className="fa fa-angle-double-right" aria-hidden="true" />
</div>
</PlainButton>
</Tooltip>
);
})
)}

@@ -16,6 +16,7 @@ import MenuButton from "./components/MenuButton";
import AlertView from "./AlertView";
import AlertEdit from "./AlertEdit";
import AlertNew from "./AlertNew";
import notifications from "@/services/notifications";

const MODES = {
NEW: 0,
@@ -64,6 +65,7 @@ class Alert extends React.Component {
this.setState({
alert: {
options: {
selector: "first",
op: ">",
value: 1,
muted: false,
@@ -75,7 +77,7 @@ class Alert extends React.Component {
} else {
const { alertId } = this.props;
AlertService.get({ id: alertId })
.then(alert => {
.then((alert) => {
if (this._isMounted) {
const canEdit = currentUser.canEdit(alert);

@@ -93,7 +95,7 @@ class Alert extends React.Component {
this.onQuerySelected(alert.query);
}
})
.catch(error => {
.catch((error) => {
if (this._isMounted) {
this.props.onError(error);
}
@@ -112,7 +114,7 @@ class Alert extends React.Component {
alert.rearm = pendingRearm || null;

return AlertService.save(alert)
.then(alert => {
.then((alert) => {
notification.success("Saved.");
navigateTo(`alerts/${alert.id}`, true);
this.setState({ alert, mode: MODES.VIEW });
@@ -122,7 +124,7 @@ class Alert extends React.Component {
});
};

onQuerySelected = query => {
onQuerySelected = (query) => {
this.setState(({ alert }) => ({
alert: Object.assign(alert, { query }),
queryResult: null,
@@ -130,7 +132,7 @@ class Alert extends React.Component {

if (query) {
// get cached result for column names and values
new QueryService(query).getQueryResultPromise().then(queryResult => {
new QueryService(query).getQueryResultPromise().then((queryResult) => {
if (this._isMounted) {
this.setState({ queryResult });
let { column } = this.state.alert.options;
@@ -146,18 +148,18 @@ class Alert extends React.Component {
}
};

onNameChange = name => {
onNameChange = (name) => {
const { alert } = this.state;
this.setState({
alert: Object.assign(alert, { name }),
});
};

onRearmChange = pendingRearm => {
onRearmChange = (pendingRearm) => {
this.setState({ pendingRearm });
};

setAlertOptions = obj => {
setAlertOptions = (obj) => {
const { alert } = this.state;
const options = { ...alert.options, ...obj };
this.setState({
@@ -177,6 +179,17 @@ class Alert extends React.Component {
});
};

evaluate = () => {
const { alert } = this.state;
return AlertService.evaluate(alert)
.then(() => {
notification.success("Alert evaluated. Refresh page for updated status.");
})
.catch(() => {
notifications.error("Failed to evaluate alert.");
});
};

mute = () => {
const { alert } = this.state;
return AlertService.mute(alert)
@@ -223,7 +236,14 @@ class Alert extends React.Component {
const { queryResult, mode, canEdit, pendingRearm } = this.state;

const menuButton = (
<MenuButton doDelete={this.delete} muted={muted} mute={this.mute} unmute={this.unmute} canEdit={canEdit} />
<MenuButton
doDelete={this.delete}
muted={muted}
mute={this.mute}
unmute={this.unmute}
canEdit={canEdit}
evaluate={this.evaluate}
/>
);

const commonProps = {
@@ -258,7 +278,7 @@ routes.register(
routeWithUserSession({
path: "/alerts/new",
title: "New Alert",
render: pageProps => <Alert {...pageProps} mode={MODES.NEW} />,
render: (pageProps) => <Alert {...pageProps} mode={MODES.NEW} />,
})
);
routes.register(
@@ -266,7 +286,7 @@ routes.register(
routeWithUserSession({
path: "/alerts/:alertId",
title: "Alert",
render: pageProps => <Alert {...pageProps} mode={MODES.VIEW} />,
render: (pageProps) => <Alert {...pageProps} mode={MODES.VIEW} />,
})
);
routes.register(
@@ -274,6 +294,6 @@ routes.register(
routeWithUserSession({
path: "/alerts/:alertId/edit",
title: "Alert",
render: pageProps => <Alert {...pageProps} mode={MODES.EDIT} />,
render: (pageProps) => <Alert {...pageProps} mode={MODES.EDIT} />,
})
);

@@ -68,13 +68,23 @@ export default class AlertView extends React.Component {
<>
<Title name={name} alert={alert}>
<DynamicComponent name="AlertView.HeaderExtra" alert={alert} />
<Tooltip title={canEdit ? "" : "You do not have sufficient permissions to edit this alert"}>
<Button type="default" onClick={canEdit ? onEdit : null} className={cx({ disabled: !canEdit })}>
<i className="fa fa-edit m-r-5" aria-hidden="true" />
Edit
</Button>
{menuButton}
</Tooltip>
{canEdit ? (
<>
<Button type="default" onClick={canEdit ? onEdit : null} className={cx({ disabled: !canEdit })}>
<i className="fa fa-edit m-r-5" aria-hidden="true" />
Edit
</Button>
{menuButton}
</>
) : (
<Tooltip title="You do not have sufficient permissions to edit this alert">
<Button type="default" onClick={canEdit ? onEdit : null} className={cx({ disabled: !canEdit })}>
<i className="fa fa-edit m-r-5" aria-hidden="true" />
Edit
</Button>
{menuButton}
</Tooltip>
)}
</Title>
<div className="bg-white tiled p-20">
<Grid.Row type="flex" gutter={16}>

@@ -54,23 +54,70 @@ export default function Criteria({ columnNames, resultValues, alertOptions, onCh
return null;
})();

const columnHint = (
<small className="alert-criteria-hint">
Top row value is <code className="p-0">{toString(columnValue) || "unknown"}</code>
</small>
);
let columnHint;

if (alertOptions.selector === "first") {
columnHint = (
<small className="alert-criteria-hint">
Top row value is <code className="p-0">{toString(columnValue) || "unknown"}</code>
</small>
);
} else if (alertOptions.selector === "max") {
columnHint = (
<small className="alert-criteria-hint">
Max column value is{" "}
<code className="p-0">
{toString(Math.max(...resultValues.map((o) => o[alertOptions.column]))) || "unknown"}
</code>
</small>
);
} else if (alertOptions.selector === "min") {
columnHint = (
<small className="alert-criteria-hint">
Min column value is{" "}
<code className="p-0">
{toString(Math.min(...resultValues.map((o) => o[alertOptions.column]))) || "unknown"}
</code>
</small>
);
}

return (
<div data-test="Criteria">
<div className="input-title">
<span className="input-label">Selector</span>
{editMode ? (
<Select
value={alertOptions.selector}
onChange={(selector) => onChange({ selector })}
optionLabelProp="label"
dropdownMatchSelectWidth={false}
style={{ width: 80 }}
>
<Select.Option value="first" label="first">
first
</Select.Option>
<Select.Option value="min" label="min">
min
</Select.Option>
<Select.Option value="max" label="max">
max
</Select.Option>
</Select>
) : (
<DisabledInput minWidth={60}>{alertOptions.selector}</DisabledInput>
)}
</div>
<div className="input-title">
<span className="input-label">Value column</span>
{editMode ? (
<Select
value={alertOptions.column}
onChange={column => onChange({ column })}
onChange={(column) => onChange({ column })}
dropdownMatchSelectWidth={false}
style={{ minWidth: 100 }}>
{columnNames.map(name => (
style={{ minWidth: 100 }}
>
{columnNames.map((name) => (
<Select.Option key={name}>{name}</Select.Option>
))}
</Select>
@@ -83,10 +130,11 @@ export default function Criteria({ columnNames, resultValues, alertOptions, onCh
{editMode ? (
<Select
value={alertOptions.op}
onChange={op => onChange({ op })}
onChange={(op) => onChange({ op })}
optionLabelProp="label"
dropdownMatchSelectWidth={false}
style={{ width: 55 }}>
style={{ width: 55 }}
>
<Select.Option value=">" label={CONDITIONS[">"]}>
{CONDITIONS[">"]} greater than
</Select.Option>
@@ -125,7 +173,7 @@ export default function Criteria({ columnNames, resultValues, alertOptions, onCh
id="threshold-criterion"
style={{ width: 90 }}
value={alertOptions.value}
onChange={e => onChange({ value: e.target.value })}
onChange={(e) => onChange({ value: e.target.value })}
/>
) : (
<DisabledInput minWidth={50}>{alertOptions.value}</DisabledInput>
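The selector hints introduced above reduce the result rows with Math.max/Math.min over the chosen column. Their behavior on invented sample rows:

// Invented rows: demonstrates the max/min hint computation
const resultValues = [{ value: 3 }, { value: 7 }, { value: 5 }];
const column = "value";
console.log(Math.max(...resultValues.map((o) => o[column]))); // 7
console.log(Math.min(...resultValues.map((o) => o[column]))); // 3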

@@ -11,7 +11,7 @@ import LoadingOutlinedIcon from "@ant-design/icons/LoadingOutlined";
import EllipsisOutlinedIcon from "@ant-design/icons/EllipsisOutlined";
import PlainButton from "@/components/PlainButton";

export default function MenuButton({ doDelete, canEdit, mute, unmute, muted }) {
export default function MenuButton({ doDelete, canEdit, mute, unmute, evaluate, muted }) {
const [loading, setLoading] = useState(false);

const execute = useCallback(action => {
@@ -55,6 +55,9 @@ export default function MenuButton({ doDelete, canEdit, mute, unmute, muted }) {
<Menu.Item>
<PlainButton onClick={confirmDelete}>Delete</PlainButton>
</Menu.Item>
<Menu.Item>
<PlainButton onClick={() => execute(evaluate)}>Evaluate</PlainButton>
</Menu.Item>
</Menu>
}>
<Button aria-label="More actions">
@@ -69,6 +72,7 @@ MenuButton.propTypes = {
canEdit: PropTypes.bool.isRequired,
mute: PropTypes.func.isRequired,
unmute: PropTypes.func.isRequired,
evaluate: PropTypes.func.isRequired,
muted: PropTypes.bool,
};

@@ -380,9 +380,7 @@ function QuerySource(props) {
<QueryVisualizationTabs
queryResult={queryResult}
visualizations={query.visualizations}
showNewVisualizationButton={
queryFlags.canEdit && queryResultData.status === ExecutionStatus.FINISHED
}
showNewVisualizationButton={queryFlags.canEdit && queryResultData.status === ExecutionStatus.DONE}
canDeleteVisualizations={queryFlags.canEdit}
selectedTab={selectedVisualization}
onChangeTab={setSelectedVisualization}

@@ -165,7 +165,7 @@ function QueryView(props) {
<QueryVisualizationTabs
queryResult={queryResult}
visualizations={query.visualizations}
showNewVisualizationButton={queryFlags.canEdit && queryResultData.status === ExecutionStatus.FINISHED}
showNewVisualizationButton={queryFlags.canEdit && queryResultData.status === ExecutionStatus.DONE}
canDeleteVisualizations={queryFlags.canEdit}
selectedTab={selectedVisualization}
onChangeTab={setSelectedVisualization}

@@ -1,45 +1,37 @@
import { includes } from "lodash";
import React from "react";
import PropTypes from "prop-types";
import Alert from "antd/lib/alert";
import Button from "antd/lib/button";
import Timer from "@/components/Timer";
import { ExecutionStatus } from "@/services/query-result";

export default function QueryExecutionStatus({ status, updatedAt, error, isCancelling, onCancel }) {
const alertType = status === ExecutionStatus.FAILED ? "error" : "info";
const showTimer = status !== ExecutionStatus.FAILED && updatedAt;
const isCancelButtonAvailable = [
ExecutionStatus.SCHEDULED,
ExecutionStatus.QUEUED,
ExecutionStatus.STARTED,
ExecutionStatus.DEFERRED,
].includes(status);
const alertType = status === "failed" ? "error" : "info";
const showTimer = status !== "failed" && updatedAt;
const isCancelButtonAvailable = includes(["waiting", "processing"], status);
let message = isCancelling ? <React.Fragment>Cancelling…</React.Fragment> : null;

switch (status) {
case ExecutionStatus.QUEUED:
case "waiting":
if (!isCancelling) {
message = <React.Fragment>Query in queue…</React.Fragment>;
}
break;
case ExecutionStatus.STARTED:
case "processing":
if (!isCancelling) {
message = <React.Fragment>Executing query…</React.Fragment>;
}
break;
case ExecutionStatus.LOADING_RESULT:
case "loading-result":
message = <React.Fragment>Loading results…</React.Fragment>;
break;
case ExecutionStatus.FAILED:
case "failed":
message = (
<React.Fragment>
Error running query: <strong>{error}</strong>
</React.Fragment>
);
break;
case ExecutionStatus.CANCELED:
message = <React.Fragment>Query was canceled</React.Fragment>;
break;
// no default
}

@@ -74,7 +66,7 @@ QueryExecutionStatus.propTypes = {
};

QueryExecutionStatus.defaultProps = {
status: ExecutionStatus.QUEUED,
status: "waiting",
updatedAt: null,
error: null,
isCancelling: true,

@@ -36,6 +36,7 @@ const Alert = {
delete: data => axios.delete(`api/alerts/${data.id}`),
mute: data => axios.post(`api/alerts/${data.id}/mute`),
unmute: data => axios.delete(`api/alerts/${data.id}/mute`),
evaluate: data => axios.post(`api/alerts/${data.id}/eval`),
};

export default Alert;
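The new evaluate entry posts to the alert's /eval endpoint. A hedged usage sketch; the import path and the alert id are assumptions, not taken from the diff:

// Sketch only; assumes this module resolves as "@/services/alert" and alert 42 exists
import AlertService from "@/services/alert";

AlertService.evaluate({ id: 42 })
  .then(() => console.log("evaluation triggered"))
  .catch((err) => console.error("evaluation failed", err));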

@@ -61,7 +61,7 @@ class DateParameter extends Parameter {
return value;
}

const normalizedValue = moment(value);
const normalizedValue = moment(value, moment.ISO_8601, true);
return normalizedValue.isValid() ? normalizedValue : null;
}
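Passing moment.ISO_8601 with the strict flag (the third argument) rejects inputs that the lenient constructor would coerce. A quick standalone comparison, assuming moment is installed:

// Strict vs lenient parsing
const moment = require("moment");
console.log(moment("2024-5-3").isValid()); // true: lenient fallback parsing
console.log(moment("2024-5-3", moment.ISO_8601, true).isValid()); // false: not strict ISO 8601
console.log(moment("2024-05-03", moment.ISO_8601, true).isValid()); // true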

29 client/app/services/parameters/TextPatternParameter.js Normal file
@@ -0,0 +1,29 @@
import { toString, isNull } from "lodash";
import Parameter from "./Parameter";

class TextPatternParameter extends Parameter {
constructor(parameter, parentQueryId) {
super(parameter, parentQueryId);
this.regex = parameter.regex;
this.setValue(parameter.value);
}

// eslint-disable-next-line class-methods-use-this
normalizeValue(value) {
const normalizedValue = toString(value);
if (isNull(normalizedValue)) {
return null;
}

var re = new RegExp(this.regex);

if (re !== null) {
if (re.test(normalizedValue)) {
return normalizedValue;
}
}
return null;
}
}

export default TextPatternParameter;
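The heart of normalizeValue is a regex gate: the value passes through unchanged only if the configured pattern matches (note that RegExp.test is unanchored, so a partial match suffices). Distilled into a standalone sketch using the "a+" pattern from the spec below:

// Distilled regex gate: mirrors normalizeValue's happy path
const regex = "a+";
const normalize = (value) => (new RegExp(regex).test(String(value)) ? String(value) : null);
console.log(normalize("art")); // "art"
console.log(normalize("brt")); // null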
@@ -5,6 +5,7 @@ import EnumParameter from "./EnumParameter";
import QueryBasedDropdownParameter from "./QueryBasedDropdownParameter";
import DateParameter from "./DateParameter";
import DateRangeParameter from "./DateRangeParameter";
import TextPatternParameter from "./TextPatternParameter";

function createParameter(param, parentQueryId) {
switch (param.type) {
@@ -22,6 +23,8 @@ function createParameter(param, parentQueryId) {
case "datetime-range":
case "datetime-range-with-seconds":
return new DateRangeParameter(param, parentQueryId);
case "text-pattern":
return new TextPatternParameter({ ...param, type: "text-pattern" }, parentQueryId);
default:
return new TextParameter({ ...param, type: "text" }, parentQueryId);
}
@@ -34,6 +37,7 @@ function cloneParameter(param) {
export {
Parameter,
TextParameter,
TextPatternParameter,
NumberParameter,
EnumParameter,
QueryBasedDropdownParameter,

@@ -1,6 +1,7 @@
import {
createParameter,
TextParameter,
TextPatternParameter,
NumberParameter,
EnumParameter,
QueryBasedDropdownParameter,
@@ -12,6 +13,7 @@ describe("Parameter", () => {
describe("create", () => {
const parameterTypes = [
["text", TextParameter],
["text-pattern", TextPatternParameter],
["number", NumberParameter],
["enum", EnumParameter],
["query", QueryBasedDropdownParameter],

@@ -0,0 +1,21 @@
import { createParameter } from "..";

describe("TextPatternParameter", () => {
let param;

beforeEach(() => {
param = createParameter({ name: "param", title: "Param", type: "text-pattern", regex: "a+" });
});

describe("noramlizeValue", () => {
test("converts matching strings", () => {
const normalizedValue = param.normalizeValue("art");
expect(normalizedValue).toBe("art");
});

test("returns null when string does not match pattern", () => {
const normalizedValue = param.normalizeValue("brt");
expect(normalizedValue).toBeNull();
});
});
});
@@ -50,15 +50,18 @@ const QueryResultResource = {
};

export const ExecutionStatus = {
QUEUED: "queued",
STARTED: "started",
FINISHED: "finished",
WAITING: "waiting",
PROCESSING: "processing",
DONE: "done",
FAILED: "failed",
LOADING_RESULT: "loading-result",
CANCELED: "canceled",
DEFERRED: "deferred",
SCHEDULED: "scheduled",
STOPPED: "stopped",
};

const statuses = {
1: ExecutionStatus.WAITING,
2: ExecutionStatus.PROCESSING,
3: ExecutionStatus.DONE,
4: ExecutionStatus.FAILED,
};

function handleErrorResponse(queryResult, error) {
@@ -77,7 +80,7 @@ function handleErrorResponse(queryResult, error) {
queryResult.update({
job: {
error: "cached query result unavailable, please execute again.",
status: ExecutionStatus.FAILED,
status: 4,
},
});
return;
@@ -88,7 +91,7 @@ function handleErrorResponse(queryResult, error) {
queryResult.update({
job: {
error: get(error, "response.data.message", "Unknown error occurred. Please try again later."),
status: ExecutionStatus.FAILED,
status: 4,
},
});
}
@@ -99,19 +102,11 @@ function sleep(ms) {

export function fetchDataFromJob(jobId, interval = 1000) {
return axios.get(`api/jobs/${jobId}`).then(data => {
const status = data.job.status;
if (
[ExecutionStatus.QUEUED, ExecutionStatus.STARTED, ExecutionStatus.SCHEDULED, ExecutionStatus.DEFERRED].includes(
status
)
) {
const status = statuses[data.job.status];
if (status === ExecutionStatus.WAITING || status === ExecutionStatus.PROCESSING) {
return sleep(interval).then(() => fetchDataFromJob(data.job.id));
} else if (status === ExecutionStatus.FINISHED) {
return data.job.result_id;
} else if (status === ExecutionStatus.CANCELED) {
return Promise.reject("Job was canceled");
} else if (status === ExecutionStatus.STOPPED) {
return Promise.reject("Job was stopped");
} else if (status === ExecutionStatus.DONE) {
return data.job.result;
} else if (status === ExecutionStatus.FAILED) {
return Promise.reject(data.job.error);
}
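fetchDataFromJob now translates the API's numeric job status through the statuses table before branching, polling while the job is waiting or processing. A compact fetch-based sketch of that loop; names and endpoint shape are assumed from the diff, not a drop-in replacement:

// Minimal polling sketch of the flow above
const STATUS = { 1: "waiting", 2: "processing", 3: "done", 4: "failed" };

function pollJob(jobId, interval = 1000) {
  return fetch(`api/jobs/${jobId}`)
    .then((response) => response.json())
    .then((data) => {
      const status = STATUS[data.job.status];
      if (status === "waiting" || status === "processing") {
        // wait, then poll the same job again
        return new Promise((resolve) => setTimeout(resolve, interval)).then(() => pollJob(jobId, interval));
      }
      if (status === "done") return data.job.result;
      return Promise.reject(data.job.error);
    });
}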
@@ -119,7 +114,7 @@ export function fetchDataFromJob(jobId, interval = 1000) {
}

export function isDateTime(v) {
return isString(v) && moment(v).isValid() && /^\d{4}-\d{2}-\d{2}T/.test(v);
return isString(v) && moment(v, moment.ISO_8601, true).isValid() && /^\d{4}-\d{2}-\d{2}T/.test(v);
}

class QueryResult {
@@ -127,7 +122,7 @@ class QueryResult {
this.deferred = defer();
this.job = {};
this.query_result = {};
this.status = ExecutionStatus.QUEUED;
this.status = "waiting";

this.updatedAt = moment();

@@ -143,8 +138,8 @@ class QueryResult {
extend(this, props);

if ("query_result" in props) {
this.status = ExecutionStatus.FINISHED;
this.deferred.onStatusChange(ExecutionStatus.FINISHED);
this.status = ExecutionStatus.DONE;
this.deferred.onStatusChange(ExecutionStatus.DONE);

const columnTypes = {};

@@ -188,10 +183,11 @@ class QueryResult {
});

this.deferred.resolve(this);
} else if (this.job.status === ExecutionStatus.STARTED || this.job.status === ExecutionStatus.FINISHED) {
this.status = ExecutionStatus.STARTED;
} else if (this.job.status === ExecutionStatus.FAILED) {
this.status = this.job.status;
} else if (this.job.status === 3 || this.job.status === 2) {
this.deferred.onStatusChange(ExecutionStatus.PROCESSING);
this.status = "processing";
} else if (this.job.status === 4) {
this.status = statuses[this.job.status];
this.deferred.reject(new QueryResultError(this.job.error));
} else {
this.deferred.onStatusChange(undefined);
@@ -215,7 +211,7 @@ class QueryResult {
if (this.isLoadingResult) {
return ExecutionStatus.LOADING_RESULT;
}
return this.status || this.job.status;
return this.status || statuses[this.job.status];
}

getError() {
@@ -378,7 +374,7 @@ class QueryResult {
this.isLoadingResult = true;
this.deferred.onStatusChange(ExecutionStatus.LOADING_RESULT);

QueryResultResource.get({ id: this.job.result_id })
QueryResultResource.get({ id: this.job.query_result_id })
.then(response => {
this.update(response);
this.isLoadingResult = false;
@@ -393,7 +389,7 @@ class QueryResult {
this.update({
job: {
error: "failed communicating with server. Please check your Internet connection and try again.",
status: ExecutionStatus.FAILED,
status: 4,
},
});
this.isLoadingResult = false;
@@ -417,9 +413,9 @@ class QueryResult {
.then(jobResponse => {
this.update(jobResponse);

if (this.getStatus() === ExecutionStatus.STARTED && this.job.result_id && this.job.result_id !== "None") {
if (this.getStatus() === "processing" && this.job.query_result_id && this.job.query_result_id !== "None") {
loadResult();
} else if (this.getStatus() !== ExecutionStatus.FAILED) {
} else if (this.getStatus() !== "failed") {
const waitTime = tryNumber > 10 ? 3000 : 500;
setTimeout(() => {
this.refreshStatus(query, parameters, tryNumber + 1);
@@ -432,7 +428,7 @@ class QueryResult {
this.update({
job: {
error: "failed communicating with server. Please check your Internet connection and try again.",
status: ExecutionStatus.FAILED,
status: 4,
},
});
});

@@ -2,7 +2,6 @@ import moment from "moment";
import debug from "debug";
import Mustache from "mustache";
import { axios } from "@/services/axios";
import { ExecutionStatus } from "@/services/query-result";
import {
zipObject,
isEmpty,
@@ -104,7 +103,7 @@ export class Query {
return new QueryResult({
job: {
error: `missing ${valuesWord} for ${missingParams.join(", ")} ${paramsWord}.`,
status: ExecutionStatus.FAILED,
status: 4,
},
});
}
@@ -361,7 +360,7 @@ export class QueryResultError {

// eslint-disable-next-line class-methods-use-this
getStatus() {
return ExecutionStatus.FAILED;
return "failed";
}

// eslint-disable-next-line class-methods-use-this

@@ -43,18 +43,18 @@ function seedDatabase(seedValues) {

function buildServer() {
console.log("Building the server...");
execSync("docker compose build", { stdio: "inherit" });
execSync("docker compose -p cypress build", { stdio: "inherit" });
}

function startServer() {
console.log("Starting the server...");
execSync("docker compose up -d", { stdio: "inherit" });
execSync("docker compose run server create_db", { stdio: "inherit" });
execSync("docker compose -p cypress up -d", { stdio: "inherit" });
execSync("docker compose -p cypress run server create_db", { stdio: "inherit" });
}

function stopServer() {
console.log("Stopping the server...");
execSync("docker compose down", { stdio: "inherit" });
execSync("docker compose -p cypress down", { stdio: "inherit" });
}

function runCypressCI() {
@@ -68,7 +68,7 @@ function runCypressCI() {
}

execSync(
"docker compose run --name cypress cypress ./node_modules/.bin/percy exec -t 300 -- ./node_modules/.bin/cypress run $CYPRESS_OPTIONS",
"COMMIT_INFO_MESSAGE=$(git show -s --format=%s) docker compose run --name cypress cypress ./node_modules/.bin/percy exec -t 300 -- ./node_modules/.bin/cypress run $CYPRESS_OPTIONS",
{ stdio: "inherit" }
);
}
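Every compose invocation now carries -p cypress, so the CI containers are grouped under an explicit project name instead of the directory-derived default and can be started, inspected, and torn down as a unit. For example, an equivalent one-off status check (assuming Docker Compose v2 is installed):

// Illustrative only: list the containers of the namespaced project
const { execSync } = require("child_process");
execSync("docker compose -p cypress ps", { stdio: "inherit" });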

@@ -53,12 +53,11 @@ describe("Dashboard Sharing", () => {
};

const dashboardUrl = this.dashboardUrl;
cy.createQuery({ options }).then(({ id: queryId, name: queryName }) => {
cy.createQuery({ options }).then(({ id: queryId }) => {
cy.visit(dashboardUrl);
editDashboard();
cy.getByTestId("AddWidgetButton").click();
cy.getByTestId("AddWidgetDialog").within(() => {
cy.get("input").type(queryName);
cy.get(`.query-selector-result[data-test="QueryId${queryId}"]`).click();
});
cy.contains("button", "Add to Dashboard").click();
@@ -179,12 +178,11 @@ describe("Dashboard Sharing", () => {
};

const dashboardUrl = this.dashboardUrl;
cy.createQuery({ options }).then(({ id: queryId, name: queryName }) => {
cy.createQuery({ options }).then(({ id: queryId }) => {
cy.visit(dashboardUrl);
editDashboard();
cy.getByTestId("AddWidgetButton").click();
cy.getByTestId("AddWidgetDialog").within(() => {
cy.get("input").type(queryName);
cy.get(`.query-selector-result[data-test="QueryId${queryId}"]`).click();
});
cy.contains("button", "Add to Dashboard").click();

@@ -18,12 +18,11 @@ describe("Widget", () => {
};

it("adds widget", function() {
cy.createQuery().then(({ id: queryId, name: queryName }) => {
cy.createQuery().then(({ id: queryId }) => {
cy.visit(this.dashboardUrl);
editDashboard();
cy.getByTestId("AddWidgetButton").click();
cy.getByTestId("AddWidgetDialog").within(() => {
cy.get("input").type(queryName);
cy.get(`.query-selector-result[data-test="QueryId${queryId}"]`).click();
});
cy.contains("button", "Add to Dashboard").click();

@@ -2,16 +2,14 @@ import { dragParam } from "../../support/parameters";
import dayjs from "dayjs";

function openAndSearchAntdDropdown(testId, paramOption) {
cy.getByTestId(testId)
.find(".ant-select-selection-search-input")
.type(paramOption, { force: true });
cy.getByTestId(testId).find(".ant-select-selection-search-input").type(paramOption, { force: true });
}

describe("Parameter", () => {
const expectDirtyStateChange = edit => {
const expectDirtyStateChange = (edit) => {
cy.getByTestId("ParameterName-test-parameter")
.find(".parameter-input")
.should($el => {
.should(($el) => {
assert.isUndefined($el.data("dirty"));
});

@@ -19,7 +17,7 @@ describe("Parameter", () => {

cy.getByTestId("ParameterName-test-parameter")
.find(".parameter-input")
.should($el => {
.should(($el) => {
assert.isTrue($el.data("dirty"));
});
};
@@ -42,9 +40,7 @@ describe("Parameter", () => {
});

it("updates the results after clicking Apply", () => {
cy.getByTestId("ParameterName-test-parameter")
.find("input")
.type("Redash");
cy.getByTestId("ParameterName-test-parameter").find("input").type("Redash");

cy.getByTestId("ParameterApplyButton").click();

@@ -53,13 +49,66 @@ describe("Parameter", () => {

it("sets dirty state when edited", () => {
expectDirtyStateChange(() => {
cy.getByTestId("ParameterName-test-parameter")
.find("input")
.type("Redash");
cy.getByTestId("ParameterName-test-parameter").find("input").type("Redash");
});
});
});

describe("Text Pattern Parameter", () => {
beforeEach(() => {
const queryData = {
name: "Text Pattern Parameter",
query: "SELECT '{{test-parameter}}' AS parameter",
options: {
parameters: [{ name: "test-parameter", title: "Test Parameter", type: "text-pattern", regex: "a.*a" }],
},
};

cy.createQuery(queryData, false).then(({ id }) => cy.visit(`/queries/${id}/source`));
});

it("updates the results after clicking Apply", () => {
cy.getByTestId("ParameterName-test-parameter").find("input").type("{selectall}arta");

cy.getByTestId("ParameterApplyButton").click();

cy.getByTestId("TableVisualization").should("contain", "arta");

cy.getByTestId("ParameterName-test-parameter").find("input").type("{selectall}arounda");

cy.getByTestId("ParameterApplyButton").click();

cy.getByTestId("TableVisualization").should("contain", "arounda");
});

it("throws error message with invalid query request", () => {
cy.getByTestId("ParameterName-test-parameter").find("input").type("{selectall}arta");

cy.getByTestId("ParameterApplyButton").click();

cy.getByTestId("ParameterName-test-parameter").find("input").type("{selectall}abcab");

cy.getByTestId("ParameterApplyButton").click();

cy.getByTestId("QueryExecutionStatus").should("exist");
});

it("sets dirty state when edited", () => {
expectDirtyStateChange(() => {
cy.getByTestId("ParameterName-test-parameter").find("input").type("{selectall}arta");
});
});

it("doesn't let user save invalid regex", () => {
cy.get(".fa-cog").click();
cy.getByTestId("RegexPatternInput").type("{selectall}[");
cy.contains("Invalid Regex Pattern").should("exist");
cy.getByTestId("SaveParameterSettings").click();
cy.get(".fa-cog").click();
cy.getByTestId("RegexPatternInput").should("not.equal", "[");
});
});

describe("Number Parameter", () => {
beforeEach(() => {
const queryData = {
@@ -74,17 +123,13 @@ describe("Parameter", () => {
});

it("updates the results after clicking Apply", () => {
cy.getByTestId("ParameterName-test-parameter")
.find("input")
.type("{selectall}42");
cy.getByTestId("ParameterName-test-parameter").find("input").type("{selectall}42");

cy.getByTestId("ParameterApplyButton").click();

cy.getByTestId("TableVisualization").should("contain", 42);

cy.getByTestId("ParameterName-test-parameter")
.find("input")
.type("{selectall}31415");
cy.getByTestId("ParameterName-test-parameter").find("input").type("{selectall}31415");

cy.getByTestId("ParameterApplyButton").click();

@@ -93,9 +138,7 @@ describe("Parameter", () => {

it("sets dirty state when edited", () => {
expectDirtyStateChange(() => {
cy.getByTestId("ParameterName-test-parameter")
.find("input")
.type("{selectall}42");
cy.getByTestId("ParameterName-test-parameter").find("input").type("{selectall}42");
});
});
});
@@ -119,10 +162,7 @@ describe("Parameter", () => {
openAndSearchAntdDropdown("ParameterName-test-parameter", "value2"); // asserts option filter prop

// only the filtered option should be on the DOM
cy.get(".ant-select-item-option")
.should("have.length", 1)
.and("contain", "value2")
.click();
cy.get(".ant-select-item-option").should("have.length", 1).and("contain", "value2").click();

cy.getByTestId("ParameterApplyButton").click();
// ensure that query is being executed
@@ -140,12 +180,10 @@ describe("Parameter", () => {
SaveParameterSettings
`);

cy.getByTestId("ParameterName-test-parameter")
.find(".ant-select-selection-search")
.click();
cy.getByTestId("ParameterName-test-parameter").find(".ant-select-selection-search").click();

// select all unselected options
cy.get(".ant-select-item-option").each($option => {
cy.get(".ant-select-item-option").each(($option) => {
if (!$option.hasClass("ant-select-item-option-selected")) {
cy.wrap($option).click();
}
@@ -160,9 +198,7 @@ describe("Parameter", () => {

it("sets dirty state when edited", () => {
expectDirtyStateChange(() => {
cy.getByTestId("ParameterName-test-parameter")
.find(".ant-select")
.click();
cy.getByTestId("ParameterName-test-parameter").find(".ant-select").click();

cy.contains(".ant-select-item-option", "value2").click();
});
@@ -176,7 +212,7 @@ describe("Parameter", () => {
name: "Dropdown Query",
query: "",
};
cy.createQuery(dropdownQueryData, true).then(dropdownQuery => {
cy.createQuery(dropdownQueryData, true).then((dropdownQuery) => {
const queryData = {
name: "Query Based Dropdown Parameter",
query: "SELECT '{{test-parameter}}' AS parameter",
@@ -208,7 +244,7 @@ describe("Parameter", () => {
SELECT 'value2' AS name, 2 AS value UNION ALL
SELECT 'value3' AS name, 3 AS value`,
};
cy.createQuery(dropdownQueryData, true).then(dropdownQuery => {
cy.createQuery(dropdownQueryData, true).then((dropdownQuery) => {
const queryData = {
name: "Query Based Dropdown Parameter",
query: "SELECT '{{test-parameter}}' AS parameter",
@@ -234,10 +270,7 @@ describe("Parameter", () => {
openAndSearchAntdDropdown("ParameterName-test-parameter", "value2"); // asserts option filter prop

// only the filtered option should be on the DOM
cy.get(".ant-select-item-option")
.should("have.length", 1)
.and("contain", "value2")
.click();
cy.get(".ant-select-item-option").should("have.length", 1).and("contain", "value2").click();

cy.getByTestId("ParameterApplyButton").click();
// ensure that query is being executed
@@ -255,12 +288,10 @@ describe("Parameter", () => {
SaveParameterSettings
`);

cy.getByTestId("ParameterName-test-parameter")
.find(".ant-select")
.click();
cy.getByTestId("ParameterName-test-parameter").find(".ant-select").click();

// make sure all options are unselected and select all
cy.get(".ant-select-item-option").each($option => {
cy.get(".ant-select-item-option").each(($option) => {
expect($option).not.to.have.class("ant-select-dropdown-menu-item-selected");
cy.wrap($option).click();
});
@@ -274,14 +305,10 @@ describe("Parameter", () => {
});
});

const selectCalendarDate = date => {
cy.getByTestId("ParameterName-test-parameter")
.find("input")
.click();
const selectCalendarDate = (date) => {
cy.getByTestId("ParameterName-test-parameter").find("input").click();

cy.get(".ant-picker-panel")
.contains(".ant-picker-cell-inner", date)
.click();
cy.get(".ant-picker-panel").contains(".ant-picker-cell-inner", date).click();
};

describe("Date Parameter", () => {
@@ -303,10 +330,10 @@ describe("Parameter", () => {
});

afterEach(() => {
cy.clock().then(clock => clock.restore());
cy.clock().then((clock) => clock.restore());
});

it("updates the results after selecting a date", function() {
it("updates the results after selecting a date", function () {
selectCalendarDate("15");

cy.getByTestId("ParameterApplyButton").click();
@@ -314,12 +341,10 @@ describe("Parameter", () => {
cy.getByTestId("TableVisualization").should("contain", dayjs(this.now).format("15/MM/YY"));
});

it("allows picking a dynamic date", function() {
it("allows picking a dynamic date", function () {
cy.getByTestId("DynamicButton").click();

cy.getByTestId("DynamicButtonMenu")
.contains("Today/Now")
.click();
cy.getByTestId("DynamicButtonMenu").contains("Today/Now").click();

cy.getByTestId("ParameterApplyButton").click();

@@ -350,14 +375,11 @@ describe("Parameter", () => {
});

afterEach(() => {
cy.clock().then(clock => clock.restore());
cy.clock().then((clock) => clock.restore());
});

it("updates the results after selecting a date and clicking in ok", function() {
cy.getByTestId("ParameterName-test-parameter")
.find("input")
.as("Input")
.click();
it("updates the results after selecting a date and clicking in ok", function () {
cy.getByTestId("ParameterName-test-parameter").find("input").as("Input").click();

selectCalendarDate("15");

@@ -368,27 +390,20 @@ describe("Parameter", () => {
cy.getByTestId("TableVisualization").should("contain", dayjs(this.now).format("YYYY-MM-15 HH:mm"));
});

it("shows the current datetime after clicking in Now", function() {
cy.getByTestId("ParameterName-test-parameter")
.find("input")
.as("Input")
.click();
it("shows the current datetime after clicking in Now", function () {
cy.getByTestId("ParameterName-test-parameter").find("input").as("Input").click();

cy.get(".ant-picker-panel")
.contains("Now")
.click();
cy.get(".ant-picker-panel").contains("Now").click();

cy.getByTestId("ParameterApplyButton").click();

cy.getByTestId("TableVisualization").should("contain", dayjs(this.now).format("YYYY-MM-DD HH:mm"));
});

it("allows picking a dynamic date", function() {
it("allows picking a dynamic date", function () {
cy.getByTestId("DynamicButton").click();

cy.getByTestId("DynamicButtonMenu")
.contains("Today/Now")
.click();
cy.getByTestId("DynamicButtonMenu").contains("Today/Now").click();

cy.getByTestId("ParameterApplyButton").click();

@@ -397,31 +412,20 @@ describe("Parameter", () => {

it("sets dirty state when edited", () => {
expectDirtyStateChange(() => {
cy.getByTestId("ParameterName-test-parameter")
.find("input")
.click();
cy.getByTestId("ParameterName-test-parameter").find("input").click();

cy.get(".ant-picker-panel")
.contains("Now")
.click();
cy.get(".ant-picker-panel").contains("Now").click();
});
});
});

describe("Date Range Parameter", () => {
const selectCalendarDateRange = (startDate, endDate) => {
cy.getByTestId("ParameterName-test-parameter")
.find("input")
.first()
.click();
cy.getByTestId("ParameterName-test-parameter").find("input").first().click();

cy.get(".ant-picker-panel")
.contains(".ant-picker-cell-inner", startDate)
.click();
cy.get(".ant-picker-panel").contains(".ant-picker-cell-inner", startDate).click();

cy.get(".ant-picker-panel")
.contains(".ant-picker-cell-inner", endDate)
.click();
cy.get(".ant-picker-panel").contains(".ant-picker-cell-inner", endDate).click();
};

beforeEach(() => {
@@ -442,10 +446,10 @@ describe("Parameter", () => {
});

afterEach(() => {
cy.clock().then(clock => clock.restore());
cy.clock().then((clock) => clock.restore());
});

it("updates the results after selecting a date range", function() {
it("updates the results after selecting a date range", function () {
selectCalendarDateRange("15", "20");

cy.getByTestId("ParameterApplyButton").click();
@@ -457,12 +461,10 @@ describe("Parameter", () => {
);
});

it("allows picking a dynamic date range", function() {
it("allows picking a dynamic date range", function () {
cy.getByTestId("DynamicButton").click();

cy.getByTestId("DynamicButtonMenu")
.contains("Last month")
.click();
cy.getByTestId("DynamicButtonMenu").contains("Last month").click();

cy.getByTestId("ParameterApplyButton").click();

@@ -479,15 +481,10 @@ describe("Parameter", () => {
});

describe("Apply Changes", () => {
const expectAppliedChanges = apply => {
cy.getByTestId("ParameterName-test-parameter-1")
.find("input")
.as("Input")
.type("Redash");
const expectAppliedChanges = (apply) => {
cy.getByTestId("ParameterName-test-parameter-1").find("input").as("Input").type("Redash");

cy.getByTestId("ParameterName-test-parameter-2")
.find("input")
.type("Redash");
cy.getByTestId("ParameterName-test-parameter-2").find("input").type("Redash");

cy.location("search").should("not.contain", "Redash");

@@ -523,10 +520,7 @@ describe("Parameter", () => {
it("shows and hides according to parameter dirty state", () => {
cy.getByTestId("ParameterApplyButton").should("not.be", "visible");

cy.getByTestId("ParameterName-test-parameter-1")
.find("input")
.as("Param")
.type("Redash");
cy.getByTestId("ParameterName-test-parameter-1").find("input").as("Param").type("Redash");

cy.getByTestId("ParameterApplyButton").should("be.visible");

@@ -536,21 +530,13 @@ describe("Parameter", () => {
});

it("updates dirty counter", () => {
cy.getByTestId("ParameterName-test-parameter-1")
.find("input")
.type("Redash");
cy.getByTestId("ParameterName-test-parameter-1").find("input").type("Redash");

cy.getByTestId("ParameterApplyButton")
.find(".ant-badge-count p.current")
.should("contain", "1");
cy.getByTestId("ParameterApplyButton").find(".ant-badge-count p.current").should("contain", "1");

cy.getByTestId("ParameterName-test-parameter-2")
.find("input")
.type("Redash");
cy.getByTestId("ParameterName-test-parameter-2").find("input").type("Redash");

cy.getByTestId("ParameterApplyButton")
.find(".ant-badge-count p.current")
.should("contain", "2");
cy.getByTestId("ParameterApplyButton").find(".ant-badge-count p.current").should("contain", "2");
});

it('applies changes from "Apply Changes" button', () => {
@@ -560,16 +546,13 @@ describe("Parameter", () => {
});

it('applies changes from "alt+enter" keyboard shortcut', () => {
expectAppliedChanges(input => {
expectAppliedChanges((input) => {
input.type("{alt}{enter}");
});
});

it('disables "Execute" button', () => {
cy.getByTestId("ParameterName-test-parameter-1")
.find("input")
.as("Input")
.type("Redash");
cy.getByTestId("ParameterName-test-parameter-1").find("input").as("Input").type("Redash");
cy.getByTestId("ExecuteButton").should("be.disabled");

cy.get("@Input").clear();
@@ -594,15 +577,12 @@ describe("Parameter", () => {

cy.createQuery(queryData, false).then(({ id }) => cy.visit(`/queries/${id}/source`));

cy.get(".parameter-block")
.first()
.invoke("width")
.as("paramWidth");
cy.get(".parameter-block").first().invoke("width").as("paramWidth");

cy.get("body").type("{alt}D"); // hide schema browser
});

it("is possible to rearrange parameters", function() {
it("is possible to rearrange parameters", function () {
cy.server();
cy.route("POST", "**/api/queries/*").as("QuerySave");

@@ -0,0 +1,33 @@
|
||||
const loremIpsum =
|
||||
"Lorem ipsum dolor sit amet consectetur adipiscing elit" +
|
||||
"sed do eiusmod tempor incididunt ut labore et dolore magna aliqua";
|
||||
|
||||
export const query = `
|
||||
SELECT '${loremIpsum}' AS a, '${loremIpsum}' AS b, '${loremIpsum}' AS c, '${loremIpsum}' AS d, '${loremIpsum}' as e
|
||||
`;
|
||||
|
||||
export const config = {
|
||||
itemsPerPage: 10,
|
||||
columns: [
|
||||
{
|
||||
name: "a",
|
||||
displayAs: "string",
|
||||
},
|
||||
{
|
||||
name: "b",
|
||||
displayAs: "string",
|
||||
},
|
||||
{
|
||||
name: "c",
|
||||
displayAs: "string",
|
||||
},
|
||||
{
|
||||
name: "d",
|
||||
displayAs: "string",
|
||||
},
|
||||
{
|
||||
name: "e",
|
||||
displayAs: "string",
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -8,6 +8,7 @@ import * as AllCellTypes from "./.mocks/all-cell-types";
import * as MultiColumnSort from "./.mocks/multi-column-sort";
import * as SearchInData from "./.mocks/search-in-data";
import * as LargeDataset from "./.mocks/large-dataset";
import * as WideDataSet from "./.mocks/wide-dataset";

function prepareVisualization(query, type, name, options) {
  return cy
@@ -98,6 +99,50 @@ describe("Table", () => {
    });
  });

  describe("Fixing columns", () => {
    it("fixes the correct number of columns", () => {
      const { query, config } = WideDataSet;
      prepareVisualization(query, "TABLE", "All cell types", config);
      cy.getByTestId("EditVisualization").click();
      cy.contains("span", "Grid").click();
      cy.getByTestId("FixedColumns").click();
      cy.contains(".ant-select-item-option-content", "1").click();
      cy.contains("Save").click();
      // eslint-disable-next-line cypress/no-unnecessary-waiting
      cy.wait(500); //add some waiting to make sure table visualization is saved

      cy.get(".ant-table-thead")
        .find("th.ant-table-cell-fix-left")
        .then(fixedCols => {
          expect(fixedCols.length).to.equal(1);
        });

      cy.get(".ant-table-content").scrollTo("right", { duration: 1000 });
      cy.get(".ant-table-content").scrollTo("left", { duration: 1000 });
    });

    it("doesn't let user fix too many columns", () => {
      const { query, config } = MultiColumnSort;
      prepareVisualization(query, "TABLE", "Test data", config);
      cy.getByTestId("EditVisualization").click();
      cy.contains("span", "Grid").click();
      cy.getByTestId("FixedColumns").click();
      cy.get(".ant-select-item-option-content");
      cy.contains(".ant-select-item-option-content", "3").should("not.exist");
      cy.contains(".ant-select-item-option-content", "4").should("not.exist");
    });

    it("doesn't cause issues when freezing column off of page", () => {
      const { query, config } = WideDataSet;
      prepareVisualization(query, "TABLE", "Test data", config);
      cy.getByTestId("EditVisualization").click();
      cy.contains("span", "Grid").click();
      cy.getByTestId("FixedColumns").click();
      cy.contains(".ant-select-item-option-content", "4").click();
      cy.contains("Save").click();
    });
  });

  it("searches in multiple columns", () => {
    const { query, config } = SearchInData;
    prepareVisualization(query, "TABLE", "Search", config).then(({ visualizationId }) => {
@@ -2,12 +2,12 @@

const { extend, get, merge, find } = Cypress._;

const post = options =>
const post = (options) =>
  cy
    .getCookie("csrf_token")
    .then(csrf => cy.request({ ...options, method: "POST", headers: { "X-CSRF-TOKEN": csrf.value } }));
    .then((csrf) => cy.request({ ...options, method: "POST", headers: { "X-CSRF-TOKEN": csrf.value } }));

Cypress.Commands.add("createDashboard", name => {
Cypress.Commands.add("createDashboard", (name) => {
  return post({ url: "api/dashboards", body: { name } }).then(({ body }) => body);
});

@@ -28,7 +28,7 @@ Cypress.Commands.add("createQuery", (data, shouldPublish = true) => {
  // eslint-disable-next-line cypress/no-assigning-return-values
  let request = post({ url: "/api/queries", body: merged }).then(({ body }) => body);
  if (shouldPublish) {
    request = request.then(query =>
    request = request.then((query) =>
      post({ url: `/api/queries/${query.id}`, body: { is_draft: false } }).then(() => query)
    );
  }
@@ -86,6 +86,7 @@ Cypress.Commands.add("addWidget", (dashboardId, visualizationId, options = {}) =
Cypress.Commands.add("createAlert", (queryId, options = {}, name) => {
  const defaultOptions = {
    column: "?column?",
    selector: "first",
    op: "greater than",
    rearm: 0,
    value: 1,
@@ -109,7 +110,7 @@ Cypress.Commands.add("createUser", ({ name, email, password }) => {
    url: "api/users?no_invite=yes",
    body: { name, email },
    failOnStatusCode: false,
  }).then(xhr => {
  }).then((xhr) => {
    const { status, body } = xhr;
    if (status < 200 || status > 400) {
      throw new Error(xhr);
@@ -146,7 +147,7 @@ Cypress.Commands.add("getDestinations", () => {
Cypress.Commands.add("addDestinationSubscription", (alertId, destinationName) => {
  return cy
    .getDestinations()
    .then(destinations => {
    .then((destinations) => {
      const destination = find(destinations, { name: destinationName });
      if (!destination) {
        throw new Error("Destination not found");
@@ -166,6 +167,6 @@ Cypress.Commands.add("addDestinationSubscription", (alertId, destinationName) =>
  });
});

Cypress.Commands.add("updateOrgSettings", settings => {
Cypress.Commands.add("updateOrgSettings", (settings) => {
  return post({ url: "api/settings/organization", body: settings }).then(({ body }) => body);
});

@@ -1,12 +1,12 @@
export function expectTableToHaveLength(length) {
  cy.getByTestId("TableVisualization")
    .find("tbody tr")
    .find("tbody tr.ant-table-row")
    .should("have.length", length);
}

export function expectFirstColumnToHaveMembers(values) {
  cy.getByTestId("TableVisualization")
    .find("tbody tr td:first-child")
    .find("tbody tr.ant-table-row td:first-child")
    .then($cell => Cypress.$.map($cell, item => Cypress.$(item).text()))
    .then(firstColumnCells => expect(firstColumnCells).to.have.members(values));
}

91
compose.yaml
@@ -1,81 +1,70 @@
# This configuration file is for the **development** setup.
# For a production example please refer to getredash/setup repository on GitHub.
x-redash-service: &redash-service
  build:
    context: .
    args:
      skip_frontend_build: "true" # set to empty string to build
  volumes:
    - .:/app
  env_file:
    - .env
x-redash-environment: &redash-environment
  REDASH_LOG_LEVEL: "INFO"
  REDASH_REDIS_URL: "redis://redis:6379/0"
  REDASH_DATABASE_URL: "postgresql://postgres@postgres/postgres"
  REDASH_RATELIMIT_ENABLED: "false"
  REDASH_MAIL_DEFAULT_SENDER: "redash@example.com"
  REDASH_MAIL_SERVER: "email"
  REDASH_MAIL_PORT: 1025
  REDASH_ENFORCE_CSRF: "true"
  REDASH_GUNICORN_TIMEOUT: 60
  # Set secret keys in the .env file
services:
  server:
    extends:
      file: compose.base.yaml
      service: .redash
    command: server
    <<: *redash-service
    command: dev_server
    depends_on:
      - postgres
      - redis
    ports:
      - "${REDASH_PORT:-5001}:5000"
      - "5001:5000"
      - "5678:5678"
    environment:
      <<: *redash-environment
      PYTHONUNBUFFERED: 0
  scheduler:
    extends:
      file: compose.base.yaml
      service: .redash
    profiles:
      - e2e
      - local
    command: scheduler
    depends_on:
      - server
  worker:
    extends:
      file: compose.base.yaml
      service: .redash
    profiles:
      - e2e
      - local
    command: worker
    <<: *redash-service
    command: dev_scheduler
    depends_on:
      - server
    environment:
      <<: *redash-environment
  worker:
    <<: *redash-service
    command: dev_worker
    depends_on:
      - server
    environment:
      <<: *redash-environment
      PYTHONUNBUFFERED: 0
  redis:
    image: redis:7-alpine
    restart: unless-stopped
  postgres:
    image: postgres:16-alpine
    image: pgautoupgrade/pgautoupgrade:latest
    ports:
      - "${POSTGRES_PORT:-15432}:5432"
      - "15432:5432"
    # The following turns the DB into less durable, but gains significant performance improvements for the tests run (x3
    # improvement on my personal machine). We should consider moving this into a dedicated Docker Compose configuration for
    # tests.
    command: postgres -c fsync=off -c full_page_writes=off -c synchronous_commit=OFF
    command: "postgres -c fsync=off -c full_page_writes=off -c synchronous_commit=OFF"
    restart: unless-stopped
    environment:
      POSTGRES_HOST_AUTH_METHOD: trust
      POSTGRES_HOST_AUTH_METHOD: "trust"
  email:
    image: maildev/maildev
    ports:
      - "1080:1080"
      - "1025:1025"
    restart: unless-stopped
  cypress:
    ipc: host
    build:
      context: .
      dockerfile: Dockerfile.cypress
    profiles:
      - e2e
    depends_on:
      - server
      - worker
      - scheduler
    environment:
      CYPRESS_baseUrl: http://server:5000
      PERCY_TOKEN: ${PERCY_TOKEN:-""}
      PERCY_BRANCH: ${PERCY_BRANCH:-""}
      PERCY_COMMIT: ${PERCY_COMMIT:-""}
      PERCY_PULL_REQUEST: ${PERCY_PULL_REQUEST:-}
      COMMIT_INFO_BRANCH: ${COMMIT_INFO_BRANCH:-""}
      COMMIT_INFO_MESSAGE: ${COMMIT_INFO_MESSAGE:-""}
      COMMIT_INFO_AUTHOR: ${COMMIT_INFO_AUTHOR:-""}
      COMMIT_INFO_SHA: ${COMMIT_INFO_SHA:-""}
      COMMIT_INFO_REMOTE: ${COMMIT_INFO_REMOTE:-""}
      CYPRESS_PROJECT_ID: ${CYPRESS_PROJECT_ID:-""}
      CYPRESS_RECORD_KEY: ${CYPRESS_RECORD_KEY:-""}
      CYPRESS_COVERAGE: ${CYPRESS_COVERAGE:-true}

@@ -24,56 +24,56 @@ def upgrade():
        type_=JSONB(astext_type=sa.Text()),
        nullable=True,
        postgresql_using='options::jsonb',
        server_default=sa.text("'{}'::jsonb"))
    )
    op.alter_column('queries', 'schedule',
        existing_type=sa.Text(),
        type_=JSONB(astext_type=sa.Text()),
        nullable=True,
        postgresql_using='schedule::jsonb',
        server_default=sa.text("'{}'::jsonb"))
    )
    op.alter_column('events', 'additional_properties',
        existing_type=sa.Text(),
        type_=JSONB(astext_type=sa.Text()),
        nullable=True,
        postgresql_using='additional_properties::jsonb',
        server_default=sa.text("'{}'::jsonb"))
    )
    op.alter_column('organizations', 'settings',
        existing_type=sa.Text(),
        type_=JSONB(astext_type=sa.Text()),
        nullable=True,
        postgresql_using='settings::jsonb',
        server_default=sa.text("'{}'::jsonb"))
    )
    op.alter_column('alerts', 'options',
        existing_type=JSON(astext_type=sa.Text()),
        type_=JSONB(astext_type=sa.Text()),
        nullable=True,
        postgresql_using='options::jsonb',
        server_default=sa.text("'{}'::jsonb"))
    )
    op.alter_column('dashboards', 'options',
        existing_type=JSON(astext_type=sa.Text()),
        type_=JSONB(astext_type=sa.Text()),
        postgresql_using='options::jsonb',
        server_default=sa.text("'{}'::jsonb"))
    )
    op.alter_column('dashboards', 'layout',
        existing_type=sa.Text(),
        type_=JSONB(astext_type=sa.Text()),
        postgresql_using='layout::jsonb',
        server_default=sa.text("'{}'::jsonb"))
    )
    op.alter_column('changes', 'change',
        existing_type=JSON(astext_type=sa.Text()),
        type_=JSONB(astext_type=sa.Text()),
        postgresql_using='change::jsonb',
        server_default=sa.text("'{}'::jsonb"))
    )
    op.alter_column('visualizations', 'options',
        existing_type=sa.Text(),
        type_=JSONB(astext_type=sa.Text()),
        postgresql_using='options::jsonb',
        server_default=sa.text("'{}'::jsonb"))
    )
    op.alter_column('widgets', 'options',
        existing_type=sa.Text(),
        type_=JSONB(astext_type=sa.Text()),
        postgresql_using='options::jsonb',
        server_default=sa.text("'{}'::jsonb"))
    )


def downgrade():
@@ -83,53 +83,53 @@ def downgrade():
        type_=sa.Text(),
        postgresql_using='options::text',
        existing_nullable=True,
        server_default=sa.text("'{}'::text"))
    )
    op.alter_column('queries', 'schedule',
        existing_type=JSONB(astext_type=sa.Text()),
        type_=sa.Text(),
        postgresql_using='schedule::text',
        existing_nullable=True,
        server_default=sa.text("'{}'::text"))
    )
    op.alter_column('events', 'additional_properties',
        existing_type=JSONB(astext_type=sa.Text()),
        type_=sa.Text(),
        postgresql_using='additional_properties::text',
        existing_nullable=True,
        server_default=sa.text("'{}'::text"))
    )
    op.alter_column('organizations', 'settings',
        existing_type=JSONB(astext_type=sa.Text()),
        type_=sa.Text(),
        postgresql_using='settings::text',
        existing_nullable=True,
        server_default=sa.text("'{}'::text"))
    )
    op.alter_column('alerts', 'options',
        existing_type=JSONB(astext_type=sa.Text()),
        type_=JSON(astext_type=sa.Text()),
        postgresql_using='options::json',
        existing_nullable=True,
        server_default=sa.text("'{}'::json"))
    )
    op.alter_column('dashboards', 'options',
        existing_type=JSONB(astext_type=sa.Text()),
        type_=JSON(astext_type=sa.Text()),
        postgresql_using='options::json',
        server_default=sa.text("'{}'::json"))
    )
    op.alter_column('dashboards', 'layout',
        existing_type=JSONB(astext_type=sa.Text()),
        type_=sa.Text(),
        postgresql_using='layout::text',
        server_default=sa.text("'{}'::text"))
    )
    op.alter_column('changes', 'change',
        existing_type=JSONB(astext_type=sa.Text()),
        type_=JSON(astext_type=sa.Text()),
        postgresql_using='change::json',
        server_default=sa.text("'{}'::json"))
    )
    op.alter_column('visualizations', 'options',
        type_=sa.Text(),
        existing_type=JSONB(astext_type=sa.Text()),
        postgresql_using='options::text',
        server_default=sa.text("'{}'::text"))
    )
    op.alter_column('widgets', 'options',
        type_=sa.Text(),
        existing_type=JSONB(astext_type=sa.Text()),
        postgresql_using='options::text',
        server_default=sa.text("'{}'::text"))
    )

@@ -15,6 +15,7 @@ from redash import settings
from redash.utils.configuration import ConfigurationContainer
from redash.models.types import (
    EncryptedConfiguration,
    Configuration,
    MutableDict,
    MutableList,
)
@@ -44,14 +45,7 @@ def upgrade():
            )
        ),
        ),
        sa.Column(
            "options",
            ConfigurationContainer.as_mutable(
                EncryptedConfiguration(
                    sa.Text, settings.DATASOURCE_SECRET_KEY, FernetEngine
                )
            ),
        ),
        sa.Column("options", ConfigurationContainer.as_mutable(Configuration)),
    )

    conn = op.get_bind()

@@ -14,7 +14,10 @@ from sqlalchemy_utils.types.encrypted.encrypted_type import FernetEngine
from redash import settings
from redash.utils.configuration import ConfigurationContainer
from redash.models.base import key_type
from redash.models.types import EncryptedConfiguration
from redash.models.types import (
    EncryptedConfiguration,
    Configuration,
)


# revision identifiers, used by Alembic.
@@ -42,14 +45,7 @@ def upgrade():
            )
        ),
        ),
        sa.Column(
            "options",
            ConfigurationContainer.as_mutable(
                EncryptedConfiguration(
                    sa.Text, settings.DATASOURCE_SECRET_KEY, FernetEngine
                )
            ),
        ),
        sa.Column("options", ConfigurationContainer.as_mutable(Configuration)),
    )

    conn = op.get_bind()

@@ -28,7 +28,7 @@ def upgrade():
        existing_nullable=True,
        existing_server_default=sa.text("'{}'::jsonb"))
    ### end Alembic commands ###


    update_query = """
    update users
    set details = details::jsonb || ('{"profile_image_url": "' || profile_image_url || '"}')::jsonb

@@ -1,6 +1,6 @@
{
  "name": "redash-client",
  "version": "24.05.0-dev",
  "version": "24.08.1-dev",
  "description": "The frontend part of Redash.",
  "main": "index.js",
  "scripts": {
@@ -24,7 +24,7 @@
    "jest": "TZ=Africa/Khartoum jest",
    "test": "run-s type-check jest",
    "test:watch": "jest --watch",
    "cypress": "COMPOSE_PROFILES=local node client/cypress/cypress.js",
    "cypress": "node client/cypress/cypress.js",
    "preinstall": "cd viz-lib && yarn link --link-folder ../.yarn",
    "postinstall": "(cd viz-lib && yarn --frozen-lockfile && yarn build:babel) && yarn link --link-folder ./.yarn @redash/viz"
  },

270
poetry.lock
generated
@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.

[[package]]
name = "adal"
@@ -515,13 +515,13 @@ graph = ["gremlinpython (==3.3.4)"]

[[package]]
name = "certifi"
version = "2023.11.17"
version = "2024.7.4"
description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.6"
files = [
    {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"},
    {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"},
    {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"},
    {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"},
]

[[package]]
@@ -891,47 +891,56 @@ files = [

[[package]]
name = "cryptography"
version = "41.0.6"
version = "42.0.8"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
optional = false
python-versions = ">=3.7"
files = [
    {file = "cryptography-41.0.6-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c"},
    {file = "cryptography-41.0.6-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b"},
    {file = "cryptography-41.0.6-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8"},
    {file = "cryptography-41.0.6-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86"},
    {file = "cryptography-41.0.6-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae"},
    {file = "cryptography-41.0.6-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d"},
    {file = "cryptography-41.0.6-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c"},
    {file = "cryptography-41.0.6-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596"},
    {file = "cryptography-41.0.6-cp37-abi3-win32.whl", hash = "sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660"},
    {file = "cryptography-41.0.6-cp37-abi3-win_amd64.whl", hash = "sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7"},
    {file = "cryptography-41.0.6-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c"},
    {file = "cryptography-41.0.6-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9"},
    {file = "cryptography-41.0.6-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da"},
    {file = "cryptography-41.0.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36"},
    {file = "cryptography-41.0.6-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65"},
    {file = "cryptography-41.0.6-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead"},
    {file = "cryptography-41.0.6-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09"},
    {file = "cryptography-41.0.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c"},
    {file = "cryptography-41.0.6-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed"},
    {file = "cryptography-41.0.6-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6"},
    {file = "cryptography-41.0.6-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43"},
    {file = "cryptography-41.0.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4"},
    {file = "cryptography-41.0.6.tar.gz", hash = "sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3"},
    {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"},
    {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"},
    {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902"},
    {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801"},
    {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949"},
    {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9"},
    {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583"},
    {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7"},
    {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b"},
    {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7"},
    {file = "cryptography-42.0.8-cp37-abi3-win32.whl", hash = "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2"},
    {file = "cryptography-42.0.8-cp37-abi3-win_amd64.whl", hash = "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba"},
    {file = "cryptography-42.0.8-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28"},
    {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e"},
    {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70"},
    {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c"},
    {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7"},
    {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e"},
    {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961"},
    {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1"},
    {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14"},
    {file = "cryptography-42.0.8-cp39-abi3-win32.whl", hash = "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c"},
    {file = "cryptography-42.0.8-cp39-abi3-win_amd64.whl", hash = "sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a"},
    {file = "cryptography-42.0.8-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe"},
    {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c"},
    {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71"},
    {file = "cryptography-42.0.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d"},
    {file = "cryptography-42.0.8-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c"},
    {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842"},
    {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648"},
    {file = "cryptography-42.0.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad"},
    {file = "cryptography-42.0.8.tar.gz", hash = "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2"},
]

[package.dependencies]
cffi = ">=1.12"
cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""}

[package.extras]
docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"]
docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"]
docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"]
nox = ["nox"]
pep8test = ["black", "check-sdist", "mypy", "ruff"]
pep8test = ["check-sdist", "click", "mypy", "ruff"]
sdist = ["build"]
ssh = ["bcrypt (>=3.1.5)"]
test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
test-randomorder = ["pytest-randomly"]

[[package]]
@@ -1984,13 +1993,13 @@ testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"]

[[package]]
name = "jinja2"
version = "3.1.3"
version = "3.1.4"
description = "A very fast and expressive template engine."
optional = false
python-versions = ">=3.7"
files = [
    {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"},
    {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"},
    {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"},
    {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"},
]

[package.dependencies]
@@ -3536,43 +3545,61 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"]

[[package]]
name = "pyodbc"
version = "4.0.28"
description = "DB API Module for ODBC"
version = "5.1.0"
description = "DB API module for ODBC"
optional = false
python-versions = "*"
python-versions = ">=3.8"
files = [
    {file = "pyodbc-4.0.28-cp27-cp27m-win32.whl", hash = "sha256:2217eb01091a207a9ffa457c49a63a1d0eb8514c810a23b901518348422fcf65"},
    {file = "pyodbc-4.0.28-cp27-cp27m-win_amd64.whl", hash = "sha256:ae35c455bfbadc631ee20df6657bfda0779bdc80badfd9d13741433dd78785e6"},
    {file = "pyodbc-4.0.28-cp27-none-macosx_10_15_x86_64.whl", hash = "sha256:f37f26ae909101465a085ef51b9dde35afc93b7c7e38c25b61b124b110aa9998"},
    {file = "pyodbc-4.0.28-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:5d1abca8f5bdab1515e300d05c63c25d072a123c7089a554290b5b9e83168eb6"},
    {file = "pyodbc-4.0.28-cp36-cp36m-win32.whl", hash = "sha256:c25e525e0576b1dfa067d3a6530e046a24006d89715026d2d5dbf6d4290093b9"},
    {file = "pyodbc-4.0.28-cp36-cp36m-win_amd64.whl", hash = "sha256:259b2554d2b8c9a6247871fec741b526f0b63a0e42676bd8f210e214a3015129"},
    {file = "pyodbc-4.0.28-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8ad9aa2a851242109141e4275c2a9b4d4379e00288959acd877501ee90aa3955"},
    {file = "pyodbc-4.0.28-cp37-cp37m-win32.whl", hash = "sha256:2908f73e5a374437fd7a38f14b09f2b96d742235bf2f819fb697f8922e35ddda"},
    {file = "pyodbc-4.0.28-cp37-cp37m-win_amd64.whl", hash = "sha256:a1a1687edef4319ae533e1d789c6c8241459f04af9e4db76e6e4045c530239de"},
    {file = "pyodbc-4.0.28-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4f3c788d231907f45ea329cd245b398b165d9d28809f55814240eea775a6b1cd"},
    {file = "pyodbc-4.0.28-cp38-cp38-win32.whl", hash = "sha256:93e495c51a5db027c2f7ee2c2c3fe9d6ea86b3a61392c7c8961a1818951868c8"},
    {file = "pyodbc-4.0.28-cp38-cp38-win_amd64.whl", hash = "sha256:49ba851be2d9d07cc1472b43febc93e3362c1e09ceb3eac84693a6690d090165"},
    {file = "pyodbc-4.0.28.tar.gz", hash = "sha256:510643354c4c687ed96bf7e7cec4d02d6c626ecf3e18696f5a0228dd6d11b769"},
    {file = "pyodbc-5.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:02fe9821711a2d14415eaeb4deab471d2c8b7034b107e524e414c0e133c42248"},
    {file = "pyodbc-5.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2cbdbd019756285dc44bc35238a3ed8dfaa454e8c8b2c3462f1710cfeebfb290"},
    {file = "pyodbc-5.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84df3bbce9bafe65abd25788d55c9f1da304f6115d70f25758ff8c85f3ce0517"},
    {file = "pyodbc-5.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:218bb75d4bc67075529a65ce8ec7daeed1d83c33dd7410450fbf68d43d184d28"},
    {file = "pyodbc-5.1.0-cp310-cp310-win32.whl", hash = "sha256:eae576b3b67d21d6f237e18bb5f3df8323a2258f52c3e3afeef79269704072a9"},
    {file = "pyodbc-5.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:c3b65343557f4c7753204e06f4c82c97ed212a636501f4bc27c5ce0e549eb3e8"},
    {file = "pyodbc-5.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aa6f46377da303bf79bcb4b559899507df4b2559f30dcfdf191358ee4b99f3ab"},
    {file = "pyodbc-5.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b19d7f44cfee89901e482f554a88177e83fae76b03c3f830e0023a195d840220"},
    {file = "pyodbc-5.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c36448322f8d6479d87c528cf52401a6ea4f509b9637750b67340382b4e1b40"},
    {file = "pyodbc-5.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c5e0cb79222aad4b31a3602e39b242683c29c6221a16ed43f45f18fd0b73659"},
    {file = "pyodbc-5.1.0-cp311-cp311-win32.whl", hash = "sha256:92caed9d445815ed3f7e5a1249e29a4600ebc1e99404df81b6ed7671074c9227"},
    {file = "pyodbc-5.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:a1bd14633e91b7a9814f4fd944c9ebb89fb7f1fd4710c4e3999b5ef041536347"},
    {file = "pyodbc-5.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d3d9cc4af703c4817b6e604315910b0cf5dcb68056d52b25ca072dd59c52dcbc"},
    {file = "pyodbc-5.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:406b8fa2133a7b6a713aa5187dba2d08cf763b5884606bed77610a7660fdfabe"},
    {file = "pyodbc-5.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8488c3818f12207650836c5c6f7352f9ff9f56a05a05512145995e497c0bbb1"},
    {file = "pyodbc-5.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0df69e3a500791b70b5748c68a79483b24428e4c16027b56aa0305e95c143a4"},
    {file = "pyodbc-5.1.0-cp312-cp312-win32.whl", hash = "sha256:aa4e02d3a9bf819394510b726b25f1566f8b3f0891ca400ad2d4c8b86b535b78"},
    {file = "pyodbc-5.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:33f4984af38872e7bdec78007a34e4d43ae72bf9d0bae3344e79d9d0db157c0e"},
    {file = "pyodbc-5.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:29425e2d366e7f5828b76c7993f412a3db4f18bd5bcee00186c00b5a5965e205"},
    {file = "pyodbc-5.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a2bbd2e75c77dee9f3cd100c3246110abaeb9af3f7fa304ccc2934ff9c6a4fa4"},
    {file = "pyodbc-5.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3602136a936bc0c1bb9722eb2fbf2042b3ff1ddccdc4688e514b82d4b831563b"},
    {file = "pyodbc-5.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bed1c843565d3a4fd8c332ebceaf33efe817657a0505eacb97dd1b786a985b0b"},
    {file = "pyodbc-5.1.0-cp38-cp38-win32.whl", hash = "sha256:735f6da3762e5856b5580be0ed96bb946948346ebd1e526d5169a5513626a67a"},
    {file = "pyodbc-5.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:c5bb4e43f6c72f5fa2c634570e0d761767d8ea49f39205229b812fb4d3fe05aa"},
    {file = "pyodbc-5.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:33f0f1d7764cefef6f787936bd6359670828a6086be67518ab951f1f7f503cda"},
    {file = "pyodbc-5.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:be3b1c36c31ec7d73d0b34a8ad8743573763fadd8f2bceef1e84408252b48dce"},
    {file = "pyodbc-5.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e71a51c252b503b4d753e21ed31e640015fc0d00202d42ea42f2396fcc924b4a"},
    {file = "pyodbc-5.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af5282cc8b667af97d76f4955250619a53f25486cbb6b1f45a06b781006ffa0b"},
    {file = "pyodbc-5.1.0-cp39-cp39-win32.whl", hash = "sha256:96b2a8dc27693a517e3aad3944a7faa8be95d40d7ec1eda51a1885162eedfa33"},
    {file = "pyodbc-5.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:e738c5eedb4a0cbab20cc008882f49b106054499db56864057c2530ff208cf32"},
    {file = "pyodbc-5.1.0.tar.gz", hash = "sha256:397feee44561a6580be08cedbe986436859563f4bb378f48224655c8e987ea60"},
]

[[package]]
name = "pyopenssl"
version = "23.2.0"
version = "24.2.1"
description = "Python wrapper module around the OpenSSL library"
optional = false
python-versions = ">=3.6"
python-versions = ">=3.7"
files = [
    {file = "pyOpenSSL-23.2.0-py3-none-any.whl", hash = "sha256:24f0dc5227396b3e831f4c7f602b950a5e9833d292c8e4a2e06b709292806ae2"},
    {file = "pyOpenSSL-23.2.0.tar.gz", hash = "sha256:276f931f55a452e7dea69c7173e984eb2a4407ce413c918aa34b55f82f9b8bac"},
    {file = "pyOpenSSL-24.2.1-py3-none-any.whl", hash = "sha256:967d5719b12b243588573f39b0c677637145c7a1ffedcd495a487e58177fbb8d"},
    {file = "pyopenssl-24.2.1.tar.gz", hash = "sha256:4247f0dbe3748d560dcbb2ff3ea01af0f9a1a001ef5f7c4c647956ed8cbf0e95"},
]

[package.dependencies]
cryptography = ">=38.0.0,<40.0.0 || >40.0.0,<40.0.1 || >40.0.1,<42"
cryptography = ">=41.0.5,<44"

[package.extras]
docs = ["sphinx (!=5.2.0,!=5.2.0.post0)", "sphinx-rtd-theme"]
test = ["flaky", "pretend", "pytest (>=3.0.1)"]
docs = ["sphinx (!=5.2.0,!=5.2.0.post0,!=7.2.5)", "sphinx-rtd-theme"]
test = ["pretend", "pytest (>=3.0.1)", "pytest-rerunfailures"]

[[package]]
name = "pyparsing"
@@ -3825,6 +3852,7 @@ files = [
    {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
    {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
    {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
    {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
    {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
    {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
    {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
@@ -4011,13 +4039,13 @@ files = [

[[package]]
name = "requests"
version = "2.31.0"
version = "2.32.3"
description = "Python HTTP for Humans."
optional = false
python-versions = ">=3.7"
python-versions = ">=3.8"
files = [
    {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
    {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
    {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"},
    {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"},
]

[package.dependencies]
@@ -4296,19 +4324,18 @@ tornado = ["tornado (>=5)"]

[[package]]
name = "setuptools"
version = "69.0.3"
version = "70.0.0"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
optional = false
python-versions = ">=3.8"
files = [
    {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"},
    {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"},
    {file = "setuptools-70.0.0-py3-none-any.whl", hash = "sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4"},
    {file = "setuptools-70.0.0.tar.gz", hash = "sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0"},
]

[package.extras]
docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]

[[package]]
name = "simple-salesforce"
@@ -4454,32 +4481,37 @@ files = [

[[package]]
name = "snowflake-connector-python"
version = "3.4.0"
version = "3.12.0"
description = "Snowflake Connector for Python"
optional = false
python-versions = ">=3.8"
files = [
    {file = "snowflake-connector-python-3.4.0.tar.gz", hash = "sha256:09939c300d4e40705db1388c9ba596dce7dd9ee4fa8eea0b6fd67b07756597cb"},
    {file = "snowflake_connector_python-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1cfb5de0fdd1f08ce3046bcec31d6aad2de0fb5196e8c1c2ebf0960748f8bfcf"},
    {file = "snowflake_connector_python-3.4.0-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:0b4ea9ffac7e8a7ef7f357116f59d2790c07f8bbc0650cf6a717ecaa275440bb"},
    {file = "snowflake_connector_python-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16c3ed32db6c2804413a766a4aa85eb6687f3e5334d5e1238a56be938ab0fe5e"},
    {file = "snowflake_connector_python-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c2683d8e0a0baf05bf946caafe2dcc525f57051869c45f9dcbc5ced5f5433b6"},
    {file = "snowflake_connector_python-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:389226a12ac56a6b78264a258183580c18c0bd5628ae7c48198d7f239f72fc44"},
    {file = "snowflake_connector_python-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7ed10a5bad779383e099c6c8124e350718d02f48dc7abb48cd3983687d881132"},
    {file = "snowflake_connector_python-3.4.0-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:93145ea700e548a1b5f7612ed9bd597b49dae85d1914fee62be165d1e8a6bb4f"},
    {file = "snowflake_connector_python-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed168e53cb0ef09c0788095833f22a1590effbb1eb9167ed21edcedeb4c9faeb"},
    {file = "snowflake_connector_python-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc7a6aa3b205022beb286cdaa157c2ca3017f2536fbd7d5b6bd6750dbd7861d1"},
    {file = "snowflake_connector_python-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:432a8b4d0c4194e346eea0bc9329747bb5f6e1a771177d0c33f917d2aef7e421"},
    {file = "snowflake_connector_python-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4434d1fd0c49c509c631830ca8abf3a3319e90c6993024702a5835991e97946b"},
    {file = "snowflake_connector_python-3.4.0-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:dcbeac81489ae6a9aac3eb4d35a05147ae8e346a6d95bd5d740b30bbf5342970"},
    {file = "snowflake_connector_python-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0aea046fb928afb86ccbd80ef8a65398044217172ccf82627f00e63316f10832"},
    {file = "snowflake_connector_python-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1fdc6ce9e6c21969cf4f8365b4aa93cc1622e8b14caf4b26d9d61b5551eda0d"},
    {file = "snowflake_connector_python-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:4d7ed67213b68e21ff87ae39068926a81dfd1a5d1b84fd6707163050a7c98801"},
    {file = "snowflake_connector_python-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9cb928b9e04ab5e3681b4f4aeefe0b68c0137aefb4b7363d204a29cc7e8341de"},
    {file = "snowflake_connector_python-3.4.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:843e74bba8c7e73c5d946b244df3d86ae691bb144ed73f9a9be77cdbb892769b"},
    {file = "snowflake_connector_python-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d71dcd83c9b97216622dc465dca2ed3f0a7e9e736b979d8798daa282f8a53b08"},
    {file = "snowflake_connector_python-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb7281f2481b924192ea3939f49d766122b59f58d4a9339536f1d2c1a8f86bd7"},
    {file = "snowflake_connector_python-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:9488e54f1ac2fea80d2a8d94e10552f19eb88db00a21c67a13e0ac4c79ca9a0b"},
    {file = "snowflake_connector_python-3.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:edf28df8be24845cfcec653b160d2b8c048d5cb0c85b051f4957f0b0aae1e493"},
    {file = "snowflake_connector_python-3.12.0-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:c2bbdbbb028d7d542815ed68b28200728aa6707b9354e3a447fdc8c7a34bcdce"},
    {file = "snowflake_connector_python-3.12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92c9a19a23033df709e63baa6ccdf6eff65210143a8c9c67a0a24bba862034b"},
    {file = "snowflake_connector_python-3.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d33d845e4c68d33e73a9f64100b53342c18607ac25c4f2a27dbed2078078d12"},
    {file = "snowflake_connector_python-3.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:c1d43bfaa885aab712f14f9ced232abe5023adfca7fbf7a7a0768a162523e9d6"},
    {file = "snowflake_connector_python-3.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6a0cc03fb44808f3ddc464ee272f141564c8daea14475e1df5c2a54c7acb2ddf"},
    {file = "snowflake_connector_python-3.12.0-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:564752d22accc43351b50f676b03aa9f2b441be2641e3cf9a7790faf54eff210"},
    {file = "snowflake_connector_python-3.12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27d6a1a180832c7b551d38df1094a70fb79917f90c57893b9ce7e219362f6c1"},
    {file = "snowflake_connector_python-3.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60675fd83022daef40541d717d006695149c512b283e35741b61a4f48ba537e9"},
    {file = "snowflake_connector_python-3.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:a567b937b0179d1e95a8ad7200943d286f38d0e76df90af10f747ed9149dd681"},
    {file = "snowflake_connector_python-3.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dc333fcfc383a8cab8bd7e890a7c76703e26598925a05954c75d2c50bff06071"},
    {file = "snowflake_connector_python-3.12.0-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:3c06bfba4a329fd4ec3feba0ada7b31f86ed4e156a9766bced52c2814d001fd2"},
    {file = "snowflake_connector_python-3.12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:acf84b07dd2f22adfaa7d52ccd6be1722bd5a0e2b1a9b08681c3851bea05768f"},
    {file = "snowflake_connector_python-3.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:019b8a61e5af689451d502df2af8793fc6f20b5b0a3548fd8ad03aa8b62e7f2d"},
    {file = "snowflake_connector_python-3.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:45f9b9678694f10571c1f7ec7d0d741663ad0ff61a71ae53aa71be47faa19978"},
    {file = "snowflake_connector_python-3.12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:21cbaef51fbed719de01155079df3d004cee963d3723c1ebdb8980923f893e04"},
    {file = "snowflake_connector_python-3.12.0-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:c86d4a7d49f42ea0bb34218cb49c401ba995892abcfb509ea749cd0a74a8b28a"},
    {file = "snowflake_connector_python-3.12.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1aa34aec0f96d7fc7271e38c68ee0d58529875d05e084afb4fc8f09b694643c4"},
    {file = "snowflake_connector_python-3.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2f621030b26a220711c64518e00059736b79c1da53afa6a8ce68b31c1941014"},
    {file = "snowflake_connector_python-3.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:368e46f1d079056e028bfe8f7171fabef62eb00bcf590df294220b7a5be5d56c"},
    {file = "snowflake_connector_python-3.12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2735e16fffded0900f7484030613b79699afc1ed4e5cff086bd139a0ce965594"},
    {file = "snowflake_connector_python-3.12.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:c06a8e2e12284b4a4d462d0073fb4983e90ad2d6a2382926f9e3409f06c81d0b"},
    {file = "snowflake_connector_python-3.12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:880e6e95171cd7374a86da14132fdfc4b622665f134561f4d43e3f35bdacf67d"},
    {file = "snowflake_connector_python-3.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e245b84c164433454ce49d78e6bcf5c2e62e25657358bf34ab533166e588f80"},
    {file = "snowflake_connector_python-3.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:85a5565b8813d164f33f32a825a70443008fe009aae050307f128a1ca892f9ed"},
    {file = "snowflake_connector_python-3.12.0.tar.gz", hash = "sha256:320e0b6f8cd8556e19c8b87249c931700238b2958313afc7a33108d67da87d82"},
]

[package.dependencies]
@@ -4487,24 +4519,24 @@ asn1crypto = ">0.24.0,<2.0.0"
certifi = ">=2017.4.17"
cffi = ">=1.9,<2.0.0"
charset-normalizer = ">=2,<4"
cryptography = ">=3.1.0,<42.0.0"
cryptography = ">=3.1.0,<43.0.0"
filelock = ">=3.5,<4"
idna = ">=2.5,<4"
packaging = "*"
platformdirs = ">=2.6.0,<4.0.0"
platformdirs = ">=2.6.0,<5.0.0"
pyjwt = "<3.0.0"
pyOpenSSL = ">=16.2.0,<24.0.0"
pyOpenSSL = ">=16.2.0,<25.0.0"
pytz = "*"
requests = "<3.0.0"
sortedcontainers = ">=2.4.0"
tomlkit = "*"
typing-extensions = ">=4.3,<5"
urllib3 = ">=1.21.1,<1.27"
urllib3 = {version = ">=1.21.1,<2.0.0", markers = "python_version < \"3.10\""}

[package.extras]
development = ["Cython", "coverage", "more-itertools", "numpy (<1.27.0)", "pendulum (!=2.1.1)", "pexpect", "pytest (<7.5.0)", "pytest-cov", "pytest-rerunfailures", "pytest-timeout", "pytest-xdist", "pytzdata"]
pandas = ["pandas (>=1.0.0,<2.1.0)", "pyarrow (>=10.0.1,<10.1.0)"]
secure-local-storage = ["keyring (!=16.1.0,<25.0.0)"]
pandas = ["pandas (>=1.0.0,<3.0.0)", "pyarrow"]
secure-local-storage = ["keyring (>=23.1.0,<26.0.0)"]

[[package]]
name = "sortedcontainers"
@@ -4592,31 +4624,29 @@ test = ["flake8 (>=2.4.0)", "isort (>=3.9.6)", "psycopg2 (>=2.4.6)", "pytest (>=

[[package]]
name = "sqlalchemy-utils"
version = "0.34.2"
version = "0.38.3"
description = "Various utility functions for SQLAlchemy."
optional = false
python-versions = "*"
python-versions = "~=3.6"
files = [
    {file = "SQLAlchemy-Utils-0.34.2.tar.gz", hash = "sha256:6689b29d7951c5c7c4d79fa6b8c95f9ff9ec708b07aa53f82060599bd14dcc88"},
    {file = "SQLAlchemy-Utils-0.38.3.tar.gz", hash = "sha256:9f9afba607a40455cf703adfa9846584bf26168a0c5a60a70063b70d65051f4d"},
    {file = "SQLAlchemy_Utils-0.38.3-py3-none-any.whl", hash = "sha256:5c13b5d08adfaa85f3d4e8ec09a75136216fad41346980d02974a70a77988bf9"},
]

[package.dependencies]
six = "*"
SQLAlchemy = ">=1.0"
SQLAlchemy = ">=1.3"

[package.extras]
anyjson = ["anyjson (>=0.3.3)"]
arrow = ["arrow (>=0.3.4)"]
babel = ["Babel (>=1.3)"]
color = ["colour (>=0.0.4)"]
encrypted = ["cryptography (>=0.6)"]
enum = ["enum34"]
intervals = ["intervals (>=0.7.1)"]
ipaddress = ["ipaddr"]
password = ["passlib (>=1.6,<2.0)"]
pendulum = ["pendulum (>=2.0.5)"]
phone = ["phonenumbers (>=5.9.2)"]
test = ["Jinja2 (>=2.3)", "Pygments (>=1.2)", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "isort (>=4.2.2)", "mock (==2.0.0)", "pg8000 (>=1.12.4)", "psycopg2 (>=2.5.1)", "pymysql", "pyodbc", "pytest (>=2.7.1)", "python-dateutil (>=2.6)", "pytz (>=2014.2)"]
test-all = ["Babel (>=1.3)", "Jinja2 (>=2.3)", "Pygments (>=1.2)", "anyjson (>=0.3.3)", "arrow (>=0.3.4)", "colour (>=0.0.4)", "cryptography (>=0.6)", "docutils (>=0.10)", "enum34", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "furl (>=0.4.1)", "intervals (>=0.7.1)", "ipaddr", "isort (>=4.2.2)", "mock (==2.0.0)", "passlib (>=1.6,<2.0)", "pg8000 (>=1.12.4)", "phonenumbers (>=5.9.2)", "psycopg2 (>=2.5.1)", "pymysql", "pyodbc", "pytest (>=2.7.1)", "python-dateutil", "python-dateutil (>=2.6)", "pytz (>=2014.2)"]
test = ["Jinja2 (>=2.3)", "Pygments (>=1.2)", "backports.zoneinfo", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "isort (>=4.2.2)", "pg8000 (>=1.12.4)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (>=2.7.1)", "python-dateutil (>=2.6)", "pytz (>=2014.2)"]
test-all = ["Babel (>=1.3)", "Jinja2 (>=2.3)", "Pygments (>=1.2)", "arrow (>=0.3.4)", "backports.zoneinfo", "colour (>=0.0.4)", "cryptography (>=0.6)", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "furl (>=0.4.1)", "intervals (>=0.7.1)", "isort (>=4.2.2)", "passlib (>=1.6,<2.0)", "pendulum (>=2.0.5)", "pg8000 (>=1.12.4)", "phonenumbers (>=5.9.2)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (>=2.7.1)", "python-dateutil", "python-dateutil (>=2.6)", "pytz (>=2014.2)"]
timezone = ["python-dateutil"]
url = ["furl (>=0.4.1)"]

@@ -4795,13 +4825,13 @@ files = [

[[package]]
name = "tomlkit"
version = "0.12.3"
version = "0.13.0"
description = "Style preserving TOML library"
optional = false
python-versions = ">=3.7"
python-versions = ">=3.8"
files = [
    {file = "tomlkit-0.12.3-py3-none-any.whl", hash = "sha256:b0a645a9156dc7cb5d3a1f0d4bab66db287fcb8e0430bdd4664a095ea16414ba"},
    {file = "tomlkit-0.12.3.tar.gz", hash = "sha256:75baf5012d06501f07bee5bf8e801b9f343e7aac5a92581f20f80ce632e6b5a4"},
    {file = "tomlkit-0.13.0-py3-none-any.whl", hash = "sha256:7075d3042d03b80f603482d69bf0c8f345c2b30e41699fd8883227f89972b264"},
    {file = "tomlkit-0.13.0.tar.gz", hash = "sha256:08ad192699734149f5b97b45f1f18dad7eb1b6d16bc72ad0c2335772650d7b72"},
]

[[package]]
@@ -4894,13 +4924,13 @@ files = [

[[package]]
name = "urllib3"
version = "1.26.18"
version = "1.26.19"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
files = [
    {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"},
    {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"},
    {file = "urllib3-1.26.19-py2.py3-none-any.whl", hash = "sha256:37a0344459b199fce0e80b0d3569837ec6b6937435c5244e7fd73fa6006830f3"},
    {file = "urllib3-1.26.19.tar.gz", hash = "sha256:3e3d753a8618b86d7de333b4223005f68720bcd6a7d2bcb9fbd2229ec7c1e429"},
]

[package.extras]
@@ -5213,18 +5243,18 @@ docs = ["Sphinx", "elementpath (>=4.1.5,<5.0.0)", "jinja2", "sphinx-rtd-theme"]

[[package]]
name = "zipp"
version = "3.17.0"
version = "3.19.1"
description = "Backport of pathlib-compatible object wrapper for zip files"
optional = false
python-versions = ">=3.8"
files = [
    {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"},
    {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"},
    {file = "zipp-3.19.1-py3-none-any.whl", hash = "sha256:2828e64edb5386ea6a52e7ba7cdb17bb30a73a858f5eb6eb93d8d36f5ea26091"},
    {file = "zipp-3.19.1.tar.gz", hash = "sha256:35427f6d5594f4acf82d25541438348c26736fa9b3afa2754bcd63cdb99d8e8f"},
]

[package.extras]
docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"]
testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"]
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"]

[[package]]
name = "zope-event"
@@ -5300,4 +5330,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
|
||||
[metadata]
|
||||
lock-version = "2.0"
|
||||
python-versions = ">=3.8,<3.11"
|
||||
content-hash = "e7985ee5c3ca3a4389b4e85fda033a9b3b867dbbe4b4a7fca8ea5c35fc401148"
|
||||
content-hash = "2f392e4b1cf2dd6c455462028ce8347e698a13a1b26ebe8449d71800bb925f25"
|
||||
|
||||
@@ -12,7 +12,7 @@ force-exclude = '''

[tool.poetry]
name = "redash"
version = "24.05.0-dev"
version = "24.08.1-dev"
description = "Make Your Company Data Driven. Connect to any data source, easily visualize, dashboard and share your data."
authors = ["Arik Fraimovich <arik@redash.io>"]
# to be added to/removed from the mailing list, please reach out to Arik via the above email or Discord

@@ -29,7 +29,7 @@ authlib = "0.15.5"
backoff = "2.2.1"
blinker = "1.6.2"
click = "8.1.3"
cryptography = "41.0.6"
cryptography = "42.0.8"
disposable-email-domains = ">=0.0.52"
flask = "2.3.2"
flask-limiter = "3.3.1"
@@ -46,7 +46,7 @@ greenlet = "2.0.2"
gunicorn = "22.0.0"
httplib2 = "0.19.0"
itsdangerous = "2.1.2"
jinja2 = "3.1.3"
jinja2 = "3.1.4"
jsonschema = "3.1.1"
markupsafe = "2.1.1"
maxminddb-geolite2 = "2018.703"
@@ -54,7 +54,7 @@ parsedatetime = "2.4"
passlib = "1.7.3"
psycopg2-binary = "2.9.6"
pyjwt = "2.4.0"
pyopenssl = "23.2.0"
pyopenssl = "24.2.1"
pypd = "1.1.0"
pysaml2 = "7.3.1"
pystache = "0.6.0"
@@ -64,7 +64,7 @@ pytz = ">=2019.3"
pyyaml = "6.0.1"
redis = "4.6.0"
regex = "2023.8.8"
requests = "2.31.0"
requests = "2.32.3"
restrictedpython = "6.2"
rq = "1.16.1"
rq-scheduler = "0.13.1"
@@ -72,19 +72,20 @@ semver = "2.8.1"
sentry-sdk = "1.28.1"
sqlalchemy = "1.3.24"
sqlalchemy-searchable = "1.2.0"
sqlalchemy-utils = "0.34.2"
sqlalchemy-utils = "0.38.3"
sqlparse = "0.5.0"
sshtunnel = "0.1.5"
statsd = "3.3.0"
supervisor = "4.1.0"
supervisor-checks = "0.8.1"
ua-parser = "0.18.0"
urllib3 = "1.26.18"
urllib3 = "1.26.19"
user-agents = "2.0"
werkzeug = "2.3.8"
wtforms = "2.2.1"
xlsxwriter = "1.2.2"
tzlocal = "4.3.1"
pyodbc = "5.1.0"

[tool.poetry.group.all_ds]
optional = true
@@ -123,13 +124,13 @@ pyhive = "0.6.1"
pyignite = "0.6.1"
pymongo = { version = "4.6.3", extras = ["srv", "tls"] }
pymssql = "2.2.8"
pyodbc = "4.0.28"
pyodbc = "5.1.0"
python-arango = "6.1.0"
python-rapidjson = "1.1.0"
requests-aws-sign = "0.1.5"
sasl = ">=0.1.3"
simple-salesforce = "0.74.3"
snowflake-connector-python = "3.4.0"
snowflake-connector-python = "3.12.0"
td-client = "1.0.0"
thrift = ">=0.8.0"
thrift-sasl = ">=0.1.0"

@@ -14,7 +14,7 @@ from redash.app import create_app  # noqa
from redash.destinations import import_destinations
from redash.query_runner import import_query_runners

__version__ = "24.05.0-dev"
__version__ = "24.08.1-dev"


if os.environ.get("REMOTE_DEBUG"):

@@ -8,6 +8,7 @@ from redash import settings

try:
    from ldap3 import Connection, Server
    from ldap3.utils.conv import escape_filter_chars
except ImportError:
    if settings.LDAP_LOGIN_ENABLED:
        sys.exit(
@@ -69,6 +70,7 @@ def login(org_slug=None):


def auth_ldap_user(username, password):
    clean_username = escape_filter_chars(username)
    server = Server(settings.LDAP_HOST_URL, use_ssl=settings.LDAP_SSL)
    if settings.LDAP_BIND_DN is not None:
        conn = Connection(
@@ -83,7 +85,7 @@ def auth_ldap_user(username, password):

    conn.search(
        settings.LDAP_SEARCH_DN,
        settings.LDAP_SEARCH_TEMPLATE % {"username": username},
        settings.LDAP_SEARCH_TEMPLATE % {"username": clean_username},
        attributes=[settings.LDAP_DISPLAY_NAME_KEY, settings.LDAP_EMAIL_KEY],
    )
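
The switch to `escape_filter_chars` closes an LDAP filter injection hole: previously the raw username was interpolated straight into the search filter. A minimal sketch of what the escaping buys; the filter template here is an illustrative stand-in, not Redash's actual `LDAP_SEARCH_TEMPLATE`:

import re  # not required; shown only to emphasize this is plain string work
from ldap3.utils.conv import escape_filter_chars

template = "(&(objectClass=person)(cn=%(username)s))"  # hypothetical template

malicious = "admin)(objectClass=*"
print(template % {"username": malicious})
# (&(objectClass=person)(cn=admin)(objectClass=*))  <- filter structure hijacked

print(template % {"username": escape_filter_chars(malicious)})
# (&(objectClass=person)(cn=admin\29\28objectClass=\2a))  <- specials become escape codes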

@@ -1,7 +1,7 @@
from flask import request
from funcy import project

from redash import models
from redash import models, utils
from redash.handlers.base import (
    BaseResource,
    get_object_or_404,
@@ -14,6 +14,10 @@ from redash.permissions import (
    view_only,
)
from redash.serializers import serialize_alert
from redash.tasks.alerts import (
    notify_subscriptions,
    should_notify,
)


class AlertResource(BaseResource):
@@ -43,6 +47,21 @@ class AlertResource(BaseResource):
        models.db.session.commit()


class AlertEvaluateResource(BaseResource):
    def post(self, alert_id):
        alert = get_object_or_404(models.Alert.get_by_id_and_org, alert_id, self.current_org)
        require_admin_or_owner(alert.user.id)

        new_state = alert.evaluate()
        if should_notify(alert, new_state):
            alert.state = new_state
            alert.last_triggered_at = utils.utcnow()
            models.db.session.commit()

            notify_subscriptions(alert, new_state, {})
        self.record_event({"action": "evaluate", "object_id": alert.id, "object_type": "alert"})


class AlertMuteResource(BaseResource):
    def post(self, alert_id):
        alert = get_object_or_404(models.Alert.get_by_id_and_org, alert_id, self.current_org)

@@ -3,6 +3,7 @@ from flask_restful import Api
from werkzeug.wrappers import Response

from redash.handlers.alerts import (
    AlertEvaluateResource,
    AlertListResource,
    AlertMuteResource,
    AlertResource,
@@ -117,6 +118,7 @@ def json_representation(data, code, headers=None):

api.add_org_resource(AlertResource, "/api/alerts/<alert_id>", endpoint="alert")
api.add_org_resource(AlertMuteResource, "/api/alerts/<alert_id>/mute", endpoint="alert_mute")
api.add_org_resource(AlertEvaluateResource, "/api/alerts/<alert_id>/eval", endpoint="alert_eval")
api.add_org_resource(
    AlertSubscriptionListResource,
    "/api/alerts/<alert_id>/subscriptions",
@@ -236,11 +238,11 @@ api.add_org_resource(
)
api.add_org_resource(
    QueryResultResource,
    "/api/query_results/<result_id>.<filetype>",
    "/api/query_results/<result_id>",
    "/api/query_results/<query_result_id>.<filetype>",
    "/api/query_results/<query_result_id>",
    "/api/queries/<query_id>/results",
    "/api/queries/<query_id>/results.<filetype>",
    "/api/queries/<query_id>/results/<result_id>.<filetype>",
    "/api/queries/<query_id>/results/<query_result_id>.<filetype>",
    endpoint="query_result",
)
api.add_org_resource(

@@ -7,13 +7,13 @@ from flask_restful import Resource, abort
from sqlalchemy import cast
from sqlalchemy.dialects.postgresql import ARRAY
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy_utils.functions import sort_query

from redash import settings
from redash.authentication import current_org
from redash.models import db
from redash.tasks import record_event as record_event_task
from redash.utils import json_dumps
from redash.utils.query_order import sort_query

routes = Blueprint("redash", __name__, template_folder=settings.fix_assets_path("templates"))

@@ -5,7 +5,6 @@ import regex
from flask import make_response, request
from flask_login import current_user
from flask_restful import abort
from rq.job import JobStatus

from redash import models, settings
from redash.handlers.base import BaseResource, get_object_or_404, record_event
@@ -39,7 +38,7 @@ from redash.utils import (


def error_response(message, http_status=400):
    return {"job": {"status": JobStatus.FAILED, "error": message}}, http_status
    return {"job": {"status": 4, "error": message}}, http_status


error_messages = {
@@ -226,7 +225,7 @@ class QueryResultResource(BaseResource):
        headers["Access-Control-Allow-Credentials"] = str(settings.ACCESS_CONTROL_ALLOW_CREDENTIALS).lower()

    @require_any_of_permission(("view_query", "execute_query"))
    def options(self, query_id=None, result_id=None, filetype="json"):
    def options(self, query_id=None, query_result_id=None, filetype="json"):
        headers = {}
        self.add_cors_headers(headers)

@@ -286,12 +285,12 @@ class QueryResultResource(BaseResource):
            return error_messages["no_permission"]

    @require_any_of_permission(("view_query", "execute_query"))
    def get(self, query_id=None, result_id=None, filetype="json"):
    def get(self, query_id=None, query_result_id=None, filetype="json"):
        """
        Retrieve query results.

        :param number query_id: The ID of the query whose results should be fetched
        :param number result_id: the ID of the query result to fetch
        :param number query_result_id: the ID of the query result to fetch
        :param string filetype: Format to return. One of 'json', 'xlsx', or 'csv'. Defaults to 'json'.

        :<json number id: Query result ID
@@ -306,13 +305,13 @@ class QueryResultResource(BaseResource):
        # This method handles two cases: retrieving result by id & retrieving result by query id.
        # They need to be split, as they have different logic (for example, retrieving by query id
        # should check for query parameters and shouldn't cache the result).
        should_cache = result_id is not None
        should_cache = query_result_id is not None

        query_result = None
        query = None

        if result_id:
            query_result = get_object_or_404(models.QueryResult.get_by_id_and_org, result_id, self.current_org)
        if query_result_id:
            query_result = get_object_or_404(models.QueryResult.get_by_id_and_org, query_result_id, self.current_org)

        if query_id is not None:
            query = get_object_or_404(models.Query.get_by_id_and_org, query_id, self.current_org)
@@ -347,7 +346,7 @@ class QueryResultResource(BaseResource):
            event["object_id"] = query_id
        else:
            event["object_type"] = "query_result"
            event["object_id"] = result_id
            event["object_id"] = query_result_id

        self.record_event(event)

@@ -46,6 +46,7 @@ from redash.models.parameterized_query import (
    QueryDetachedFromDataSourceError,
)
from redash.models.types import (
    Configuration,
    EncryptedConfiguration,
    JSONText,
    MutableDict,
@@ -227,16 +228,7 @@ class DataSource(BelongsToOrgMixin, db.Model):

    def _sort_schema(self, schema):
        return [
            {
                "name": i["name"],
                "description": i.get("description"),
                "columns": sorted(
                    i["columns"],
                    key=lambda col: (
                        ("partition" in col["type"], col.get("idx", 0), col["name"]) if isinstance(col, dict) else col
                    ),
                ),
            }
            {"name": i["name"], "columns": sorted(i["columns"], key=lambda x: x["name"] if isinstance(x, dict) else x)}
            for i in sorted(schema, key=lambda x: x["name"])
        ]

@@ -935,6 +927,7 @@ class Alert(TimestampMixin, BelongsToOrgMixin, db.Model):
    UNKNOWN_STATE = "unknown"
    OK_STATE = "ok"
    TRIGGERED_STATE = "triggered"
    TEST_STATE = "test"

    id = primary_key("Alert")
    name = Column(db.String(255))
@@ -969,7 +962,24 @@ class Alert(TimestampMixin, BelongsToOrgMixin, db.Model):
        if data["rows"] and self.options["column"] in data["rows"][0]:
            op = OPERATORS.get(self.options["op"], lambda v, t: False)

            value = data["rows"][0][self.options["column"]]
            if "selector" not in self.options:
                selector = "first"
            else:
                selector = self.options["selector"]

            if selector == "max":
                max_val = float("-inf")
                for i in range(0, len(data["rows"])):
                    max_val = max(max_val, data["rows"][i][self.options["column"]])
                value = max_val
            elif selector == "min":
                min_val = float("inf")
                for i in range(0, len(data["rows"])):
                    min_val = min(min_val, data["rows"][i][self.options["column"]])
                value = min_val
            else:
                value = data["rows"][0][self.options["column"]]

            threshold = self.options["value"]

            new_state = next_state(op, value, threshold)
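
The selector logic above reduces the result set to a single value before the operator is applied. A condensed, standalone sketch of that behavior (names and sample rows are illustrative, not the Redash code itself):

def select_value(rows, column, selector="first"):
    """Pick the value an alert compares against its threshold."""
    values = [row[column] for row in rows]
    if selector == "max":
        return max(values)
    if selector == "min":
        return min(values)
    return values[0]  # "first" is the default when no selector is configured

rows = [{"foo": 2}, {"foo": 5}, {"foo": 1}]
assert select_value(rows, "foo", "max") == 5
assert select_value(rows, "foo", "min") == 1
assert select_value(rows, "foo") == 2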

@@ -997,11 +1007,12 @@ class Alert(TimestampMixin, BelongsToOrgMixin, db.Model):
        result_table = []  # A two-dimensional array which can be rendered as a table in Mustache
        for row in data["rows"]:
            result_table.append([row[col["name"]] for col in data["columns"]])

        print("OPTIONS", self.options)
        context = {
            "ALERT_NAME": self.name,
            "ALERT_URL": "{host}/alerts/{alert_id}".format(host=host, alert_id=self.id),
            "ALERT_STATUS": self.state.upper(),
            "ALERT_SELECTOR": self.options["selector"],
            "ALERT_CONDITION": self.options["op"],
            "ALERT_THRESHOLD": self.options["value"],
            "QUERY_NAME": self.query_rel.name,

@@ -1,3 +1,4 @@
import re
from functools import partial
from numbers import Number

@@ -88,6 +89,16 @@ def _is_number(string):
        return True


def _is_regex_pattern(value, regex):
    try:
        if re.compile(regex).fullmatch(value):
            return True
        else:
            return False
    except re.error:
        return False
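
A quick demonstration of the `fullmatch`-based check above, written as a condensed equivalent of the function in the diff: the entire value must match the pattern, and a malformed pattern simply fails validation instead of raising.

import re

def is_regex_pattern(value, regex):
    try:
        return re.compile(regex).fullmatch(value) is not None
    except re.error:
        return False

assert is_regex_pattern("abc", r"[a-c]+")         # whole string matches
assert not is_regex_pattern("abcd", r"[a-c]+")    # fullmatch, not search
assert not is_regex_pattern("abc", r"[unclosed")  # bad pattern -> treated as invalid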


def _is_date(string):
    parse(string)
    return True
@@ -135,6 +146,7 @@ class ParameterizedQuery:

        enum_options = definition.get("enumOptions")
        query_id = definition.get("queryId")
        regex = definition.get("regex")
        allow_multiple_values = isinstance(definition.get("multiValuesOptions"), dict)

        if isinstance(enum_options, str):
@@ -142,6 +154,7 @@ class ParameterizedQuery:

        validators = {
            "text": lambda value: isinstance(value, str),
            "text-pattern": lambda value: _is_regex_pattern(value, regex),
            "number": _is_number,
            "enum": lambda value: _is_value_within_options(value, enum_options, allow_multiple_values),
            "query": lambda value: _is_value_within_options(

@@ -3,10 +3,21 @@ from sqlalchemy.ext.mutable import Mutable
from sqlalchemy.types import TypeDecorator
from sqlalchemy_utils import EncryptedType

from redash.models.base import db
from redash.utils import json_dumps, json_loads
from redash.utils.configuration import ConfigurationContainer

from .base import db


class Configuration(TypeDecorator):
    impl = db.Text

    def process_bind_param(self, value, dialect):
        return value.to_json()

    def process_result_value(self, value, dialect):
        return ConfigurationContainer.from_json(value)


class EncryptedConfiguration(EncryptedType):
    def process_bind_param(self, value, dialect):

@@ -166,7 +166,7 @@ class User(TimestampMixin, db.Model, BelongsToOrgMixin, UserMixin, PermissionsCh
        if self._profile_image_url:
            return self._profile_image_url

        email_md5 = hashlib.md5(self.email.lower().encode()).hexdigest()
        email_md5 = hashlib.md5(self.email.lower().encode(), usedforsecurity=False).hexdigest()
        return "https://www.gravatar.com/avatar/{}?s=40&d=identicon".format(email_md5)

    @property
@@ -233,7 +233,9 @@ class User(TimestampMixin, db.Model, BelongsToOrgMixin, UserMixin, PermissionsCh
        return AccessPermission.exists(obj, access_type, grantee=self)

    def get_id(self):
        identity = hashlib.md5("{},{}".format(self.email, self.password_hash).encode()).hexdigest()
        identity = hashlib.md5(
            "{},{}".format(self.email, self.password_hash).encode(), usedforsecurity=False
        ).hexdigest()
        return "{0}-{1}".format(self.id, identity)

    def get_actual_user(self):

@@ -21,9 +21,7 @@ OPTIONAL_CREDENTIALS = parse_boolean(os.environ.get("ATHENA_OPTIONAL_CREDENTIALS

try:
    import boto3
    import pandas as pd
    import pyathena
    from pyathena.pandas_cursor import PandasCursor

    enabled = True
except ImportError:
@@ -78,6 +76,10 @@ class Athena(BaseQueryRunner):
                "default": "default",
            },
            "glue": {"type": "boolean", "title": "Use Glue Data Catalog"},
            "catalog_ids": {
                "type": "string",
                "title": "Enter Glue Data Catalog IDs, separated by commas (leave blank for default catalog)",
            },
            "work_group": {
                "type": "string",
                "title": "Athena Work Group",
@@ -90,7 +92,7 @@ class Athena(BaseQueryRunner):
            },
        },
        "required": ["region", "s3_staging_dir"],
        "extra_options": ["glue", "cost_per_tb"],
        "extra_options": ["glue", "catalog_ids", "cost_per_tb"],
        "order": [
            "region",
            "s3_staging_dir",
@@ -174,56 +176,39 @@ class Athena(BaseQueryRunner):
            "region_name": self.configuration["region"],
        }

    def __get_schema_from_glue(self):
    def __get_schema_from_glue(self, catalog_id=""):
        client = boto3.client("glue", **self._get_iam_credentials())
        schema = {}

        database_paginator = client.get_paginator("get_databases")
        table_paginator = client.get_paginator("get_tables")

        for databases in database_paginator.paginate():
        databases_iterator = database_paginator.paginate(
            **({"CatalogId": catalog_id} if catalog_id != "" else {}),
        )

        for databases in databases_iterator:
            for database in databases["DatabaseList"]:
                iterator = table_paginator.paginate(DatabaseName=database["Name"])
                iterator = table_paginator.paginate(
                    DatabaseName=database["Name"],
                    **({"CatalogId": catalog_id} if catalog_id != "" else {}),
                )
                for table in iterator.search("TableList[]"):
                    table_name = "%s.%s" % (database["Name"], table["Name"])
                    if "StorageDescriptor" not in table:
                        logger.warning("Glue table doesn't have StorageDescriptor: %s", table_name)
                        continue
                    if table_name not in schema:
                        columns = []
                        for cols in table["StorageDescriptor"]["Columns"]:
                            c = {
                                "name": cols["Name"],
                            }
                            if "Type" in cols:
                                c["type"] = cols["Type"]
                            if "Comment" in cols:
                                c["comment"] = cols["Comment"]
                            columns.append(c)

                        schema[table_name] = {
                            "name": table_name,
                            "columns": columns,
                            "description": table.get("Description"),
                        }
                        for idx, partition in enumerate(table.get("PartitionKeys", [])):
                            schema[table_name]["columns"].append(
                                {
                                    "name": partition["Name"],
                                    "type": "partition",
                                    "idx": idx,
                                }
                            )
                            if "Type" in partition:
                                _type = partition["Type"]
                                c["type"] = f"partition ({_type})"
                            if "Comment" in partition:
                                c["comment"] = partition["Comment"]
                        column = [columns["Name"] for columns in table["StorageDescriptor"]["Columns"]]
                        schema[table_name] = {"name": table_name, "columns": column}
                        for partition in table.get("PartitionKeys", []):
                            schema[table_name]["columns"].append(partition["Name"])
        return list(schema.values())

    def get_schema(self, get_stats=False):
        if self.configuration.get("glue", False):
            return self.__get_schema_from_glue()
            catalog_ids = [id.strip() for id in self.configuration.get("catalog_ids", "").split(",")]
            return sum([self.__get_schema_from_glue(catalog_id) for catalog_id in catalog_ids], [])
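
The `get_schema` change fans one request out over every catalog in the comma-separated `catalog_ids` option. A small sketch of just that fan-out (the helper and configuration shape are illustrative stand-ins for the runner code above); note that `"".split(",")` yields `[""]`, so an empty setting still queries the default catalog:

def schema_for_catalogs(configuration, fetch_catalog_schema):
    raw = configuration.get("catalog_ids", "")
    catalog_ids = [cid.strip() for cid in raw.split(",")]
    # Flatten the per-catalog table lists into one schema listing.
    return sum((fetch_catalog_schema(cid) for cid in catalog_ids), [])

tables = schema_for_catalogs(
    {"catalog_ids": "111111111111, 222222222222"},
    lambda cid: [{"name": "db.table_from_%s" % cid, "columns": []}],
)
assert [t["name"] for t in tables] == ["db.table_from_111111111111", "db.table_from_222222222222"]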

        schema = {}
        query = """
@@ -252,7 +237,6 @@ class Athena(BaseQueryRunner):
            kms_key=self.configuration.get("kms_key", None),
            work_group=self.configuration.get("work_group", "primary"),
            formatter=SimpleFormatter(),
            cursor_class=PandasCursor,
            **self._get_iam_credentials(user=user),
        ).cursor()

@@ -260,8 +244,7 @@ class Athena(BaseQueryRunner):
        cursor.execute(query)
        column_tuples = [(i[0], _TYPE_MAPPINGS.get(i[1], None)) for i in cursor.description]
        columns = self.fetch_columns(column_tuples)
        df = cursor.as_pandas().replace({pd.NA: None})
        rows = df.to_dict(orient="records")
        rows = [dict(zip(([c["name"] for c in columns]), r)) for i, r in enumerate(cursor.fetchall())]
        qbytes = None
        athena_query_id = None
        try:

@@ -1,3 +1,4 @@
import json
import logging
from typing import Optional, Tuple

@@ -64,6 +65,7 @@ class ElasticSearch2(BaseHTTPQueryRunner):
        return data, error

    def _build_query(self, query: str) -> Tuple[dict, str, Optional[list]]:
        query = json.loads(query)
        index_name = query.pop("index", "")
        result_fields = query.pop("result_fields", None)
        url = "/{}/_search".format(index_name)

File diff suppressed because it is too large

@@ -117,13 +117,14 @@ def parse_results(results: list, flatten: bool = False) -> list:

        parsed_row = _parse_dict(row, flatten)
        for column_name, value in parsed_row.items():
            columns.append(
                {
                    "name": column_name,
                    "friendly_name": column_name,
                    "type": TYPES_MAP.get(type(value), TYPE_STRING),
                }
            )
            if _get_column_by_name(columns, column_name) is None:
                columns.append(
                    {
                        "name": column_name,
                        "friendly_name": column_name,
                        "type": TYPES_MAP.get(type(value), TYPE_STRING),
                    }
                )

        rows.append(parsed_row)
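
The guard keeps a column descriptor from being appended once per row it appears in. A standalone sketch of the de-duplication; the helper mirrors the diff, the sample rows are invented:

def _get_column_by_name(columns, name):
    return next((col for col in columns if col["name"] == name), None)

columns = []
for row in [{"a": 1}, {"a": 2, "b": 3}]:
    for column_name in row:
        if _get_column_by_name(columns, column_name) is None:
            columns.append({"name": column_name, "friendly_name": column_name})

assert [col["name"] for col in columns] == ["a", "b"]  # "a" recorded once, not twice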

@@ -108,8 +108,6 @@ def build_schema(query_result, schema):
        column = row["column_name"]
        if row.get("data_type") is not None:
            column = {"name": row["column_name"], "type": row["data_type"]}
            if "column_comment" in row:
                column["comment"] = row["column_comment"]

        schema[table_name]["columns"].append(column)

@@ -224,9 +222,7 @@ class PostgreSQL(BaseSQLQueryRunner):
        SELECT s.nspname as table_schema,
               c.relname as table_name,
               a.attname as column_name,
               null as data_type,
               null as column_comment,
               null as idx
               null as data_type
        FROM pg_class c
        JOIN pg_namespace s
        ON c.relnamespace = s.oid
@@ -235,23 +231,17 @@ class PostgreSQL(BaseSQLQueryRunner):
        ON a.attrelid = c.oid
        AND a.attnum > 0
        AND NOT a.attisdropped
        WHERE c.relkind IN ('m', 'f', 'p') AND has_table_privilege(s.nspname || '.' || c.relname, 'select')
        WHERE c.relkind IN ('m', 'f', 'p')
        AND has_table_privilege(s.nspname || '.' || c.relname, 'select')
        AND has_schema_privilege(s.nspname, 'usage')

        UNION

        SELECT table_schema,
               table_name,
               column_name,
               data_type,
               pgd.description,
               isc.ordinal_position
        FROM information_schema.columns as isc
        LEFT JOIN pg_catalog.pg_statio_all_tables as st
        ON isc.table_schema = st.schemaname
        AND isc.table_name = st.relname
        LEFT JOIN pg_catalog.pg_description pgd
        ON pgd.objoid=st.relid
        AND pgd.objsubid=isc.ordinal_position
               data_type
        FROM information_schema.columns
        WHERE table_schema NOT IN ('pg_catalog', 'information_schema')
        """

@@ -90,7 +90,9 @@ def create_tables_from_query_ids(user, connection, query_ids, query_params, cach

    for query in set(query_params):
        results = get_query_results(user, query[0], False, query[1])
        table_hash = hashlib.md5("query_{query}_{hash}".format(query=query[0], hash=query[1]).encode()).hexdigest()
        table_hash = hashlib.md5(
            "query_{query}_{hash}".format(query=query[0], hash=query[1]).encode(), usedforsecurity=False
        ).hexdigest()
        table_name = "query_{query_id}_{param_hash}".format(query_id=query[0], param_hash=table_hash)
        create_table(connection, table_name, results)

@@ -142,7 +144,9 @@ def create_table(connection, table_name, query_results):

def prepare_parameterized_query(query, query_params):
    for params in query_params:
        table_hash = hashlib.md5("query_{query}_{hash}".format(query=params[0], hash=params[1]).encode()).hexdigest()
        table_hash = hashlib.md5(
            "query_{query}_{hash}".format(query=params[0], hash=params[1]).encode(), usedforsecurity=False
        ).hexdigest()
        key = "param_query_{query_id}_{{{param_string}}}".format(query_id=params[0], param_string=params[1])
        value = "query_{query_id}_{param_hash}".format(query_id=params[0], param_hash=table_hash)
        query = query.replace(key, value)

@@ -7,7 +7,6 @@ separation of concerns.
from flask_login import current_user
from funcy import project
from rq.job import JobStatus
from rq.results import Result
from rq.timeouts import JobTimeoutException

from redash import models
@@ -272,19 +271,38 @@ class DashboardSerializer(Serializer):


def serialize_job(job):
    # TODO: this is mapping to the old Job class statuses. Need to update the client side and remove this
    STATUSES = {
        JobStatus.QUEUED: 1,
        JobStatus.STARTED: 2,
        JobStatus.FINISHED: 3,
        JobStatus.FAILED: 4,
        JobStatus.CANCELED: 5,
        JobStatus.DEFERRED: 6,
        JobStatus.SCHEDULED: 7,
    }

    job_status = job.get_status()
    if job.is_started:
        updated_at = job.started_at or 0
    else:
        updated_at = 0

    status = job.get_status()
    error = result_id = None
    job_result = job.latest_result()
    if job_result:
        if job_result.type == Result.Type.SUCCESSFUL:
            result_id = job_result.return_value
        else:
            error = job_result.exc_string
    status = STATUSES[job_status]
    result = query_result_id = None

    if job.is_cancelled:
        error = "Query cancelled by user."
        status = 4
    elif isinstance(job.result, Exception):
        error = str(job.result)
        status = 4
    elif isinstance(job.result, dict) and "error" in job.result:
        error = job.result["error"]
        status = 4
    else:
        error = ""
        result = query_result_id = job.result

    return {
        "job": {
@@ -292,6 +310,7 @@ def serialize_job(job):
            "updated_at": updated_at,
            "status": status,
            "error": error,
            "result_id": result_id,
            "result": result,
            "query_result_id": query_result_id,
        }
    }
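
The reintroduced integer codes are what the legacy client still expects, and they line up with the hard-coded `4` in `error_response` earlier in this diff. A quick sanity check under that assumption:

from rq.job import JobStatus

STATUSES = {JobStatus.QUEUED: 1, JobStatus.STARTED: 2, JobStatus.FINISHED: 3,
            JobStatus.FAILED: 4, JobStatus.CANCELED: 5, JobStatus.DEFERRED: 6,
            JobStatus.SCHEDULED: 7}
assert STATUSES[JobStatus.FAILED] == 4  # failed jobs and error_response() report the same code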

@@ -1,5 +1,7 @@
import signal
import sys
import time
from collections import deque

import redis
from rq import get_current_job
@@ -43,24 +45,24 @@ def enqueue_query(query, data_source, user_id, is_api_key=False, scheduled_query
    if job_id:
        logger.info("[%s] Found existing job: %s", query_hash, job_id)
        job_complete = None
        job_cancelled = None

        try:
            job = Job.fetch(job_id)
            job_exists = True
            status = job.get_status()
            job_complete = status in [
                JobStatus.FINISHED,
                JobStatus.FAILED,
                JobStatus.STOPPED,
                JobStatus.CANCELED,
            ]
            job_complete = status in [JobStatus.FINISHED, JobStatus.FAILED]
            job_cancelled = job.is_cancelled

            if job_complete:
                message = "job found is complete (%s)" % status
            elif job_cancelled:
                message = "job found has been cancelled"
        except NoSuchJobError:
            message = "job found has expired"
            job_exists = False

        lock_is_irrelevant = job_complete or not job_exists
        lock_is_irrelevant = job_complete or job_cancelled or not job_exists

        if lock_is_irrelevant:
            logger.info("[%s] %s, removing lock", query_hash, message)
@@ -145,6 +147,30 @@ def _resolve_user(user_id, is_api_key, query_id):
    return None


def _get_size_iterative(dict_obj):
    """Iteratively finds size of objects in bytes"""
    seen = set()
    size = 0
    objects = deque([dict_obj])

    while objects:
        current = objects.popleft()
        if id(current) in seen:
            continue
        seen.add(id(current))
        size += sys.getsizeof(current)

        if isinstance(current, dict):
            objects.extend(current.keys())
            objects.extend(current.values())
        elif hasattr(current, "__dict__"):
            objects.append(current.__dict__)
        elif hasattr(current, "__iter__") and not isinstance(current, (str, bytes, bytearray)):
            objects.extend(current)

    return size
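
`_get_size_iterative` walks the payload breadth-first instead of recursing, so deeply nested results cannot blow the stack, and the `seen` set keeps shared references from being counted twice. A quick usage check, reusing the function defined above; exact byte counts vary by Python build, so it only asserts the nested payload dominates its top-level container:

import sys

payload = {"rows": [{"x": i} for i in range(1000)], "columns": [{"name": "x"}]}
assert _get_size_iterative(payload) > sys.getsizeof(payload)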


class QueryExecutor:
    def __init__(self, query, data_source_id, user_id, is_api_key, metadata, is_scheduled_query):
        self.job = get_current_job()
@@ -195,7 +221,7 @@ class QueryExecutor:
            "job=execute_query query_hash=%s ds_id=%d data_length=%s error=[%s]",
            self.query_hash,
            self.data_source_id,
            data and len(data),
            data and _get_size_iterative(data),
            error,
        )

@@ -65,7 +65,10 @@ class StatsdRecordingWorker(BaseWorker):
            super().execute_job(job, queue)
        finally:
            statsd_client.decr("rq.jobs.running.{}".format(queue.name))
            statsd_client.incr("rq.jobs.{}.{}".format(job.get_status(), queue.name))
            if job.get_status() == JobStatus.FINISHED:
                statsd_client.incr("rq.jobs.finished.{}".format(queue.name))
            else:
                statsd_client.incr("rq.jobs.failed.{}".format(queue.name))


class HardLimitingWorker(BaseWorker):
@@ -151,7 +154,7 @@ class HardLimitingWorker(BaseWorker):
        job_status = job.get_status()
        if job_status is None:  # Job completed and its ttl has expired
            return
        if job_status not in [JobStatus.FINISHED, JobStatus.FAILED, JobStatus.STOPPED, JobStatus.CANCELED]:
        if job_status not in [JobStatus.FINISHED, JobStatus.FAILED]:
            if not job.ended_at:
                job.ended_at = utcnow()

@@ -60,7 +60,7 @@ def gen_query_hash(sql):
    """
    sql = COMMENTS_REGEX.sub("", sql)
    sql = "".join(sql.split())
    return hashlib.md5(sql.encode("utf-8")).hexdigest()
    return hashlib.md5(sql.encode("utf-8"), usedforsecurity=False).hexdigest()


def generate_token(length):

310 redash/utils/query_order.py Normal file
@@ -0,0 +1,310 @@
# Copyright (c) 2012, Konsta Vesterinen
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
#   list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
#   this list of conditions and the following disclaimer in the documentation
#   and/or other materials provided with the distribution.
#
# * The names of the contributors may not be used to endorse or promote products
#   derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

from inspect import isclass

import sqlalchemy as sa
from sqlalchemy.orm import mapperlib
from sqlalchemy.orm.properties import ColumnProperty
from sqlalchemy.orm.query import _ColumnEntity
from sqlalchemy.orm.util import AliasedInsp
from sqlalchemy.sql.expression import asc, desc


def get_query_descriptor(query, entity, attr):
    if attr in query_labels(query):
        return attr
    else:
        entity = get_query_entity_by_alias(query, entity)
        if entity:
            descriptor = get_descriptor(entity, attr)
            if hasattr(descriptor, "property") and isinstance(descriptor.property, sa.orm.RelationshipProperty):
                return
            return descriptor


def query_labels(query):
    """
    Return all labels for given SQLAlchemy query object.

    Example::

        query = session.query(
            Category,
            db.func.count(Article.id).label('articles')
        )
        query_labels(query)  # ['articles']

    :param query: SQLAlchemy Query object
    """
    return [
        entity._label_name for entity in query._entities if isinstance(entity, _ColumnEntity) and entity._label_name
    ]


def get_query_entity_by_alias(query, alias):
    entities = get_query_entities(query)
    if not alias:
        return entities[0]

    for entity in entities:
        if isinstance(entity, sa.orm.util.AliasedClass):
            name = sa.inspect(entity).name
        else:
            name = get_mapper(entity).tables[0].name

        if name == alias:
            return entity


def get_query_entities(query):
    """
    Return a list of all entities present in given SQLAlchemy query object.

    Examples::

        from sqlalchemy_utils import get_query_entities

        query = session.query(Category)
        get_query_entities(query)  # [<Category>]

        query = session.query(Category.id)
        get_query_entities(query)  # [<Category>]

    This function also supports queries with joins.

    ::

        query = session.query(Category).join(Article)
        get_query_entities(query)  # [<Category>, <Article>]

    .. versionchanged: 0.26.7
        This function now returns a list instead of generator

    :param query: SQLAlchemy Query object
    """
    exprs = [
        d["expr"] if is_labeled_query(d["expr"]) or isinstance(d["expr"], sa.Column) else d["entity"]
        for d in query.column_descriptions
    ]
    return [get_query_entity(expr) for expr in exprs] + [get_query_entity(entity) for entity in query._join_entities]


def is_labeled_query(expr):
    return isinstance(expr, sa.sql.elements.Label) and isinstance(
        list(expr.base_columns)[0], (sa.sql.selectable.Select, sa.sql.selectable.ScalarSelect)
    )


def get_query_entity(expr):
    if isinstance(expr, sa.orm.attributes.InstrumentedAttribute):
        return expr.parent.class_
    elif isinstance(expr, sa.Column):
        return expr.table
    elif isinstance(expr, AliasedInsp):
        return expr.entity
    return expr


def get_mapper(mixed):
    """
    Return related SQLAlchemy Mapper for given SQLAlchemy object.

    :param mixed: SQLAlchemy Table / Alias / Mapper / declarative model object

    ::

        from sqlalchemy_utils import get_mapper

        get_mapper(User)
        get_mapper(User())
        get_mapper(User.__table__)
        get_mapper(User.__mapper__)
        get_mapper(sa.orm.aliased(User))
        get_mapper(sa.orm.aliased(User.__table__))

    Raises:
        ValueError: if multiple mappers were found for given argument

    .. versionadded: 0.26.1
    """
    if isinstance(mixed, sa.orm.query._MapperEntity):
        mixed = mixed.expr
    elif isinstance(mixed, sa.Column):
        mixed = mixed.table
    elif isinstance(mixed, sa.orm.query._ColumnEntity):
        mixed = mixed.expr

    if isinstance(mixed, sa.orm.Mapper):
        return mixed
    if isinstance(mixed, sa.orm.util.AliasedClass):
        return sa.inspect(mixed).mapper
    if isinstance(mixed, sa.sql.selectable.Alias):
        mixed = mixed.element
    if isinstance(mixed, AliasedInsp):
        return mixed.mapper
    if isinstance(mixed, sa.orm.attributes.InstrumentedAttribute):
        mixed = mixed.class_
    if isinstance(mixed, sa.Table):
        mappers = [mapper for mapper in mapperlib._mapper_registry if mixed in mapper.tables]
        if len(mappers) > 1:
            raise ValueError("Multiple mappers found for table '%s'." % mixed.name)
        elif not mappers:
            raise ValueError("Could not get mapper for table '%s'." % mixed.name)
        else:
            return mappers[0]
    if not isclass(mixed):
        mixed = type(mixed)
    return sa.inspect(mixed)


def get_polymorphic_mappers(mixed):
    if isinstance(mixed, AliasedInsp):
        return mixed.with_polymorphic_mappers
    else:
        return mixed.polymorphic_map.values()


def get_descriptor(entity, attr):
    mapper = sa.inspect(entity)

    for key, descriptor in get_all_descriptors(mapper).items():
        if attr == key:
            prop = descriptor.property if hasattr(descriptor, "property") else None
            if isinstance(prop, ColumnProperty):
                if isinstance(entity, sa.orm.util.AliasedClass):
                    for c in mapper.selectable.c:
                        if c.key == attr:
                            return c
                else:
                    # If the property belongs to a class that uses
                    # polymorphic inheritance we have to take into account
                    # situations where the attribute exists in child class
                    # but not in parent class.
                    return getattr(prop.parent.class_, attr)
            else:
                # Handle synonyms, relationship properties and hybrid
                # properties
                if isinstance(entity, sa.orm.util.AliasedClass):
                    return getattr(entity, attr)
                try:
                    return getattr(mapper.class_, attr)
                except AttributeError:
                    pass


def get_all_descriptors(expr):
    if isinstance(expr, sa.sql.selectable.Selectable):
        return expr.c
    insp = sa.inspect(expr)
    try:
        polymorphic_mappers = get_polymorphic_mappers(insp)
    except sa.exc.NoInspectionAvailable:
        return get_mapper(expr).all_orm_descriptors
    else:
        attrs = dict(get_mapper(expr).all_orm_descriptors)
        for submapper in polymorphic_mappers:
            for key, descriptor in submapper.all_orm_descriptors.items():
                if key not in attrs:
                    attrs[key] = descriptor
        return attrs


class QuerySorterException(Exception):
    pass


class QuerySorter:
    def __init__(self, silent=True, separator="-"):
        self.separator = separator
        self.silent = silent

    def assign_order_by(self, entity, attr, func):
        expr = get_query_descriptor(self.query, entity, attr)
        if expr is not None:
            return self.query.order_by(func(expr))
        if not self.silent:
            raise QuerySorterException("Could not sort query with expression '%s'" % attr)
        return self.query

    def parse_sort_arg(self, arg):
        if arg[0] == self.separator:
            func = desc
            arg = arg[1:]
        else:
            func = asc

        parts = arg.split(self.separator)
        return {
            "entity": parts[0] if len(parts) > 1 else None,
            "attr": parts[1] if len(parts) > 1 else arg,
            "func": func,
        }
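
The separator grammar that `parse_sort_arg` implements is worth spelling out, since the test file later in this diff exercises exactly these forms. This check reuses `QuerySorter` and `desc` as defined above:

# How a sort argument decomposes with the default "-" separator:
#   "name"        -> ascending  on the primary entity's "name"
#   "-name"       -> descending on the primary entity's "name"
#   "users-name"  -> ascending  on the "users" table's "name"
#   "-users-name" -> descending on the "users" table's "name"
sorter = QuerySorter()
assert sorter.parse_sort_arg("-users-name") == {"entity": "users", "attr": "name", "func": desc}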

    def __call__(self, query, *args):
        self.query = query
        for sort in args:
            if not sort:
                continue
            self.query = self.assign_order_by(**self.parse_sort_arg(sort))
        return self.query


def sort_query(query, *args, **kwargs):
    """
    Applies an SQL ORDER BY for given query. This function can be easily used
    with user-defined sorting.

    The examples use the following model definition:

    ::

        import sqlalchemy as sa
        from sqlalchemy import create_engine
        from sqlalchemy.orm import sessionmaker
        from sqlalchemy.ext.declarative import declarative_base
        from sqlalchemy_utils import sort_query

        engine = create_engine(
            'sqlite:///'
        )
        Base = declarative_base()
        Session = sessionmaker(bind=engine)
        session = Session()

        class Category(Base):
            __tablename__ = 'category'
            id = sa.Column(sa.Integer, primary_key=True)
            name = sa.Column(sa.Unicode(255))

        class Article(Base):
            __tablename__ = 'article'
            id = sa.Column(sa.Integer, primary_key=True)
            name = sa.Column(sa.Unicode(255))
            category_id = sa.Column(sa.Integer, sa.ForeignKey(Category.id))
            category = sa.orm.relationship(
                Category, primaryjoin=category_id == Category.id
            )

    1. Applying simple ascending sort

    ::

        query = session.query(Article)
        query = sort_query(query, 'name')

    2. Applying descending sort

    ::

        query = sort_query(query, '-name')

    3. Applying sort to custom calculated label

    ::

        query = session.query(
            Category, sa.func.count(Article.id).label('articles')
        )
        query = sort_query(query, 'articles')

    4. Applying sort to joined table column

    ::

        query = session.query(Article).join(Article.category)
        query = sort_query(query, 'category-name')

    :param query:
        query to be modified
    :param sort:
        string that defines the label or column to sort the query by
    :param silent:
        Whether or not to raise exceptions if unknown sort column
        is passed. By default this is `True` indicating that no errors should
        be raised for unknown columns.
    """
    return QuerySorter(**kwargs)(query, *args)

@@ -1,4 +1,9 @@
import datetime

from mock import patch

from redash.models import Alert, AlertSubscription, db
from redash.utils import utcnow
from tests import BaseTestCase


@@ -39,6 +44,26 @@ class TestAlertResourcePost(BaseTestCase):
        self.assertEqual(rv.status_code, 200)


class TestAlertEvaluateResource(BaseTestCase):
    @patch("redash.handlers.alerts.notify_subscriptions")
    def test_evaluates_alert_and_notifies(self, mock_notify_subscriptions):
        query = self.factory.create_query(
            data_source=self.factory.create_data_source(group=self.factory.create_group())
        )
        retrieved_at = utcnow() - datetime.timedelta(days=1)
        query_result = self.factory.create_query_result(
            retrieved_at=retrieved_at,
            query_text=query.query_text,
            query_hash=query.query_hash,
        )
        query.latest_query_data = query_result
        alert = self.factory.create_alert(query_rel=query)
        rv = self.make_request("post", "/api/alerts/{}/eval".format(alert.id))

        self.assertEqual(rv.status_code, 200)
        mock_notify_subscriptions.assert_called()


class TestAlertResourceDelete(BaseTestCase):
    def test_removes_alert_and_subscriptions(self):
        subscription = self.factory.create_alert_subscription()

91 tests/handlers/test_order_results.py Normal file
@@ -0,0 +1,91 @@
from redash import models
from redash.handlers.base import order_results
from redash.models import db
from tests import BaseTestCase


class TestOrderResults(BaseTestCase):
    def setUp(self):
        super().setUp()

        user1 = self.factory.create_user(name="Charlie")
        user2 = self.factory.create_user(name="Bravo")
        user3 = self.factory.create_user(name="Alpha")

        q1 = self.factory.create_query(name="a", user=user1)
        q2 = self.factory.create_query(name="b", user=user2)
        q3 = self.factory.create_query(name="c", user=user3)

        db.session.add(user1)
        db.session.add(user2)
        db.session.add(user3)

        db.session.add(q1)
        db.session.add(q2)
        db.session.add(q3)
        db.session.commit()

        self.results = db.session.query(models.Query)
        self.results = self.results.join(models.User, models.Query.user_id == models.User.id)

        self.allowed_orders = {
            "name": "name",
            "-name": "-name",
            "users-name": "users-name",
            "-users-name": "-users-name",
        }
        self.default_order = "-name"

    def test_no_order_no_fallback(self):
        with self.app.test_request_context("/items?order="):
            ordered_results = order_results(self.results, self.default_order, self.allowed_orders, fallback=False)
            self.assertEqual(self.results, ordered_results)

    def test_no_order_yes_fallback(self):
        with self.app.test_request_context("/items?order="):
            ordered_results = order_results(self.results, self.default_order, self.allowed_orders, fallback=True)
            ordered_results = [entry.name for entry in ordered_results]
            self.assertEqual(ordered_results, ["c", "b", "a"])

    def test_invalid_order_no_fallback(self):
        with self.app.test_request_context("/items?order=some_invalid_order"):
            ordered_results = order_results(self.results, self.default_order, self.allowed_orders, fallback=False)
            ordered_results = [entry.name for entry in ordered_results]
            self.assertEqual(ordered_results, [entry.name for entry in self.results])

    def test_invalid_order_yes_fallback(self):
        with self.app.test_request_context("/items?order=some_invalid_order"):
            ordered_results = order_results(self.results, self.default_order, self.allowed_orders, fallback=True)
            ordered_results = [entry.name for entry in ordered_results]
            self.assertEqual(ordered_results, ["c", "b", "a"])

    def test_valid_requested_order_no_fallback(self):
        with self.app.test_request_context("/items?order=name"):
            ordered_results = order_results(self.results, self.default_order, self.allowed_orders, fallback=False)
            ordered_results = [entry.name for entry in ordered_results]
            self.assertEqual(ordered_results, ["a", "b", "c"])

    def test_valid_requested_order_yes_fallback(self):
        with self.app.test_request_context("/items?order=name"):
            ordered_results = order_results(self.results, self.default_order, self.allowed_orders, fallback=True)
            ordered_results = [entry.name for entry in ordered_results]
            self.assertEqual(ordered_results, ["a", "b", "c"])

    def test_requested_entity_no_fallback(self):
        with self.app.test_request_context("/items?order=users-name"):
            ordered_results = order_results(self.results, self.default_order, self.allowed_orders, fallback=False)
            ordered_results = [entry.name for entry in ordered_results]
            self.assertEqual(ordered_results, ["c", "b", "a"])

    def test_requested_entity_yes_fallback(self):
        with self.app.test_request_context("/items?order=-users-name"):
            ordered_results = order_results(self.results, self.default_order, self.allowed_orders, fallback=True)
            ordered_results = [entry.name for entry in ordered_results]
            self.assertEqual(ordered_results, ["a", "b", "c"])

    def test_order_by_attached(self):
        self.results = self.results.order_by(models.Query.name)
        with self.app.test_request_context("/items?order=-name"):
            ordered_results = order_results(self.results, self.default_order, self.allowed_orders, fallback=False)
            ordered_results = [entry.name for entry in ordered_results]
            self.assertEqual(ordered_results, ["c", "b", "a"])

@@ -1,5 +1,3 @@
from rq.job import JobStatus

from redash.handlers.query_results import error_messages, run_query
from redash.models import db
from tests import BaseTestCase
@@ -436,6 +434,8 @@ class TestQueryResultExcelResponse(BaseTestCase):


class TestJobResource(BaseTestCase):
    def test_cancels_queued_queries(self):
        QUEUED = 1
        FAILED = 4

        query = self.factory.create_query()
        job_id = self.make_request(
@@ -447,9 +447,10 @@ class TestJobResource(BaseTestCase):
        ]["id"]

        status = self.make_request("get", f"/api/jobs/{job_id}").json["job"]["status"]
        self.assertEqual(status, JobStatus.QUEUED)
        self.assertEqual(status, QUEUED)

        self.make_request("delete", f"/api/jobs/{job_id}")

        job = self.make_request("get", f"/api/jobs/{job_id}").json["job"]
        self.assertEqual(job["status"], JobStatus.CANCELED)
        self.assertEqual(job["status"], FAILED)
        self.assertTrue("cancelled" in job["error"])

@@ -49,7 +49,9 @@ class TestAlertEvaluate(BaseTestCase):
    def create_alert(self, results, column="foo", value="1"):
        result = self.factory.create_query_result(data=results)
        query = self.factory.create_query(latest_query_data_id=result.id)
        alert = self.factory.create_alert(query_rel=query, options={"op": "equals", "column": column, "value": value})
        alert = self.factory.create_alert(
            query_rel=query, options={"selector": "first", "op": "equals", "column": column, "value": value}
        )
        return alert

    def test_evaluate_triggers_alert_when_equal(self):
@@ -69,6 +71,24 @@ class TestAlertEvaluate(BaseTestCase):
        alert = self.create_alert(results)
        self.assertEqual(alert.evaluate(), Alert.UNKNOWN_STATE)

    def test_evaluates_correctly_with_max_selector(self):
        results = {"rows": [{"foo": 1}, {"foo": 2}], "columns": [{"name": "foo", "type": "STRING"}]}
        alert = self.create_alert(results)
        alert.options["selector"] = "max"
        self.assertEqual(alert.evaluate(), Alert.OK_STATE)

    def test_evaluates_correctly_with_min_selector(self):
        results = {"rows": [{"foo": 2}, {"foo": 1}], "columns": [{"name": "foo", "type": "STRING"}]}
        alert = self.create_alert(results)
        alert.options["selector"] = "min"
        self.assertEqual(alert.evaluate(), Alert.TRIGGERED_STATE)

    def test_evaluates_correctly_with_first_selector(self):
        results = {"rows": [{"foo": 1}, {"foo": 2}], "columns": [{"name": "foo", "type": "STRING"}]}
        alert = self.create_alert(results)
        alert.options["selector"] = "first"
        self.assertEqual(alert.evaluate(), Alert.TRIGGERED_STATE)


class TestNextState(TestCase):
    def test_numeric_value(self):
@@ -94,7 +114,9 @@ class TestAlertRenderTemplate(BaseTestCase):
    def create_alert(self, results, column="foo", value="5"):
        result = self.factory.create_query_result(data=results)
        query = self.factory.create_query(latest_query_data_id=result.id)
        alert = self.factory.create_alert(query_rel=query, options={"op": "equals", "column": column, "value": value})
        alert = self.factory.create_alert(
            query_rel=query, options={"selector": "first", "op": "equals", "column": column, "value": value}
        )
        return alert

def test_render_custom_alert_template(self):
|
||||
@@ -102,6 +124,7 @@ class TestAlertRenderTemplate(BaseTestCase):
|
||||
custom_alert = """
|
||||
<pre>
|
||||
ALERT_STATUS {{ALERT_STATUS}}
|
||||
ALERT_SELECTOR {{ALERT_SELECTOR}}
|
||||
ALERT_CONDITION {{ALERT_CONDITION}}
|
||||
ALERT_THRESHOLD {{ALERT_THRESHOLD}}
|
||||
ALERT_NAME {{ALERT_NAME}}
|
||||
@@ -116,6 +139,7 @@ class TestAlertRenderTemplate(BaseTestCase):
|
||||
expected = """
|
||||
<pre>
|
||||
ALERT_STATUS UNKNOWN
|
||||
ALERT_SELECTOR first
|
||||
ALERT_CONDITION equals
|
||||
ALERT_THRESHOLD 5
|
||||
ALERT_NAME %s
|
||||
|
||||
@@ -8,7 +8,7 @@ from tests import BaseTestCase

class DataSourceTest(BaseTestCase):
    def test_get_schema(self):
-        return_value = [{"name": "table", "columns": [], "description": None}]
+        return_value = [{"name": "table", "columns": []}]

        with mock.patch("redash.query_runner.pg.PostgreSQL.get_schema") as patched_get_schema:
            patched_get_schema.return_value = return_value
@@ -18,7 +18,7 @@ class DataSourceTest(BaseTestCase):
        self.assertEqual(return_value, schema)

    def test_get_schema_uses_cache(self):
-        return_value = [{"name": "table", "columns": [], "description": None}]
+        return_value = [{"name": "table", "columns": []}]
        with mock.patch("redash.query_runner.pg.PostgreSQL.get_schema") as patched_get_schema:
            patched_get_schema.return_value = return_value

@@ -29,12 +29,12 @@ class DataSourceTest(BaseTestCase):
        self.assertEqual(patched_get_schema.call_count, 1)

    def test_get_schema_skips_cache_with_refresh_true(self):
-        return_value = [{"name": "table", "columns": [], "description": None}]
+        return_value = [{"name": "table", "columns": []}]
        with mock.patch("redash.query_runner.pg.PostgreSQL.get_schema") as patched_get_schema:
            patched_get_schema.return_value = return_value

            self.factory.data_source.get_schema()
-            new_return_value = [{"name": "new_table", "columns": [], "description": None}]
+            new_return_value = [{"name": "new_table", "columns": []}]
            patched_get_schema.return_value = new_return_value
            schema = self.factory.data_source.get_schema(refresh=True)

@@ -43,11 +43,10 @@ class DataSourceTest(BaseTestCase):

    def test_schema_sorter(self):
        input_data = [
-            {"name": "zoo", "columns": ["is_zebra", "is_snake", "is_cow"], "description": None},
+            {"name": "zoo", "columns": ["is_zebra", "is_snake", "is_cow"]},
            {
                "name": "all_terain_vehicle",
                "columns": ["has_wheels", "has_engine", "has_all_wheel_drive"],
-                "description": None,
            },
        ]

@@ -55,9 +54,8 @@ class DataSourceTest(BaseTestCase):
            {
                "name": "all_terain_vehicle",
                "columns": ["has_all_wheel_drive", "has_engine", "has_wheels"],
-                "description": None,
            },
-            {"name": "zoo", "columns": ["is_cow", "is_snake", "is_zebra"], "description": None},
+            {"name": "zoo", "columns": ["is_cow", "is_snake", "is_zebra"]},
        ]

        real_output = self.factory.data_source._sort_schema(input_data)
@@ -66,11 +64,10 @@ class DataSourceTest(BaseTestCase):

    def test_model_uses_schema_sorter(self):
        orig_schema = [
-            {"name": "zoo", "columns": ["is_zebra", "is_snake", "is_cow"], "description": None},
+            {"name": "zoo", "columns": ["is_zebra", "is_snake", "is_cow"]},
            {
                "name": "all_terain_vehicle",
                "columns": ["has_wheels", "has_engine", "has_all_wheel_drive"],
-                "description": None,
            },
        ]

@@ -78,9 +75,8 @@ class DataSourceTest(BaseTestCase):
            {
                "name": "all_terain_vehicle",
                "columns": ["has_all_wheel_drive", "has_engine", "has_wheels"],
-                "description": None,
            },
-            {"name": "zoo", "columns": ["is_cow", "is_snake", "is_zebra"], "description": None},
+            {"name": "zoo", "columns": ["is_cow", "is_snake", "is_zebra"]},
        ]

        with mock.patch("redash.query_runner.pg.PostgreSQL.get_schema") as patched_get_schema:
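These tests pin down the cache contract of `get_schema`: a cached result is reused until `refresh=True` forces a re-fetch. A minimal sketch of that pattern, with hypothetical names (not Redash's implementation, which caches in Redis):

```python
import json

class SchemaCache:
    """Hypothetical sketch of a get_schema(refresh=...) contract."""

    def __init__(self, fetch, store):
        self._fetch = fetch  # callable returning the live schema
        self._store = store  # dict-like cache backend

    def get_schema(self, refresh=False):
        cached = self._store.get("schema")
        if refresh or cached is None:
            schema = self._fetch()
            self._store["schema"] = json.dumps(schema)
            return schema
        return json.loads(cached)

calls = []
def fetch():
    calls.append(1)
    return [{"name": "table", "columns": []}]

cache = SchemaCache(fetch, {})
cache.get_schema(); cache.get_schema()  # second call hits the cache
assert len(calls) == 1
cache.get_schema(refresh=True)          # refresh bypasses the cache
assert len(calls) == 2
```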
@@ -73,6 +73,21 @@ class TestParameterizedQuery(TestCase):

        self.assertEqual("foo baz", query.text)

+    def test_validates_text_pattern_parameters(self):
+        schema = [{"name": "bar", "type": "text-pattern", "regex": "a+"}]
+        query = ParameterizedQuery("foo {{bar}}", schema)
+
+        query.apply({"bar": "a"})
+
+        self.assertEqual("foo a", query.text)
+
+    def test_raises_on_invalid_text_pattern_parameters(self):
+        schema = [{"name": "bar", "type": "text-pattern", "regex": "a+"}]
+        query = ParameterizedQuery("foo {{bar}}", schema)
+
+        with pytest.raises(InvalidParameterError):
+            query.apply({"bar": "b"})
+
    def test_raises_on_invalid_number_parameters(self):
        schema = [{"name": "bar", "type": "number"}]
        query = ParameterizedQuery("foo", schema)
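The new `text-pattern` parameter type only accepts values matching the configured regex, so `"a+"` accepts `"a"` but rejects `"b"`. A minimal sketch of that validation, assuming full-match semantics (hypothetical helper, not `ParameterizedQuery` itself):

```python
import re

class InvalidParameterError(Exception):
    """Stand-in for Redash's error type, for illustration."""

def validate_text_pattern(value: str, regex: str) -> str:
    # Full-match semantics: "a+" accepts "a"/"aaa" but rejects "b" or "ba".
    if not re.fullmatch(regex, value):
        raise InvalidParameterError(f"{value!r} does not match {regex!r}")
    return value

validate_text_pattern("a", "a+")  # ok
try:
    validate_text_pattern("b", "a+")
except InvalidParameterError:
    pass  # rejected, as in test_raises_on_invalid_text_pattern_parameters
```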
@@ -75,9 +75,7 @@ class TestGlueSchema(TestCase):
            {"DatabaseName": "test1"},
        )
        with self.stubber:
-            assert query_runner.get_schema() == [
-                {"columns": [{"name": "row_id", "type": "int"}], "name": "test1.jdbc_table", "description": None}
-            ]
+            assert query_runner.get_schema() == [{"columns": ["row_id"], "name": "test1.jdbc_table"}]

    def test_partitioned_table(self):
        """
@@ -126,16 +124,7 @@ class TestGlueSchema(TestCase):
            {"DatabaseName": "test1"},
        )
        with self.stubber:
-            assert query_runner.get_schema() == [
-                {
-                    "columns": [
-                        {"name": "sk", "type": "partition (int)"},
-                        {"name": "category", "type": "partition", "idx": 0},
-                    ],
-                    "name": "test1.partitioned_table",
-                    "description": None,
-                }
-            ]
+            assert query_runner.get_schema() == [{"columns": ["sk", "category"], "name": "test1.partitioned_table"}]

    def test_view(self):
        query_runner = Athena({"glue": True, "region": "mars-east-1"})
@@ -167,9 +156,7 @@ class TestGlueSchema(TestCase):
            {"DatabaseName": "test1"},
        )
        with self.stubber:
-            assert query_runner.get_schema() == [
-                {"columns": [{"name": "sk", "type": "int"}], "name": "test1.view", "description": None}
-            ]
+            assert query_runner.get_schema() == [{"columns": ["sk"], "name": "test1.view"}]

    def test_dodgy_table_does_not_break_schema_listing(self):
        """
@@ -209,9 +196,7 @@ class TestGlueSchema(TestCase):
            {"DatabaseName": "test1"},
        )
        with self.stubber:
-            assert query_runner.get_schema() == [
-                {"columns": [{"name": "region", "type": "string"}], "name": "test1.csv", "description": None}
-            ]
+            assert query_runner.get_schema() == [{"columns": ["region"], "name": "test1.csv"}]

    def test_no_storage_descriptor_table(self):
        """
@@ -236,3 +221,97 @@ class TestGlueSchema(TestCase):
        )
        with self.stubber:
            assert query_runner.get_schema() == []
+
+    def test_multi_catalog_tables(self):
+        """Tables of multi-catalogs"""
+        query_runner = Athena({"glue": True, "region": "mars-east-1", "catalog_ids": "foo,bar"})
+
+        self.stubber.add_response("get_databases", {"DatabaseList": [{"Name": "test1"}]}, {"CatalogId": "foo"})
+        self.stubber.add_response(
+            "get_tables",
+            {
+                "TableList": [
+                    {
+                        "Name": "jdbc_table",
+                        "StorageDescriptor": {
+                            "Columns": [{"Name": "row_id", "Type": "int"}],
+                            "Location": "Database.Schema.Table",
+                            "Compressed": False,
+                            "NumberOfBuckets": -1,
+                            "SerdeInfo": {"Parameters": {}},
+                            "BucketColumns": [],
+                            "SortColumns": [],
+                            "Parameters": {
+                                "CrawlerSchemaDeserializerVersion": "1.0",
+                                "CrawlerSchemaSerializerVersion": "1.0",
+                                "UPDATED_BY_CRAWLER": "jdbc",
+                                "classification": "sqlserver",
+                                "compressionType": "none",
+                                "connectionName": "jdbctest",
+                                "typeOfData": "view",
+                            },
+                            "StoredAsSubDirectories": False,
+                        },
+                        "PartitionKeys": [],
+                        "TableType": "EXTERNAL_TABLE",
+                        "Parameters": {
+                            "CrawlerSchemaDeserializerVersion": "1.0",
+                            "CrawlerSchemaSerializerVersion": "1.0",
+                            "UPDATED_BY_CRAWLER": "jdbc",
+                            "classification": "sqlserver",
+                            "compressionType": "none",
+                            "connectionName": "jdbctest",
+                            "typeOfData": "view",
+                        },
+                    }
+                ]
+            },
+            {"CatalogId": "foo", "DatabaseName": "test1"},
+        )
+        self.stubber.add_response("get_databases", {"DatabaseList": [{"Name": "test2"}]}, {"CatalogId": "bar"})
+        self.stubber.add_response(
+            "get_tables",
+            {
+                "TableList": [
+                    {
+                        "Name": "jdbc_table",
+                        "StorageDescriptor": {
+                            "Columns": [{"Name": "row_id", "Type": "int"}],
+                            "Location": "Database.Schema.Table",
+                            "Compressed": False,
+                            "NumberOfBuckets": -1,
+                            "SerdeInfo": {"Parameters": {}},
+                            "BucketColumns": [],
+                            "SortColumns": [],
+                            "Parameters": {
+                                "CrawlerSchemaDeserializerVersion": "1.0",
+                                "CrawlerSchemaSerializerVersion": "1.0",
+                                "UPDATED_BY_CRAWLER": "jdbc",
+                                "classification": "sqlserver",
+                                "compressionType": "none",
+                                "connectionName": "jdbctest",
+                                "typeOfData": "view",
+                            },
+                            "StoredAsSubDirectories": False,
+                        },
+                        "PartitionKeys": [],
+                        "TableType": "EXTERNAL_TABLE",
+                        "Parameters": {
+                            "CrawlerSchemaDeserializerVersion": "1.0",
+                            "CrawlerSchemaSerializerVersion": "1.0",
+                            "UPDATED_BY_CRAWLER": "jdbc",
+                            "classification": "sqlserver",
+                            "compressionType": "none",
+                            "connectionName": "jdbctest",
+                            "typeOfData": "view",
+                        },
+                    }
+                ]
+            },
+            {"CatalogId": "bar", "DatabaseName": "test2"},
+        )
+        with self.stubber:
+            assert query_runner.get_schema() == [
+                {"columns": ["row_id"], "name": "test1.jdbc_table"},
+                {"columns": ["row_id"], "name": "test2.jdbc_table"},
+            ]
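The new test drives `get_schema` across several Glue catalogs (`catalog_ids: "foo,bar"`). A rough sketch of that iteration with plain boto3 (simplified; Redash's Athena runner also handles partitions, views, pagination, and malformed tables):

```python
import boto3

def glue_schema(region: str, catalog_ids: list) -> list:
    """Simplified sketch: list {database}.{table} plus column names per catalog."""
    glue = boto3.client("glue", region_name=region)
    schema = []
    for catalog_id in catalog_ids:
        databases = glue.get_databases(CatalogId=catalog_id)["DatabaseList"]
        for database in databases:
            tables = glue.get_tables(CatalogId=catalog_id, DatabaseName=database["Name"])["TableList"]
            for table in tables:
                # Tables without a StorageDescriptor contribute no columns.
                columns = table.get("StorageDescriptor", {}).get("Columns", [])
                schema.append({
                    "name": f"{database['Name']}.{table['Name']}",
                    "columns": [col["Name"] for col in columns],
                })
    return schema
```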
@@ -1,4 +1,4 @@
-from unittest import TestCase
+from unittest import TestCase, mock

from redash.query_runner.elasticsearch2 import (
    ElasticSearch2,
@@ -137,3 +137,14 @@ class TestXPackSQL(TestCase):
            ],
        }
        self.assertDictEqual(XPackSQLElasticSearch._parse_results(None, response), expected)
+
+
+class TestElasticSearch2(TestCase):
+    @mock.patch("redash.query_runner.elasticsearch2.ElasticSearch2.__init__", return_value=None)
+    def test_build_query(self, mock_init):
+        query_runner = ElasticSearch2()
+        query_str = '{"index": "test_index", "result_fields": ["field1", "field2"]}'
+        query_dict, url, result_fields = query_runner._build_query(query_str)
+        self.assertEqual(query_dict, {})
+        self.assertEqual(url, "/test_index/_search")
+        self.assertEqual(result_fields, ["field1", "field2"])
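From the assertions, `_build_query` pops the routing keys (`index`, `result_fields`) out of the query JSON and derives the search URL, leaving the rest as the request body. A behavior-equivalent sketch, inferred from the test rather than from the runner's source:

```python
import json

def build_query(query_str: str):
    """Inferred behavior: split routing info from the ES query body."""
    query = json.loads(query_str)
    index = query.pop("index", "")                    # which index to search
    result_fields = query.pop("result_fields", None)  # columns to keep
    url = f"/{index}/_search"
    return query, url, result_fields

assert build_query('{"index": "test_index", "result_fields": ["field1", "field2"]}') == (
    {},
    "/test_index/_search",
    ["field1", "field2"],
)
```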
@@ -5,6 +5,7 @@ from freezegun import freeze_time
from mock import patch
from pytz import utc

+from redash.query_runner import TYPE_INTEGER, TYPE_STRING
from redash.query_runner.mongodb import (
    MongoDB,
    _get_column_by_name,
@@ -15,7 +16,7 @@ from redash.utils import json_dumps, parse_human_time


@patch("redash.query_runner.mongodb.pymongo.MongoClient")
-class TestUserPassOverride(TestCase):
+class TestMongoDB(TestCase):
    def test_username_password_present_overrides_username_from_uri(self, mongo_client):
        config = {
            "connectionString": "mongodb://localhost:27017/test",
@@ -37,6 +38,66 @@ class TestUserPassOverride(TestCase):
        self.assertNotIn("username", mongo_client.call_args.kwargs)
        self.assertNotIn("password", mongo_client.call_args.kwargs)

+    def test_run_query_with_fields(self, mongo_client):
+        config = {
+            "connectionString": "mongodb://localhost:27017/test",
+            "username": "test_user",
+            "password": "test_pass",
+            "dbName": "test",
+        }
+        mongo_qr = MongoDB(config)
+
+        query = {"collection": "test", "query": {"age": 10}, "fields": {"_id": 1, "name": 2}}
+
+        return_value = [{"_id": "6569ee53d53db7930aaa0cc0", "name": "test2"}]
+
+        expected = {
+            "columns": [
+                {"name": "_id", "friendly_name": "_id", "type": TYPE_STRING},
+                {"name": "name", "friendly_name": "name", "type": TYPE_STRING},
+            ],
+            "rows": return_value,
+        }
+
+        mongo_client().__getitem__().__getitem__().find.return_value = return_value
+        result, err = mongo_qr.run_query(json_dumps(query), None)
+
+        self.assertIsNone(err)
+        self.assertEqual(expected, result)
+
+    def test_run_query_with_aggregate(self, mongo_client):
+        config = {
+            "connectionString": "mongodb://localhost:27017/test",
+            "username": "test_user",
+            "password": "test_pass",
+            "dbName": "test",
+        }
+        mongo_qr = MongoDB(config)
+
+        query = {
+            "collection": "test",
+            "aggregate": [
+                {"$unwind": "$tags"},
+                {"$group": {"_id": "$tags", "count": {"$sum": 1}}},
+                {"$sort": [{"name": "count", "direction": -1}, {"name": "_id", "direction": -1}]},
+            ],
+        }
+
+        return_value = [{"_id": "foo", "count": 10}, {"_id": "bar", "count": 9}]
+
+        expected = {
+            "columns": [
+                {"name": "_id", "friendly_name": "_id", "type": TYPE_STRING},
+                {"name": "count", "friendly_name": "count", "type": TYPE_INTEGER},
+            ],
+            "rows": return_value,
+        }
+
+        mongo_client().__getitem__().__getitem__().aggregate.return_value = return_value
+        result, err = mongo_qr.run_query(json_dumps(query), None)
+        self.assertIsNone(err)
+        self.assertEqual(expected, result)
+
+
class TestParseQueryJson(TestCase):
    def test_ignores_non_isodate_fields(self):
@@ -130,6 +191,7 @@ class TestMongoResults(TestCase):
        for i, row in enumerate(rows):
            self.assertDictEqual(row, raw_results[i])

+        self.assertEqual(3, len(columns))
        self.assertIsNotNone(_get_column_by_name(columns, "column"))
        self.assertIsNotNone(_get_column_by_name(columns, "column2"))
        self.assertIsNotNone(_get_column_by_name(columns, "column3"))
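The two new tests pin down how a Redash query document maps onto pymongo calls: `"fields"` becomes a `find()` projection and `"aggregate"` becomes an aggregation pipeline. A minimal sketch under those assumptions (the runner additionally normalizes sort specs, dates, and column types):

```python
from pymongo import MongoClient

client = MongoClient("mongodb://localhost:27017/test")
db = client["test"]

# "fields" maps to a find() projection...
rows = list(db["test"].find({"age": 10}, {"_id": 1, "name": 1}))

# ...while "aggregate" maps to an aggregation pipeline. Note the test's sort
# spec ({"name": ..., "direction": ...}) is Redash's own format; it has to be
# rewritten into Mongo's {"$sort": {"count": -1, "_id": -1}} form first.
rows = list(db["test"].aggregate([
    {"$unwind": "$tags"},
    {"$group": {"_id": "$tags", "count": {"$sum": 1}}},
    {"$sort": {"count": -1, "_id": -1}},
]))
```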
@@ -1,7 +1,6 @@
from mock import Mock, patch
-from rq import Connection
from rq.exceptions import NoSuchJobError
from rq.job import JobStatus

from redash import models, rq_redis_connection
from redash.query_runner.pg import PostgreSQL
@@ -22,7 +21,7 @@ def fetch_job(*args, **kwargs):

    result = Mock()
    result.id = job_id
-    result.is_cancelled = False
+    result.get_status = lambda: JobStatus.STARTED

    return result

@@ -108,7 +107,7 @@ class TestEnqueueTask(BaseTestCase):
        # "cancel" the previous job
        def cancel_job(*args, **kwargs):
            job = fetch_job(*args, **kwargs)
-            job.is_cancelled = True
+            job.get_status = lambda: JobStatus.CANCELED
            return job

        my_fetch_job.side_effect = cancel_job
@@ -220,6 +220,21 @@ export default function GeneralSettings({ options, data, onOptionsChange }: any)
          {/* @ts-expect-error ts-migrate(2339) FIXME: Property 'Option' does not exist on type '({ class... Remove this comment to see the full error message */}
        </Select.Option>
      </Select>
+      <Select
+        label="Sort"
+        defaultValue={options.piesort}
+        onChange={(val: any) => onOptionsChange({ piesort: val })}>
+        {/* @ts-expect-error ts-migrate(2339) FIXME: Property 'Option' does not exist on type '({ class... Remove this comment to see the full error message */}
+        <Select.Option value={true}>
+          True
+          {/* @ts-expect-error ts-migrate(2339) FIXME: Property 'Option' does not exist on type '({ class... Remove this comment to see the full error message */}
+        </Select.Option>
+        {/* @ts-expect-error ts-migrate(2339) FIXME: Property 'Option' does not exist on type '({ class... Remove this comment to see the full error message */}
+        <Select.Option value={false}>
+          False
+          {/* @ts-expect-error ts-migrate(2339) FIXME: Property 'Option' does not exist on type '({ class... Remove this comment to see the full error message */}
+        </Select.Option>
+      </Select>
    </Section>
  )}
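The new `piesort` option is threaded through to the pie trace in the hunks that follow (`sort: options.piesort`). Plotly sorts pie slices largest-first by default; turning it off keeps the data's own order. The same knob in Plotly's Python API, purely for illustration:

```python
import plotly.graph_objects as go

# sort=True (Plotly's default) reorders slices by value;
# sort=False preserves the order of the input data.
fig = go.Figure(go.Pie(labels=["a", "b", "c"], values=[1, 3, 2], sort=False))
fig.show()
```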
@@ -16,6 +16,7 @@ const DEFAULT_OPTIONS = {
  direction: { type: "counterclockwise" },
  sizemode: "diameter",
  coefficient: 1,
+  piesort: true,

  // showDataLabels: false, // depends on chart type
  numberFormat: "0,0[.]00000",
@@ -92,9 +92,7 @@ function prepareSeries(series: any, options: any, additionalOptions: any) {

  const sourceData = new Map();

-  //we hold the labels and values in a dictionary so that we can aggregate multiple values for a single label
-  //once we reach the end of the data, we'll convert the dictionary to separate arrays for labels and values
-  const labelsValuesDict: { [key: string]: any } = {};
+  const labelsValuesMap = new Map();

  const yErrorValues: any = [];
  each(data, row => {
@@ -102,13 +100,12 @@ function prepareSeries(series: any, options: any, additionalOptions: any) {
    const y = cleanYValue(row.y, seriesYAxis === "y2" ? options.yAxis[1].type : options.yAxis[0].type); // depends on series type!
    const yError = cleanNumber(row.yError); // always number
    const size = cleanNumber(row.size); // always number
-    if (x in labelsValuesDict){
-      labelsValuesDict[x] += y;
-    }
-    else{
-      labelsValuesDict[x] = y;
-    }
-    const aggregatedY = labelsValuesDict[x];
+    if (labelsValuesMap.has(x)) {
+      labelsValuesMap.set(x, labelsValuesMap.get(x) + y);
+    } else {
+      labelsValuesMap.set(x, y);
+    }
+    const aggregatedY = labelsValuesMap.get(x);

    sourceData.set(x, {
      x,
@@ -121,8 +118,8 @@ function prepareSeries(series: any, options: any, additionalOptions: any) {
    yErrorValues.push(yError);
  });

-  const xValues = Object.keys(labelsValuesDict);
-  const yValues = Object.values(labelsValuesDict);
+  const xValues = Array.from(labelsValuesMap.keys());
+  const yValues = Array.from(labelsValuesMap.values());

  const plotlySeries = {
    visible: true,
@@ -41,9 +41,7 @@ function prepareSeries(series: any, options: any, additionalOptions: any) {
    const xPosition = (index % cellsInRow) * cellWidth;
    const yPosition = Math.floor(index / cellsInRow) * cellHeight;

-    //we hold the labels and values in a dictionary so that we can aggregate multiple values for a single label
-    //once we reach the end of the data, we'll convert the dictionary to separate arrays for labels and values
-    const labelsValuesDict: { [key: string]: any } = {};
+    const labelsValuesMap = new Map();

    const sourceData = new Map();
    const seriesTotal = reduce(
@@ -58,13 +56,13 @@ function prepareSeries(series: any, options: any, additionalOptions: any) {
      const x = hasX ? normalizeValue(row.x, options.xAxis.type) : `Slice ${index}`;
      const y = cleanNumber(row.y);

-      if (x in labelsValuesDict){
-        labelsValuesDict[x] += y;
-      }
-      else{
-        labelsValuesDict[x] = y;
-      }
-      const aggregatedY = labelsValuesDict[x];
+      if (labelsValuesMap.has(x)) {
+        labelsValuesMap.set(x, labelsValuesMap.get(x) + y);
+      } else {
+        labelsValuesMap.set(x, y);
+      }
+      const aggregatedY = labelsValuesMap.get(x);

      sourceData.set(x, {
        x,
@@ -77,8 +75,8 @@ function prepareSeries(series: any, options: any, additionalOptions: any) {
    const markerColors = map(Array.from(sourceData.values()), data => getValueColor(data.row.x));
    const textColors = map(markerColors, c => chooseTextColorForBackground(c));

-    const labels = Object.keys(labelsValuesDict);
-    const values = Object.values(labelsValuesDict);
+    const labels = Array.from(labelsValuesMap.keys());
+    const values = Array.from(labelsValuesMap.values());

    return {
      visible: true,
@@ -103,6 +101,7 @@ function prepareSeries(series: any, options: any, additionalOptions: any) {
        y: [yPosition, yPosition + cellHeight - yPadding],
      },
      sourceData,
+      sort: options.piesort,
    };
  }
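Both `prepareSeries` copies switch the label-to-value accumulator from a plain object to a `Map`, which preserves insertion order and avoids object-key coercion (object keys are always strings, and keys like `"__proto__"` misbehave). The same aggregation expressed in Python, for illustration:

```python
def aggregate_by_label(rows):
    """Sum y per label, preserving first-seen order (like a JS Map)."""
    totals = {}  # Python dicts preserve insertion order (3.7+)
    for x, y in rows:
        totals[x] = totals.get(x, 0) + y
    return list(totals.keys()), list(totals.values())

labels, values = aggregate_by_label([("a", 1), ("b", 2), ("a", 3)])
assert labels == ["a", "b"] and values == [4, 2]
```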
@@ -1,28 +1,51 @@
import { map } from "lodash";
-import React from "react";
+import React, { useState } from "react";
import { Section, Select } from "@/components/visualizations/editor";
import { EditorPropTypes } from "@/visualizations/prop-types";

const ALLOWED_ITEM_PER_PAGE = [5, 10, 15, 20, 25, 50, 100, 150, 200, 250, 500];

+const ALLOWED_COLS_TO_FIX = [0, 1, 2, 3, 4]
+
export default function GridSettings({ options, onOptionsChange }: any) {
+  const numCols = options.columns.length;
+  const maxColsToFix = Math.min(4, numCols - 1);
+
  return (
-    // @ts-expect-error ts-migrate(2745) FIXME: This JSX tag's 'children' prop expects type 'never... Remove this comment to see the full error message
-    <Section>
-      <Select
-        label="Items per page"
-        data-test="Table.ItemsPerPage"
-        defaultValue={options.itemsPerPage}
-        onChange={(itemsPerPage: any) => onOptionsChange({ itemsPerPage })}>
-        {map(ALLOWED_ITEM_PER_PAGE, value => (
-          // @ts-expect-error ts-migrate(2339) FIXME: Property 'Option' does not exist on type '({ class... Remove this comment to see the full error message
-          <Select.Option key={`ipp${value}`} value={value} data-test={`Table.ItemsPerPage.${value}`}>
-            {value}
-            {/* @ts-expect-error ts-migrate(2339) FIXME: Property 'Option' does not exist on type '({ class... Remove this comment to see the full error message */}
-          </Select.Option>
-        ))}
-      </Select>
-    </Section>
+    <React.Fragment>
+      {/* @ts-expect-error ts-migrate(2745) FIXME: This JSX tag's 'children' prop expects type 'never' but its value is 'Element'. */}
+      <Section>
+        <Select
+          label="Items per page"
+          data-test="Table.ItemsPerPage"
+          defaultValue={options.itemsPerPage}
+          onChange={(itemsPerPage: any) => onOptionsChange({ itemsPerPage })}>
+          {map(ALLOWED_ITEM_PER_PAGE, value => (
+            // @ts-expect-error ts-migrate(2339) FIXME: Property 'Option' does not exist on type '({ class... Remove this comment to see the full error message
+            <Select.Option key={`ipp${value}`} value={value} data-test={`Table.ItemsPerPage.${value}`}>
+              {value}
+              {/* @ts-expect-error ts-migrate(2339) FIXME: Property 'Option' does not exist on type '({ class... Remove this comment to see the full error message */}
+            </Select.Option>
+          ))}
+        </Select>
+      </Section>
+      {/* @ts-expect-error ts-migrate(2745) FIXME: This JSX tag's 'children' prop expects type 'never' but its value is 'Element'. */}
+      <Section>
+        <Select
+          label="Number of Columns to Fix in Place"
+          data-test="FixedColumns"
+          defaultValue={options.fixedColumns}
+          onChange={(fixedColumns: number) => {onOptionsChange({ fixedColumns })}}>
+          {map(ALLOWED_COLS_TO_FIX.slice(0, maxColsToFix + 1), value => (
+            // @ts-expect-error ts-migrate(2339) FIXME: Property 'Option' does not exist on type '({ class... Remove this comment to see the full error message
+            <Select.Option key={`fc${value}`} value={value}>
+              {value}
+              {/* @ts-expect-error ts-migrate(2339) FIXME: Property 'Option' does not exist on type '({ class... Remove this comment to see the full error message */}
+            </Select.Option>
+          ))}
+        </Select>
+      </Section>
+    </React.Fragment>
  );
}
@@ -13,6 +13,7 @@ Object {
  ],
  "dateTimeFormat": undefined,
  "displayAs": "string",
+  "fixed": false,
  "highlightLinks": false,
  "imageHeight": "",
  "imageTitleTemplate": "{{ @ }}",
@@ -46,6 +47,7 @@ Object {
  ],
  "dateTimeFormat": undefined,
  "displayAs": "number",
+  "fixed": false,
  "highlightLinks": false,
  "imageHeight": "",
  "imageTitleTemplate": "{{ @ }}",
@@ -79,6 +81,7 @@ Object {
  ],
  "dateTimeFormat": undefined,
  "displayAs": "string",
+  "fixed": false,
  "highlightLinks": false,
  "imageHeight": "",
  "imageTitleTemplate": "{{ @ }}",
@@ -112,6 +115,7 @@ Object {
  ],
  "dateTimeFormat": undefined,
  "displayAs": "string",
+  "fixed": false,
  "highlightLinks": false,
  "imageHeight": "",
  "imageTitleTemplate": "{{ @ }}",
@@ -145,6 +149,7 @@ Object {
  ],
  "dateTimeFormat": undefined,
  "displayAs": "string",
+  "fixed": false,
  "highlightLinks": false,
  "imageHeight": "",
  "imageTitleTemplate": "{{ @ }}",
@@ -84,6 +84,13 @@ export default function Renderer({ options, data }: any) {
  const [searchTerm, setSearchTerm] = useState("");
  const [orderBy, setOrderBy] = useState([]);

+  const columnsToFix = new Set<string>();
+  for (let i = 0; i < options.fixedColumns; i++) {
+    if (options.columns[i]) {
+      columnsToFix.add(options.columns[i].name);
+    }
+  }
+
  const searchColumns = useMemo(() => filter(options.columns, "allowSearch"), [options.columns]);

  const tableColumns = useMemo(() => {
@@ -97,7 +104,7 @@ export default function Renderer({ options, data }: any) {
      // Remove text selection - may occur accidentally
      // @ts-expect-error ts-migrate(2531) FIXME: Object is possibly 'null'.
      document.getSelection().removeAllRanges();
-    });
+    }, columnsToFix);
  }, [options.columns, searchColumns, orderBy]);

  const preparedRows = useMemo(() => sortRows(filterRows(initRows(data.rows), searchTerm, searchColumns), orderBy), [
@@ -134,6 +141,7 @@ export default function Renderer({ options, data }: any) {
          showSizeChanger: false,
        }}
        showSorterTooltip={false}
+        scroll = {{x : 'max-content'}}
      />
    </div>
  );
@@ -4,6 +4,7 @@ import { visualizationsSettings } from "@/visualizations/visualizationsSettings"
const DEFAULT_OPTIONS = {
  itemsPerPage: 25,
  paginationSize: "default", // not editable through Editor
+  fixedColumns: 0,
};

const filterTypes = ["filter", "multi-filter", "multiFilter"];
@@ -56,6 +57,7 @@ function getDefaultColumnsOptions(columns: any) {
    // `string` cell options
    allowHTML: true,
    highlightLinks: false,
+    fixed: false,
  }));
}
@@ -21,7 +21,6 @@
    left: 0;
    top: 0;
    border-top: 0;
-    z-index: 1;
    background: #fafafa !important;
  }
}
@@ -157,3 +156,11 @@
    color: @text-color-secondary;
  }
}
+
+.ant-table-cell-fix-left{
+  background-color: #fff !important;
+}
+
+.ant-table-tbody > tr.ant-table-row:hover > .ant-table-cell-fix-left {
+  background-color: rgb(248, 249, 250) !important;
+}
@@ -50,7 +50,7 @@ function getOrderByInfo(orderBy: any) {
  return result;
}

-export function prepareColumns(columns: any, searchInput: any, orderBy: any, onOrderByChange: any) {
+export function prepareColumns(columns: any, searchInput: any, orderBy: any, onOrderByChange: any, columnsToFix: Set<string>) {
  columns = filter(columns, "visible");
  columns = sortBy(columns, "order");

@@ -96,6 +96,7 @@ export function prepareColumns(columns: any, searchInput: any, orderBy: any, onO
      }),
      onClick: (event: any) => onOrderByChange(toggleOrderBy(column.name, orderBy, event.shiftKey)),
    }),
+    fixed: columnsToFix.has(column.name) ? 'left' : false
  };

  // @ts-expect-error ts-migrate(7053) FIXME: Element implicitly has an 'any' type because expre... Remove this comment to see the full error message
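End to end, the fixed-columns feature takes the first `fixedColumns` visible columns and pins them via Ant Design's per-column `fixed: 'left'` flag. The selection logic restated in Python, purely as an illustration:

```python
def column_fixed_flags(columns, fixed_count):
    """Mirror of the TS logic: pin the first `fixed_count` columns left."""
    to_fix = {c["name"] for c in columns[:fixed_count]}
    return [
        {**c, "fixed": "left" if c["name"] in to_fix else False}
        for c in columns
    ]

cols = [{"name": "id"}, {"name": "name"}, {"name": "value"}]
flags = column_fixed_flags(cols, 1)
assert flags[0]["fixed"] == "left" and flags[1]["fixed"] is False
```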
@@ -10659,9 +10659,9 @@ write-file-atomic@2.4.1, write-file-atomic@^2.3.0:
    signal-exit "^3.0.2"

ws@^5.2.0:
-  version "5.2.3"
-  resolved "https://registry.yarnpkg.com/ws/-/ws-5.2.3.tgz#05541053414921bc29c63bee14b8b0dd50b07b3d"
-  integrity sha512-jZArVERrMsKUatIdnLzqvcfydI85dvd/Fp1u/VOpfdDWQ4c9qWXe+VIeAbQ5FrDwciAkr+lzofXLz3Kuf26AOA==
+  version "5.2.4"
+  resolved "https://registry.yarnpkg.com/ws/-/ws-5.2.4.tgz#c7bea9f1cfb5f410de50e70e82662e562113f9a7"
+  integrity sha512-fFCejsuC8f9kOSu9FYaOw8CdO68O3h5v0lg4p74o8JqWpwTf9tniOD+nOB78aWoVSS6WptVUmDrp/KPsMVBWFQ==
  dependencies:
    async-limiter "~1.0.0"
yarn.lock
@@ -15700,21 +15700,21 @@ write@1.0.3:
    mkdirp "^0.5.1"

ws@^5.2.0:
-  version "5.2.3"
-  resolved "https://registry.yarnpkg.com/ws/-/ws-5.2.3.tgz#05541053414921bc29c63bee14b8b0dd50b07b3d"
-  integrity sha512-jZArVERrMsKUatIdnLzqvcfydI85dvd/Fp1u/VOpfdDWQ4c9qWXe+VIeAbQ5FrDwciAkr+lzofXLz3Kuf26AOA==
+  version "5.2.4"
+  resolved "https://registry.yarnpkg.com/ws/-/ws-5.2.4.tgz#c7bea9f1cfb5f410de50e70e82662e562113f9a7"
+  integrity sha512-fFCejsuC8f9kOSu9FYaOw8CdO68O3h5v0lg4p74o8JqWpwTf9tniOD+nOB78aWoVSS6WptVUmDrp/KPsMVBWFQ==
  dependencies:
    async-limiter "~1.0.0"

ws@^7.2.3, ws@^7.3.1:
-  version "7.5.9"
-  resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.9.tgz#54fa7db29f4c7cec68b1ddd3a89de099942bb591"
-  integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q==
+  version "7.5.10"
+  resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.10.tgz#58b5c20dc281633f6c19113f39b349bd8bd558d9"
+  integrity sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ==

ws@^8.13.0:
-  version "8.13.0"
-  resolved "https://registry.yarnpkg.com/ws/-/ws-8.13.0.tgz#9a9fb92f93cf41512a0735c8f4dd09b8a1211cd0"
-  integrity sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA==
+  version "8.17.1"
+  resolved "https://registry.yarnpkg.com/ws/-/ws-8.17.1.tgz#9293da530bb548febc95371d90f9c878727d919b"
+  integrity sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==

xml-name-validator@^3.0.0:
  version "3.0.0"