Mirror of https://github.com/getredash/redash.git (synced 2025-12-25 10:00:45 -05:00)

Compare commits: 24.07.0-dev ... 24.01.0-dev (1 commit)

| Author | SHA1 | Date |
|---|---|---|
|  | 542c1c324c |  |
@@ -1,11 +1,11 @@
FROM cypress/browsers:node18.12.0-chrome106-ff106
FROM cypress/browsers:node16.18.0-chrome90-ff88

ENV APP /usr/src/app
WORKDIR $APP

COPY package.json yarn.lock .yarnrc $APP/
COPY viz-lib $APP/viz-lib
RUN npm install yarn@1.22.22 -g && yarn --frozen-lockfile --network-concurrency 1 > /dev/null
RUN npm install yarn@1.22.19 -g && yarn --frozen-lockfile --network-concurrency 1 > /dev/null

COPY . $APP

@@ -1,3 +1,4 @@
version: '2.2'
services:
redash:
build: ../
@@ -18,7 +19,7 @@ services:
image: redis:7-alpine
restart: unless-stopped
postgres:
image: pgautoupgrade/pgautoupgrade:latest
image: pgautoupgrade/pgautoupgrade:15-alpine3.8
command: "postgres -c fsync=off -c full_page_writes=off -c synchronous_commit=OFF"
restart: unless-stopped
environment:

@@ -1,3 +1,4 @@
version: "2.2"
x-redash-service: &redash-service
build:
context: ../
@@ -66,7 +67,7 @@ services:
image: redis:7-alpine
restart: unless-stopped
postgres:
image: pgautoupgrade/pgautoupgrade:latest
image: pgautoupgrade/pgautoupgrade:15-alpine3.8
command: "postgres -c fsync=off -c full_page_writes=off -c synchronous_commit=OFF"
restart: unless-stopped
environment:

@@ -1,4 +1,5 @@
client/.tmp/
client/dist/
node_modules/
viz-lib/node_modules/
.tmp/

147 .github/workflows/ci.yml (vendored)
@@ -3,24 +3,17 @@ on:
push:
branches:
- master
pull_request_target:
branches:
- master
pull_request:
env:
NODE_VERSION: 18
YARN_VERSION: 1.22.22
NODE_VERSION: 16.20.1
jobs:
backend-lint:
runs-on: ubuntu-22.04
steps:
- if: github.event.pull_request.mergeable == 'false'
name: Exit if PR is not mergeable
run: exit 1
- uses: actions/checkout@v4
- uses: actions/checkout@v3
with:
fetch-depth: 1
ref: ${{ github.event.pull_request.head.sha }}
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: '3.8'
- run: sudo pip install black==23.1.0 ruff==0.0.287
@@ -31,18 +24,14 @@ jobs:
runs-on: ubuntu-22.04
needs: backend-lint
env:
COMPOSE_FILE: .ci/compose.ci.yaml
COMPOSE_FILE: .ci/docker-compose.ci.yml
COMPOSE_PROJECT_NAME: redash
COMPOSE_DOCKER_CLI_BUILD: 1
DOCKER_BUILDKIT: 1
steps:
- if: github.event.pull_request.mergeable == 'false'
name: Exit if PR is not mergeable
run: exit 1
- uses: actions/checkout@v4
- uses: actions/checkout@v3
with:
fetch-depth: 1
ref: ${{ github.event.pull_request.head.sha }}
- name: Build Docker Images
run: |
set -x
@@ -76,20 +65,16 @@ jobs:
frontend-lint:
runs-on: ubuntu-22.04
steps:
- if: github.event.pull_request.mergeable == 'false'
name: Exit if PR is not mergeable
run: exit 1
- uses: actions/checkout@v4
- uses: actions/checkout@v3
with:
fetch-depth: 1
ref: ${{ github.event.pull_request.head.sha }}
- uses: actions/setup-node@v4
- uses: actions/setup-node@v3
with:
node-version: ${{ env.NODE_VERSION }}
cache: 'yarn'
- name: Install Dependencies
run: |
npm install --global --force yarn@$YARN_VERSION
npm install --global --force yarn@1.22.19
yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
- name: Run Lint
run: yarn lint:ci
@@ -103,20 +88,16 @@ jobs:
runs-on: ubuntu-22.04
needs: frontend-lint
steps:
- if: github.event.pull_request.mergeable == 'false'
name: Exit if PR is not mergeable
run: exit 1
- uses: actions/checkout@v4
- uses: actions/checkout@v3
with:
fetch-depth: 1
ref: ${{ github.event.pull_request.head.sha }}
- uses: actions/setup-node@v4
- uses: actions/setup-node@v3
with:
node-version: ${{ env.NODE_VERSION }}
cache: 'yarn'
- name: Install Dependencies
run: |
npm install --global --force yarn@$YARN_VERSION
npm install --global --force yarn@1.22.19
yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
- name: Run App Tests
run: yarn test
@@ -128,22 +109,18 @@ jobs:
runs-on: ubuntu-22.04
needs: frontend-lint
env:
COMPOSE_FILE: .ci/compose.cypress.yaml
COMPOSE_FILE: .ci/docker-compose.cypress.yml
COMPOSE_PROJECT_NAME: cypress
PERCY_TOKEN_ENCODED: ZGRiY2ZmZDQ0OTdjMzM5ZWE0ZGQzNTZiOWNkMDRjOTk4Zjg0ZjMxMWRmMDZiM2RjOTYxNDZhOGExMjI4ZDE3MA==
CYPRESS_PROJECT_ID_ENCODED: OTI0Y2th
CYPRESS_RECORD_KEY_ENCODED: YzA1OTIxMTUtYTA1Yy00NzQ2LWEyMDMtZmZjMDgwZGI2ODgx
CYPRESS_INSTALL_BINARY: 0
PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: 1
PERCY_TOKEN: ${{ secrets.PERCY_TOKEN }}
CYPRESS_PROJECT_ID: ${{ secrets.CYPRESS_PROJECT_ID }}
CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
steps:
- if: github.event.pull_request.mergeable == 'false'
name: Exit if PR is not mergeable
run: exit 1
- uses: actions/checkout@v4
- uses: actions/checkout@v3
with:
fetch-depth: 1
ref: ${{ github.event.pull_request.head.sha }}
- uses: actions/setup-node@v4
- uses: actions/setup-node@v3
with:
node-version: ${{ env.NODE_VERSION }}
cache: 'yarn'
@@ -153,7 +130,7 @@ jobs:
echo "CODE_COVERAGE=true" >> "$GITHUB_ENV"
- name: Install Dependencies
run: |
npm install --global --force yarn@$YARN_VERSION
npm install --global --force yarn@1.22.19
yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
- name: Setup Redash Server
run: |
@@ -173,3 +150,89 @@ jobs:
with:
name: coverage
path: coverage

build-skip-check:
runs-on: ubuntu-22.04
outputs:
skip: ${{ steps.skip-check.outputs.skip }}
steps:
- name: Skip?
id: skip-check
run: |
if [[ "${{ vars.DOCKER_USER }}" == '' ]]; then
echo 'Docker user is empty. Skipping build+push'
echo skip=true >> "$GITHUB_OUTPUT"
elif [[ "${{ secrets.DOCKER_PASS }}" == '' ]]; then
echo 'Docker password is empty. Skipping build+push'
echo skip=true >> "$GITHUB_OUTPUT"
elif [[ "${{ github.ref_name }}" != 'master' ]]; then
echo 'Ref name is not `master`. Skipping build+push'
echo skip=true >> "$GITHUB_OUTPUT"
else
echo 'Docker user and password are set and branch is `master`.'
echo 'Building + pushing `preview` image.'
echo skip=false >> "$GITHUB_OUTPUT"
fi

build-docker-image:
runs-on: ubuntu-22.04
needs:
- backend-unit-tests
- frontend-unit-tests
- frontend-e2e-tests
- build-skip-check
if: needs.build-skip-check.outputs.skip == 'false'
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 1
- uses: actions/setup-node@v3
with:
node-version: ${{ env.NODE_VERSION }}
cache: 'yarn'
- name: Install Dependencies
run: |
npm install --global --force yarn@1.22.19
yarn cache clean && yarn --frozen-lockfile --network-concurrency 1

- name: Set up QEMU
timeout-minutes: 1
uses: docker/setup-qemu-action@v2.2.0

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2

- name: Login to DockerHub
uses: docker/login-action@v2
with:
username: ${{ vars.DOCKER_USER }}
password: ${{ secrets.DOCKER_PASS }}

- name: Bump version
id: version
run: |
set -x
.ci/update_version
VERSION=$(jq -r .version package.json)
VERSION_TAG="${VERSION}.b${GITHUB_RUN_ID}.${GITHUB_RUN_NUMBER}"
echo "VERSION_TAG=$VERSION_TAG" >> "$GITHUB_OUTPUT"

- name: Build and push preview image to Docker Hub
uses: docker/build-push-action@v4
with:
push: true
tags: |
redash/redash:preview
redash/preview:${{ steps.version.outputs.VERSION_TAG }}
context: .
build-args: |
test_all_deps=true
cache-from: type=gha
cache-to: type=gha,mode=max
platforms: linux/amd64
env:
DOCKER_CONTENT_TRUST: true

- name: "Failure: output container logs to console"
if: failure()
run: docker compose logs

24 .github/workflows/periodic-snapshot.yml (vendored)
@@ -3,7 +3,7 @@ name: Periodic Snapshot
# 10 minutes after midnight on the first of every month
on:
schedule:
- cron: '10 0 1 * *'
- cron: "10 0 1 * *"

permissions:
contents: write
@@ -13,18 +13,14 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
ssh-key: ${{secrets.ACTION_PUSH_KEY}}
- run: |
# https://api.github.com/users/github-actions[bot]
git config user.name 'github-actions[bot]'
git config user.email '41898282+github-actions[bot]@users.noreply.github.com'

TAG_NAME="$(date +%y.%m).0-dev"
gawk -i inplace -F: -v q=\" -v tag=${TAG_NAME} '/^ "version": / { print $1 FS, q tag q ","; next} { print }' package.json
gawk -i inplace -F= -v q=\" -v tag=${TAG_NAME} '/^__version__ =/ { print $1 FS, q tag q; next} { print }' redash/__init__.py
gawk -i inplace -F= -v q=\" -v tag=${TAG_NAME} '/^version =/ { print $1 FS, q tag q; next} { print }' pyproject.toml
date="$(date +%y.%m).0-dev"
gawk -i inplace -F: -v q=\" -v tag=$date '/^ "version": / { print $1 FS, q tag q ","; next} { print }' package.json
gawk -i inplace -F= -v q=\" -v tag=$date '/^__version__ =/ { print $1 FS, q tag q; next} { print }' redash/__init__.py
gawk -i inplace -F= -v q=\" -v tag=$date '/^version =/ { print $1 FS, q tag q; next} { print }' pyproject.toml
git config user.name github-actions
git config user.email github-actions@github.com
git add package.json redash/__init__.py pyproject.toml
git commit -m "Snapshot: ${TAG_NAME}"
git tag ${TAG_NAME}
git push --atomic origin master refs/tags/${TAG_NAME}
git commit -m "Snapshot: ${date}"
git tag $date
git push --atomic origin master refs/tags/$date

87 .github/workflows/preview-image.yml (vendored)
@@ -1,87 +0,0 @@
name: Preview Image
on:
push:
tags:
- '*-dev'

env:
NODE_VERSION: 18

jobs:
build-skip-check:
runs-on: ubuntu-22.04
outputs:
skip: ${{ steps.skip-check.outputs.skip }}
steps:
- name: Skip?
id: skip-check
run: |
if [[ "${{ vars.DOCKER_USER }}" == '' ]]; then
echo 'Docker user is empty. Skipping build+push'
echo skip=true >> "$GITHUB_OUTPUT"
elif [[ "${{ secrets.DOCKER_PASS }}" == '' ]]; then
echo 'Docker password is empty. Skipping build+push'
echo skip=true >> "$GITHUB_OUTPUT"
else
echo 'Docker user and password are set and branch is `master`.'
echo 'Building + pushing `preview` image.'
echo skip=false >> "$GITHUB_OUTPUT"
fi

build-docker-image:
runs-on: ubuntu-22.04
needs:
- build-skip-check
if: needs.build-skip-check.outputs.skip == 'false'
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 1
ref: ${{ github.event.push.after }}

- uses: actions/setup-node@v4
with:
node-version: ${{ env.NODE_VERSION }}
cache: 'yarn'

- name: Install Dependencies
run: |
npm install --global --force yarn@1.22.22
yarn cache clean && yarn --frozen-lockfile --network-concurrency 1

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3

- name: Login to DockerHub
uses: docker/login-action@v3
with:
username: ${{ vars.DOCKER_USER }}
password: ${{ secrets.DOCKER_PASS }}

- name: Set version
id: version
run: |
set -x
.ci/update_version
VERSION_TAG=$(jq -r .version package.json)
echo "VERSION_TAG=$VERSION_TAG" >> "$GITHUB_OUTPUT"

- name: Build and push preview image to Docker Hub
uses: docker/build-push-action@v4
with:
push: true
tags: |
redash/redash:preview
redash/preview:${{ steps.version.outputs.VERSION_TAG }}
context: .
build-args: |
test_all_deps=true
cache-from: type=gha
cache-to: type=gha,mode=max
platforms: linux/amd64
env:
DOCKER_CONTENT_TRUST: true

- name: "Failure: output container logs to console"
if: failure()
run: docker compose logs

@@ -1,6 +1,6 @@
FROM node:18-bookworm as frontend-builder
FROM node:16.20.1-bookworm as frontend-builder

RUN npm install --global --force yarn@1.22.22
RUN npm install --global --force yarn@1.22.19

# Controls whether to build the frontend assets
ARG skip_frontend_build
@@ -14,7 +14,6 @@ USER redash
WORKDIR /frontend
COPY --chown=redash package.json yarn.lock .yarnrc /frontend/
COPY --chown=redash viz-lib /frontend/viz-lib
COPY --chown=redash scripts /frontend/scripts

# Controls whether to instrument code for coverage information
ARG code_coverage
@@ -26,7 +25,7 @@ COPY --chown=redash client /frontend/client
COPY --chown=redash webpack.config.js /frontend/
RUN if [ "x$skip_frontend_build" = "x" ] ; then yarn build; else mkdir -p /frontend/client/dist && touch /frontend/client/dist/multi_org.html && touch /frontend/client/dist/index.html; fi

FROM python:3.10-slim-bookworm
FROM python:3.8-slim-bookworm

EXPOSE 5000

@@ -68,7 +67,7 @@ RUN if [ "$TARGETPLATFORM" = "linux/amd64" ]; then \
curl https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor -o /usr/share/keyrings/microsoft-prod.gpg \
&& curl https://packages.microsoft.com/config/debian/12/prod.list > /etc/apt/sources.list.d/mssql-release.list \
&& apt-get update \
&& ACCEPT_EULA=Y apt-get install -y --no-install-recommends msodbcsql18 \
&& ACCEPT_EULA=Y apt-get install -y --no-install-recommends msodbcsql17 \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* \
&& curl "$databricks_odbc_driver_url" --location --output /tmp/simba_odbc.zip \

18 Makefile
@@ -1,4 +1,4 @@
.PHONY: compose_build up test_db create_database clean clean-all down tests lint backend-unit-tests frontend-unit-tests test build watch start redis-cli bash
.PHONY: compose_build up test_db create_database clean down tests lint backend-unit-tests frontend-unit-tests test build watch start redis-cli bash

compose_build: .env
COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker compose build
@@ -17,21 +17,7 @@ create_database: .env
docker compose run server create_db

clean:
docker compose down
docker compose --project-name cypress down
docker compose rm --stop --force
docker compose --project-name cypress rm --stop --force
docker image rm --force \
cypress-server:latest cypress-worker:latest cypress-scheduler:latest \
redash-server:latest redash-worker:latest redash-scheduler:latest
docker container prune --force
docker image prune --force
docker volume prune --force

clean-all: clean
docker image rm --force \
redash/redash:10.1.0.b50633 redis:7-alpine maildev/maildev:latest \
pgautoupgrade/pgautoupgrade:15-alpine3.8 pgautoupgrade/pgautoupgrade:latest
docker compose down && docker compose rm

down:
docker compose down

@@ -84,7 +84,6 @@ Redash supports more than 35 SQL and NoSQL [data sources](https://redash.io/help
- Python
- Qubole
- Rockset
- RisingWave
- Salesforce
- ScyllaDB
- Shell Scripts

BIN client/app/assets/images/db-logos/qubole.png (binary file not shown; before: 9.7 KiB, after: 2.4 KiB)
@@ -1,5 +1,6 @@
import React from "react";
import { clientConfig } from "@/services/auth";
import Link from "@/components/Link";
import { clientConfig, currentUser } from "@/services/auth";
import frontendVersion from "@/version.json";

export default function VersionInfo() {
@@ -9,6 +10,15 @@ export default function VersionInfo() {
Version: {clientConfig.version}
{frontendVersion !== clientConfig.version && ` (${frontendVersion.substring(0, 8)})`}
</div>
{clientConfig.newVersionAvailable && currentUser.hasPermission("super_admin") && (
<div className="m-t-10">
{/* eslint-disable react/jsx-no-target-blank */}
<Link href="https://version.redash.io/" className="update-available" target="_blank" rel="noopener">
Update Available <i className="fa fa-external-link m-l-5" aria-hidden="true" />
<span className="sr-only">(opens in a new tab)</span>
</Link>
</div>
)}
</React.Fragment>
);
}

79 client/app/components/BeaconConsent.jsx (Normal file)
@@ -0,0 +1,79 @@
|
||||
import React, { useState } from "react";
|
||||
import Card from "antd/lib/card";
|
||||
import Button from "antd/lib/button";
|
||||
import Typography from "antd/lib/typography";
|
||||
import { clientConfig } from "@/services/auth";
|
||||
import Link from "@/components/Link";
|
||||
import HelpTrigger from "@/components/HelpTrigger";
|
||||
import DynamicComponent from "@/components/DynamicComponent";
|
||||
import OrgSettings from "@/services/organizationSettings";
|
||||
|
||||
const Text = Typography.Text;
|
||||
|
||||
function BeaconConsent() {
|
||||
const [hide, setHide] = useState(false);
|
||||
|
||||
if (!clientConfig.showBeaconConsentMessage || hide) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const hideConsentCard = () => {
|
||||
clientConfig.showBeaconConsentMessage = false;
|
||||
setHide(true);
|
||||
};
|
||||
|
||||
const confirmConsent = confirm => {
|
||||
let message = "🙏 Thank you.";
|
||||
|
||||
if (!confirm) {
|
||||
message = "Settings Saved.";
|
||||
}
|
||||
|
||||
OrgSettings.save({ beacon_consent: confirm }, message)
|
||||
// .then(() => {
|
||||
// // const settings = get(response, 'settings');
|
||||
// // this.setState({ settings, formValues: { ...settings } });
|
||||
// })
|
||||
.finally(hideConsentCard);
|
||||
};
|
||||
|
||||
return (
|
||||
<DynamicComponent name="BeaconConsent">
|
||||
<div className="m-t-10 tiled">
|
||||
<Card
|
||||
title={
|
||||
<>
|
||||
Would you be ok with sharing anonymous usage data with the Redash team?{" "}
|
||||
<HelpTrigger type="USAGE_DATA_SHARING" />
|
||||
</>
|
||||
}
|
||||
bordered={false}>
|
||||
<Text>Help Redash improve by automatically sending anonymous usage data:</Text>
|
||||
<div className="m-t-5">
|
||||
<ul>
|
||||
<li> Number of users, queries, dashboards, alerts, widgets and visualizations.</li>
|
||||
<li> Types of data sources, alert destinations and visualizations.</li>
|
||||
</ul>
|
||||
</div>
|
||||
<Text>All data is aggregated and will never include any sensitive or private data.</Text>
|
||||
<div className="m-t-5">
|
||||
<Button type="primary" className="m-r-5" onClick={() => confirmConsent(true)}>
|
||||
Yes
|
||||
</Button>
|
||||
<Button type="default" onClick={() => confirmConsent(false)}>
|
||||
No
|
||||
</Button>
|
||||
</div>
|
||||
<div className="m-t-15">
|
||||
<Text type="secondary">
|
||||
You can change this setting anytime from the{" "}
|
||||
<Link href="settings/organization">Organization Settings</Link> page.
|
||||
</Text>
|
||||
</div>
|
||||
</Card>
|
||||
</div>
|
||||
</DynamicComponent>
|
||||
);
|
||||
}
|
||||
|
||||
export default BeaconConsent;
|
||||
@@ -23,6 +23,7 @@ export const TYPES = mapValues(
|
||||
VALUE_SOURCE_OPTIONS: ["/user-guide/querying/query-parameters#Value-Source-Options", "Guide: Value Source Options"],
|
||||
SHARE_DASHBOARD: ["/user-guide/dashboards/sharing-dashboards", "Guide: Sharing and Embedding Dashboards"],
|
||||
AUTHENTICATION_OPTIONS: ["/user-guide/users/authentication-options", "Guide: Authentication Options"],
|
||||
USAGE_DATA_SHARING: ["/open-source/admin-guide/usage-data", "Help: Anonymous Usage Data Sharing"],
|
||||
DS_ATHENA: ["/data-sources/amazon-athena-setup", "Guide: Help Setting up Amazon Athena"],
|
||||
DS_BIGQUERY: ["/data-sources/bigquery-setup", "Guide: Help Setting up BigQuery"],
|
||||
DS_URL: ["/data-sources/querying-urls", "Guide: Help Setting up URL"],
|
||||
|
||||
@@ -148,9 +148,7 @@ function EditVisualizationDialog({ dialog, visualization, query, queryResult })
|
||||
|
||||
function dismiss() {
|
||||
const optionsChanged = !isEqual(options, defaultState.originalOptions);
|
||||
confirmDialogClose(nameChanged || optionsChanged)
|
||||
.then(dialog.dismiss)
|
||||
.catch(() => {});
|
||||
confirmDialogClose(nameChanged || optionsChanged).then(dialog.dismiss);
|
||||
}
|
||||
|
||||
// When editing existing visualization chart type selector is disabled, so add only existing visualization's
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
<meta charset="UTF-8" />
|
||||
<base href="{{base_href}}" />
|
||||
<title><%= htmlWebpackPlugin.options.title %></title>
|
||||
<script src="<%= htmlWebpackPlugin.options.staticPath %>unsupportedRedirect.js" async></script>
|
||||
<script src="/static/unsupportedRedirect.js" async></script>
|
||||
|
||||
<link rel="icon" type="image/png" sizes="32x32" href="/static/images/favicon-32x32.png" />
|
||||
<link rel="icon" type="image/png" sizes="96x96" href="/static/images/favicon-96x96.png" />
|
||||
|
||||
@@ -118,9 +118,28 @@ class ShareDashboardDialog extends React.Component {
|
||||
/>
|
||||
</Form.Item>
|
||||
{dashboard.public_url && (
|
||||
<Form.Item label="Secret address" {...this.formItemProps}>
|
||||
<InputWithCopy value={dashboard.public_url} data-test="SecretAddress" />
|
||||
</Form.Item>
|
||||
<>
|
||||
<Form.Item>
|
||||
<Alert
|
||||
message={
|
||||
<div>
|
||||
Custom rule for hiding filter components when sharing links:
|
||||
<br />
|
||||
You can hide filter components by appending `&hide_filter={"{{"} component_name{"}}"}` to the
|
||||
sharing URL.
|
||||
<br />
|
||||
Example: http://{"{{"}ip{"}}"}:{"{{"}port{"}}"}/public/dashboards/{"{{"}id{"}}"}
|
||||
?p_country=ghana&p_site=10&hide_filter=country
|
||||
</div>
|
||||
}
|
||||
type="warning"
|
||||
/>
|
||||
</Form.Item>
|
||||
|
||||
<Form.Item label="Secret address" {...this.formItemProps}>
|
||||
<InputWithCopy value={dashboard.public_url} data-test="SecretAddress" />
|
||||
</Form.Item>
|
||||
</>
|
||||
)}
|
||||
</Form>
|
||||
</Modal>
|
||||
|
||||
@@ -6,6 +6,7 @@ import Link from "@/components/Link";
|
||||
import routeWithUserSession from "@/components/ApplicationArea/routeWithUserSession";
|
||||
import EmptyState, { EmptyStateHelpMessage } from "@/components/empty-state/EmptyState";
|
||||
import DynamicComponent from "@/components/DynamicComponent";
|
||||
import BeaconConsent from "@/components/BeaconConsent";
|
||||
import PlainButton from "@/components/PlainButton";
|
||||
|
||||
import { axios } from "@/services/axios";
|
||||
@@ -88,6 +89,7 @@ export default function Home() {
|
||||
</DynamicComponent>
|
||||
<DynamicComponent name="HomeExtra" />
|
||||
<DashboardAndQueryFavoritesList />
|
||||
<BeaconConsent />
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
||||
@@ -0,0 +1,38 @@
|
||||
import React from "react";
|
||||
import Form from "antd/lib/form";
|
||||
import Checkbox from "antd/lib/checkbox";
|
||||
import Skeleton from "antd/lib/skeleton";
|
||||
import HelpTrigger from "@/components/HelpTrigger";
|
||||
import DynamicComponent from "@/components/DynamicComponent";
|
||||
import { SettingsEditorPropTypes, SettingsEditorDefaultProps } from "../prop-types";
|
||||
|
||||
export default function BeaconConsentSettings(props) {
|
||||
const { values, onChange, loading } = props;
|
||||
|
||||
return (
|
||||
<DynamicComponent name="OrganizationSettings.BeaconConsentSettings" {...props}>
|
||||
<Form.Item
|
||||
label={
|
||||
<span>
|
||||
Anonymous Usage Data Sharing
|
||||
<HelpTrigger className="m-l-5 m-r-5" type="USAGE_DATA_SHARING" />
|
||||
</span>
|
||||
}>
|
||||
{loading ? (
|
||||
<Skeleton title={{ width: 300 }} paragraph={false} active />
|
||||
) : (
|
||||
<Checkbox
|
||||
name="beacon_consent"
|
||||
checked={values.beacon_consent}
|
||||
onChange={e => onChange({ beacon_consent: e.target.checked })}>
|
||||
Help Redash improve by automatically sending anonymous usage data
|
||||
</Checkbox>
|
||||
)}
|
||||
</Form.Item>
|
||||
</DynamicComponent>
|
||||
);
|
||||
}
|
||||
|
||||
BeaconConsentSettings.propTypes = SettingsEditorPropTypes;
|
||||
|
||||
BeaconConsentSettings.defaultProps = SettingsEditorDefaultProps;
|
||||
@@ -4,6 +4,7 @@ import DynamicComponent from "@/components/DynamicComponent";
|
||||
import FormatSettings from "./FormatSettings";
|
||||
import PlotlySettings from "./PlotlySettings";
|
||||
import FeatureFlagsSettings from "./FeatureFlagsSettings";
|
||||
import BeaconConsentSettings from "./BeaconConsentSettings";
|
||||
|
||||
export default function GeneralSettings(props) {
|
||||
return (
|
||||
@@ -13,6 +14,7 @@ export default function GeneralSettings(props) {
|
||||
<FormatSettings {...props} />
|
||||
<PlotlySettings {...props} />
|
||||
<FeatureFlagsSettings {...props} />
|
||||
<BeaconConsentSettings {...props} />
|
||||
</DynamicComponent>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
/* eslint-disable import/no-extraneous-dependencies, no-console */
|
||||
const { find } = require("lodash");
|
||||
const atob = require("atob");
|
||||
const { execSync } = require("child_process");
|
||||
const { get, post } = require("request").defaults({ jar: true });
|
||||
const { seedData } = require("./seed-data");
|
||||
@@ -59,11 +60,23 @@ function stopServer() {
|
||||
|
||||
function runCypressCI() {
|
||||
const {
|
||||
PERCY_TOKEN_ENCODED,
|
||||
CYPRESS_PROJECT_ID_ENCODED,
|
||||
CYPRESS_RECORD_KEY_ENCODED,
|
||||
GITHUB_REPOSITORY,
|
||||
CYPRESS_OPTIONS, // eslint-disable-line no-unused-vars
|
||||
} = process.env;
|
||||
|
||||
if (GITHUB_REPOSITORY === "getredash/redash") {
|
||||
if (PERCY_TOKEN_ENCODED) {
|
||||
process.env.PERCY_TOKEN = atob(`${PERCY_TOKEN_ENCODED}`);
|
||||
}
|
||||
if (CYPRESS_PROJECT_ID_ENCODED) {
|
||||
process.env.CYPRESS_PROJECT_ID = atob(`${CYPRESS_PROJECT_ID_ENCODED}`);
|
||||
}
|
||||
if (CYPRESS_RECORD_KEY_ENCODED) {
|
||||
process.env.CYPRESS_RECORD_KEY = atob(`${CYPRESS_RECORD_KEY_ENCODED}`);
|
||||
}
|
||||
process.env.CYPRESS_OPTIONS = "--record";
|
||||
}
|
||||
|
||||
|
||||
@@ -1,24 +0,0 @@
|
||||
services:
|
||||
.redash:
|
||||
build:
|
||||
context: .
|
||||
args:
|
||||
FRONTEND_BUILD_MODE: ${FRONTEND_BUILD_MODE:-2}
|
||||
INSTALL_GROUPS: ${INSTALL_GROUPS:-main,all_ds,dev}
|
||||
volumes:
|
||||
- $PWD:${SERVER_MOUNT:-/ignore}
|
||||
command: manage version
|
||||
environment:
|
||||
REDASH_LOG_LEVEL: INFO
|
||||
REDASH_REDIS_URL: redis://redis:6379/0
|
||||
REDASH_DATABASE_URL: postgresql://postgres@postgres/postgres
|
||||
REDASH_RATELIMIT_ENABLED: false
|
||||
REDASH_MAIL_DEFAULT_SENDER: redash@example.com
|
||||
REDASH_MAIL_SERVER: email
|
||||
REDASH_MAIL_PORT: 1025
|
||||
REDASH_ENFORCE_CSRF: true
|
||||
REDASH_COOKIE_SECRET: ${REDASH_COOKIE_SECRET}
|
||||
REDASH_SECRET_KEY: ${REDASH_SECRET_KEY}
|
||||
REDASH_PRODUCTION: ${REDASH_PRODUCTION:-true}
|
||||
env_file:
|
||||
- .env
|
||||
@@ -1,5 +1,6 @@
|
||||
# This configuration file is for the **development** setup.
|
||||
# For a production example please refer to getredash/setup repository on GitHub.
|
||||
version: "2.2"
|
||||
x-redash-service: &redash-service
|
||||
build:
|
||||
context: .
|
||||
@@ -52,7 +53,7 @@ services:
|
||||
image: redis:7-alpine
|
||||
restart: unless-stopped
|
||||
postgres:
|
||||
image: pgautoupgrade/pgautoupgrade:latest
|
||||
image: pgautoupgrade/pgautoupgrade:15-alpine3.8
|
||||
ports:
|
||||
- "15432:5432"
|
||||
# The following turns the DB into less durable, but gains significant performance improvements for the tests run (x3
|
||||
@@ -7,7 +7,7 @@ Create Date: 2020-12-23 21:35:32.766354
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects.postgresql import JSON
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '0ec979123ba4'
|
||||
@@ -18,7 +18,7 @@ depends_on = None
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.add_column('dashboards', sa.Column('options', JSON(astext_type=sa.Text()), server_default='{}', nullable=False))
|
||||
op.add_column('dashboards', sa.Column('options', postgresql.JSON(astext_type=sa.Text()), server_default='{}', nullable=False))
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
|
||||
@@ -10,7 +10,8 @@ import json
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.sql import table
|
||||
from redash.models import MutableDict
|
||||
|
||||
from redash.models import MutableDict, PseudoJSON
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
@@ -40,7 +41,7 @@ def upgrade():
|
||||
"queries",
|
||||
sa.Column(
|
||||
"schedule",
|
||||
sa.Text(),
|
||||
MutableDict.as_mutable(PseudoJSON),
|
||||
nullable=False,
|
||||
server_default=json.dumps({}),
|
||||
),
|
||||
@@ -50,7 +51,7 @@ def upgrade():
|
||||
queries = table(
|
||||
"queries",
|
||||
sa.Column("id", sa.Integer, primary_key=True),
|
||||
sa.Column("schedule", sa.Text()),
|
||||
sa.Column("schedule", MutableDict.as_mutable(PseudoJSON)),
|
||||
sa.Column("old_schedule", sa.String(length=10)),
|
||||
)
|
||||
|
||||
@@ -84,7 +85,7 @@ def downgrade():
|
||||
"queries",
|
||||
sa.Column(
|
||||
"old_schedule",
|
||||
sa.Text(),
|
||||
MutableDict.as_mutable(PseudoJSON),
|
||||
nullable=False,
|
||||
server_default=json.dumps({}),
|
||||
),
|
||||
@@ -92,8 +93,8 @@ def downgrade():
|
||||
|
||||
queries = table(
|
||||
"queries",
|
||||
sa.Column("schedule", sa.Text()),
|
||||
sa.Column("old_schedule", sa.Text()),
|
||||
sa.Column("schedule", MutableDict.as_mutable(PseudoJSON)),
|
||||
sa.Column("old_schedule", MutableDict.as_mutable(PseudoJSON)),
|
||||
)
|
||||
|
||||
op.execute(queries.update().values({"old_schedule": queries.c.schedule}))
|
||||
@@ -105,7 +106,7 @@ def downgrade():
|
||||
"queries",
|
||||
sa.Column("id", sa.Integer, primary_key=True),
|
||||
sa.Column("schedule", sa.String(length=10)),
|
||||
sa.Column("old_schedule", sa.Text()),
|
||||
sa.Column("old_schedule", MutableDict.as_mutable(PseudoJSON)),
|
||||
)
|
||||
|
||||
conn = op.get_bind()
|
||||
|
||||
@@ -1,135 +0,0 @@
|
||||
"""change type of json fields from varchar to json
|
||||
|
||||
Revision ID: 7205816877ec
|
||||
Revises: 7ce5925f832b
|
||||
Create Date: 2024-01-03 13:55:18.885021
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects.postgresql import JSONB, JSON
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '7205816877ec'
|
||||
down_revision = '7ce5925f832b'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
connection = op.get_bind()
|
||||
op.alter_column('queries', 'options',
|
||||
existing_type=sa.Text(),
|
||||
type_=JSONB(astext_type=sa.Text()),
|
||||
nullable=True,
|
||||
postgresql_using='options::jsonb',
|
||||
server_default=sa.text("'{}'::jsonb"))
|
||||
op.alter_column('queries', 'schedule',
|
||||
existing_type=sa.Text(),
|
||||
type_=JSONB(astext_type=sa.Text()),
|
||||
nullable=True,
|
||||
postgresql_using='schedule::jsonb',
|
||||
server_default=sa.text("'{}'::jsonb"))
|
||||
op.alter_column('events', 'additional_properties',
|
||||
existing_type=sa.Text(),
|
||||
type_=JSONB(astext_type=sa.Text()),
|
||||
nullable=True,
|
||||
postgresql_using='additional_properties::jsonb',
|
||||
server_default=sa.text("'{}'::jsonb"))
|
||||
op.alter_column('organizations', 'settings',
|
||||
existing_type=sa.Text(),
|
||||
type_=JSONB(astext_type=sa.Text()),
|
||||
nullable=True,
|
||||
postgresql_using='settings::jsonb',
|
||||
server_default=sa.text("'{}'::jsonb"))
|
||||
op.alter_column('alerts', 'options',
|
||||
existing_type=JSON(astext_type=sa.Text()),
|
||||
type_=JSONB(astext_type=sa.Text()),
|
||||
nullable=True,
|
||||
postgresql_using='options::jsonb',
|
||||
server_default=sa.text("'{}'::jsonb"))
|
||||
op.alter_column('dashboards', 'options',
|
||||
existing_type=JSON(astext_type=sa.Text()),
|
||||
type_=JSONB(astext_type=sa.Text()),
|
||||
postgresql_using='options::jsonb',
|
||||
server_default=sa.text("'{}'::jsonb"))
|
||||
op.alter_column('dashboards', 'layout',
|
||||
existing_type=sa.Text(),
|
||||
type_=JSONB(astext_type=sa.Text()),
|
||||
postgresql_using='layout::jsonb',
|
||||
server_default=sa.text("'{}'::jsonb"))
|
||||
op.alter_column('changes', 'change',
|
||||
existing_type=JSON(astext_type=sa.Text()),
|
||||
type_=JSONB(astext_type=sa.Text()),
|
||||
postgresql_using='change::jsonb',
|
||||
server_default=sa.text("'{}'::jsonb"))
|
||||
op.alter_column('visualizations', 'options',
|
||||
existing_type=sa.Text(),
|
||||
type_=JSONB(astext_type=sa.Text()),
|
||||
postgresql_using='options::jsonb',
|
||||
server_default=sa.text("'{}'::jsonb"))
|
||||
op.alter_column('widgets', 'options',
|
||||
existing_type=sa.Text(),
|
||||
type_=JSONB(astext_type=sa.Text()),
|
||||
postgresql_using='options::jsonb',
|
||||
server_default=sa.text("'{}'::jsonb"))
|
||||
|
||||
|
||||
def downgrade():
|
||||
connection = op.get_bind()
|
||||
op.alter_column('queries', 'options',
|
||||
existing_type=JSONB(astext_type=sa.Text()),
|
||||
type_=sa.Text(),
|
||||
postgresql_using='options::text',
|
||||
existing_nullable=True,
|
||||
server_default=sa.text("'{}'::text"))
|
||||
op.alter_column('queries', 'schedule',
|
||||
existing_type=JSONB(astext_type=sa.Text()),
|
||||
type_=sa.Text(),
|
||||
postgresql_using='schedule::text',
|
||||
existing_nullable=True,
|
||||
server_default=sa.text("'{}'::text"))
|
||||
op.alter_column('events', 'additional_properties',
|
||||
existing_type=JSONB(astext_type=sa.Text()),
|
||||
type_=sa.Text(),
|
||||
postgresql_using='additional_properties::text',
|
||||
existing_nullable=True,
|
||||
server_default=sa.text("'{}'::text"))
|
||||
op.alter_column('organizations', 'settings',
|
||||
existing_type=JSONB(astext_type=sa.Text()),
|
||||
type_=sa.Text(),
|
||||
postgresql_using='settings::text',
|
||||
existing_nullable=True,
|
||||
server_default=sa.text("'{}'::text"))
|
||||
op.alter_column('alerts', 'options',
|
||||
existing_type=JSONB(astext_type=sa.Text()),
|
||||
type_=JSON(astext_type=sa.Text()),
|
||||
postgresql_using='options::json',
|
||||
existing_nullable=True,
|
||||
server_default=sa.text("'{}'::json"))
|
||||
op.alter_column('dashboards', 'options',
|
||||
existing_type=JSONB(astext_type=sa.Text()),
|
||||
type_=JSON(astext_type=sa.Text()),
|
||||
postgresql_using='options::json',
|
||||
server_default=sa.text("'{}'::json"))
|
||||
op.alter_column('dashboards', 'layout',
|
||||
existing_type=JSONB(astext_type=sa.Text()),
|
||||
type_=sa.Text(),
|
||||
postgresql_using='layout::text',
|
||||
server_default=sa.text("'{}'::text"))
|
||||
op.alter_column('changes', 'change',
|
||||
existing_type=JSONB(astext_type=sa.Text()),
|
||||
type_=JSON(astext_type=sa.Text()),
|
||||
postgresql_using='change::json',
|
||||
server_default=sa.text("'{}'::json"))
|
||||
op.alter_column('visualizations', 'options',
|
||||
type_=sa.Text(),
|
||||
existing_type=JSONB(astext_type=sa.Text()),
|
||||
postgresql_using='options::text',
|
||||
server_default=sa.text("'{}'::text"))
|
||||
op.alter_column('widgets', 'options',
|
||||
type_=sa.Text(),
|
||||
existing_type=JSONB(astext_type=sa.Text()),
|
||||
postgresql_using='options::text',
|
||||
server_default=sa.text("'{}'::text"))
|
||||
@@ -7,9 +7,10 @@ Create Date: 2019-01-17 13:22:21.729334
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
from sqlalchemy.sql import table
|
||||
|
||||
from redash.models import MutableDict
|
||||
from redash.models import MutableDict, PseudoJSON
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "73beceabb948"
|
||||
@@ -42,7 +43,7 @@ def upgrade():
|
||||
queries = table(
|
||||
"queries",
|
||||
sa.Column("id", sa.Integer, primary_key=True),
|
||||
sa.Column("schedule", sa.Text()),
|
||||
sa.Column("schedule", MutableDict.as_mutable(PseudoJSON)),
|
||||
)
|
||||
|
||||
conn = op.get_bind()
|
||||
|
||||
@@ -6,7 +6,7 @@ Create Date: 2018-01-31 15:20:30.396533
|
||||
|
||||
"""
|
||||
|
||||
import json
|
||||
import simplejson
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
@@ -27,7 +27,7 @@ def upgrade():
|
||||
dashboard_result = db.session.execute("SELECT id, layout FROM dashboards")
|
||||
for dashboard in dashboard_result:
|
||||
print(" Updating dashboard: {}".format(dashboard["id"]))
|
||||
layout = json.loads(dashboard["layout"])
|
||||
layout = simplejson.loads(dashboard["layout"])
|
||||
|
||||
print(" Building widgets map:")
|
||||
widgets = {}
|
||||
@@ -53,7 +53,7 @@ def upgrade():
|
||||
if widget is None:
|
||||
continue
|
||||
|
||||
options = json.loads(widget["options"]) or {}
|
||||
options = simplejson.loads(widget["options"]) or {}
|
||||
options["position"] = {
|
||||
"row": row_index,
|
||||
"col": column_index * column_size,
|
||||
@@ -62,7 +62,7 @@ def upgrade():
|
||||
|
||||
db.session.execute(
|
||||
"UPDATE widgets SET options=:options WHERE id=:id",
|
||||
{"options": json.dumps(options), "id": widget_id},
|
||||
{"options": simplejson.dumps(options), "id": widget_id},
|
||||
)
|
||||
|
||||
dashboard_result.close()
|
||||
|
||||
@@ -7,7 +7,7 @@ Create Date: 2019-01-31 09:21:31.517265
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects.postgresql import BYTEA
|
||||
from sqlalchemy.dialects import postgresql
|
||||
from sqlalchemy.sql import table
|
||||
from sqlalchemy_utils.types.encrypted.encrypted_type import FernetEngine
|
||||
|
||||
@@ -15,8 +15,10 @@ from redash import settings
|
||||
from redash.utils.configuration import ConfigurationContainer
|
||||
from redash.models.types import (
|
||||
EncryptedConfiguration,
|
||||
Configuration,
|
||||
MutableDict,
|
||||
MutableList,
|
||||
PseudoJSON,
|
||||
)
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
@@ -29,7 +31,7 @@ depends_on = None
|
||||
def upgrade():
|
||||
op.add_column(
|
||||
"data_sources",
|
||||
sa.Column("encrypted_options", BYTEA(), nullable=True),
|
||||
sa.Column("encrypted_options", postgresql.BYTEA(), nullable=True),
|
||||
)
|
||||
|
||||
# copy values
|
||||
@@ -44,14 +46,7 @@ def upgrade():
|
||||
)
|
||||
),
|
||||
),
|
||||
sa.Column(
|
||||
"options",
|
||||
ConfigurationContainer.as_mutable(
|
||||
EncryptedConfiguration(
|
||||
sa.Text, settings.DATASOURCE_SECRET_KEY, FernetEngine
|
||||
)
|
||||
),
|
||||
),
|
||||
sa.Column("options", ConfigurationContainer.as_mutable(Configuration)),
|
||||
)
|
||||
|
||||
conn = op.get_bind()
|
||||
|
||||
@@ -9,7 +9,7 @@ import re
|
||||
from funcy import flatten, compact
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects.postgresql import ARRAY
|
||||
from sqlalchemy.dialects import postgresql
|
||||
from redash import models
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
@@ -21,10 +21,10 @@ depends_on = None
|
||||
|
||||
def upgrade():
|
||||
op.add_column(
|
||||
"dashboards", sa.Column("tags", ARRAY(sa.Unicode()), nullable=True)
|
||||
"dashboards", sa.Column("tags", postgresql.ARRAY(sa.Unicode()), nullable=True)
|
||||
)
|
||||
op.add_column(
|
||||
"queries", sa.Column("tags", ARRAY(sa.Unicode()), nullable=True)
|
||||
"queries", sa.Column("tags", postgresql.ARRAY(sa.Unicode()), nullable=True)
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -7,14 +7,17 @@ Create Date: 2020-12-14 21:42:48.661684
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects.postgresql import BYTEA
|
||||
from sqlalchemy.dialects import postgresql
|
||||
from sqlalchemy.sql import table
|
||||
from sqlalchemy_utils.types.encrypted.encrypted_type import FernetEngine
|
||||
|
||||
from redash import settings
|
||||
from redash.utils.configuration import ConfigurationContainer
|
||||
from redash.models.base import key_type
|
||||
from redash.models.types import EncryptedConfiguration
|
||||
from redash.models.types import (
|
||||
EncryptedConfiguration,
|
||||
Configuration,
|
||||
)
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
@@ -27,7 +30,7 @@ depends_on = None
|
||||
def upgrade():
|
||||
op.add_column(
|
||||
"notification_destinations",
|
||||
sa.Column("encrypted_options", BYTEA(), nullable=True)
|
||||
sa.Column("encrypted_options", postgresql.BYTEA(), nullable=True)
|
||||
)
|
||||
|
||||
# copy values
|
||||
@@ -42,14 +45,7 @@ def upgrade():
|
||||
)
|
||||
),
|
||||
),
|
||||
sa.Column(
|
||||
"options",
|
||||
ConfigurationContainer.as_mutable(
|
||||
EncryptedConfiguration(
|
||||
sa.Text, settings.DATASOURCE_SECRET_KEY, FernetEngine
|
||||
)
|
||||
),
|
||||
),
|
||||
sa.Column("options", ConfigurationContainer.as_mutable(Configuration)),
|
||||
)
|
||||
|
||||
conn = op.get_bind()
|
||||
|
||||
@@ -7,7 +7,7 @@ Create Date: 2018-11-08 16:12:17.023569
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects.postgresql import JSON
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "e7f8a917aa8e"
|
||||
@@ -21,7 +21,7 @@ def upgrade():
|
||||
"users",
|
||||
sa.Column(
|
||||
"details",
|
||||
JSON(astext_type=sa.Text()),
|
||||
postgresql.JSON(astext_type=sa.Text()),
|
||||
server_default="{}",
|
||||
nullable=True,
|
||||
),
|
||||
|
||||
@@ -7,7 +7,7 @@ Create Date: 2022-01-31 15:24:16.507888
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects.postgresql import JSON, JSONB
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
from redash.models import db
|
||||
|
||||
@@ -23,8 +23,8 @@ def upgrade():
|
||||
|
||||
### commands auto generated by Alembic - please adjust! ###
|
||||
op.alter_column('users', 'details',
|
||||
existing_type=JSON(astext_type=sa.Text()),
|
||||
type_=JSONB(astext_type=sa.Text()),
|
||||
existing_type=postgresql.JSON(astext_type=sa.Text()),
|
||||
type_=postgresql.JSONB(astext_type=sa.Text()),
|
||||
existing_nullable=True,
|
||||
existing_server_default=sa.text("'{}'::jsonb"))
|
||||
### end Alembic commands ###
|
||||
@@ -52,8 +52,8 @@ def downgrade():
|
||||
connection.execute(update_query)
|
||||
db.session.commit()
|
||||
op.alter_column('users', 'details',
|
||||
existing_type=JSONB(astext_type=sa.Text()),
|
||||
type_=JSON(astext_type=sa.Text()),
|
||||
existing_type=postgresql.JSONB(astext_type=sa.Text()),
|
||||
type_=postgresql.JSON(astext_type=sa.Text()),
|
||||
existing_nullable=True,
|
||||
existing_server_default=sa.text("'{}'::json"))
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
command = "cd ../ && yarn cache clean && yarn --frozen-lockfile --network-concurrency 1 && yarn build && cd ./client"
|
||||
|
||||
[build.environment]
|
||||
NODE_VERSION = "18"
|
||||
NODE_VERSION = "16.20.1"
|
||||
NETLIFY_USE_YARN = "true"
|
||||
YARN_VERSION = "1.22.19"
|
||||
CYPRESS_INSTALL_BINARY = "0"
|
||||
|
||||
18 package.json
@@ -1,19 +1,20 @@
|
||||
{
|
||||
"name": "redash-client",
|
||||
"version": "24.07.0-dev",
|
||||
"version": "24.01.0-dev",
|
||||
"description": "The frontend part of Redash.",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"start": "npm-run-all --parallel watch:viz webpack-dev-server",
|
||||
"clean": "rm -rf ./client/dist/",
|
||||
"build:viz": "(cd viz-lib && yarn build:babel)",
|
||||
"build": "yarn clean && yarn build:viz && NODE_OPTIONS=--openssl-legacy-provider NODE_ENV=production webpack",
|
||||
"watch:app": "NODE_OPTIONS=--openssl-legacy-provider webpack watch --progress",
|
||||
"build": "yarn clean && yarn build:viz && NODE_ENV=production webpack",
|
||||
"build:old-node-version": "yarn clean && NODE_ENV=production node --max-old-space-size=4096 node_modules/.bin/webpack",
|
||||
"watch:app": "webpack watch --progress",
|
||||
"watch:viz": "(cd viz-lib && yarn watch:babel)",
|
||||
"watch": "npm-run-all --parallel watch:*",
|
||||
"webpack-dev-server": "webpack-dev-server",
|
||||
"analyze": "yarn clean && BUNDLE_ANALYZER=on NODE_OPTIONS=--openssl-legacy-provider webpack",
|
||||
"analyze:build": "yarn clean && NODE_ENV=production BUNDLE_ANALYZER=on NODE_OPTIONS=--openssl-legacy-provider webpack",
|
||||
"analyze": "yarn clean && BUNDLE_ANALYZER=on webpack",
|
||||
"analyze:build": "yarn clean && NODE_ENV=production BUNDLE_ANALYZER=on webpack",
|
||||
"lint": "yarn lint:base --ext .js --ext .jsx --ext .ts --ext .tsx ./client",
|
||||
"lint:fix": "yarn lint:base --fix --ext .js --ext .jsx --ext .ts --ext .tsx ./client",
|
||||
"lint:base": "eslint --config ./client/.eslintrc.js --ignore-path ./client/.eslintignore",
|
||||
@@ -33,8 +34,7 @@
|
||||
"url": "git+https://github.com/getredash/redash.git"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">16.0 <21.0",
|
||||
"npm": "please-use-yarn",
|
||||
"node": ">14.16.0 <17.0.0",
|
||||
"yarn": "^1.22.10"
|
||||
},
|
||||
"author": "Redash Contributors",
|
||||
@@ -178,10 +178,6 @@
|
||||
"viz-lib/**"
|
||||
]
|
||||
},
|
||||
"browser": {
|
||||
"fs": false,
|
||||
"path": false
|
||||
},
|
||||
"//": "browserslist set to 'Async functions' compatibility",
|
||||
"browserslist": [
|
||||
"Edge >= 15",
|
||||
|
||||
1373 poetry.lock (generated)
File diff suppressed because it is too large.
@@ -12,7 +12,7 @@ force-exclude = '''
|
||||
|
||||
[tool.poetry]
|
||||
name = "redash"
|
||||
version = "24.07.0-dev"
|
||||
version = "24.01.0-dev"
|
||||
description = "Make Your Company Data Driven. Connect to any data source, easily visualize, dashboard and share your data."
|
||||
authors = ["Arik Fraimovich <arik@redash.io>"]
|
||||
# to be added to/removed from the mailing list, please reach out to Arik via the above email or Discord
|
||||
@@ -43,10 +43,10 @@ flask-wtf = "1.1.1"
|
||||
funcy = "1.13"
|
||||
gevent = "23.9.1"
|
||||
greenlet = "2.0.2"
|
||||
gunicorn = "22.0.0"
|
||||
gunicorn = "20.0.4"
|
||||
httplib2 = "0.19.0"
|
||||
itsdangerous = "2.1.2"
|
||||
jinja2 = "3.1.4"
|
||||
jinja2 = "3.1.2"
|
||||
jsonschema = "3.1.1"
|
||||
markupsafe = "2.1.1"
|
||||
maxminddb-geolite2 = "2018.703"
|
||||
@@ -64,28 +64,28 @@ pytz = ">=2019.3"
|
||||
pyyaml = "6.0.1"
|
||||
redis = "4.6.0"
|
||||
regex = "2023.8.8"
|
||||
requests = "2.32.0"
|
||||
requests = "2.31.0"
|
||||
restrictedpython = "6.2"
|
||||
rq = "1.16.1"
|
||||
rq-scheduler = "0.13.1"
|
||||
rq = "1.9.0"
|
||||
rq-scheduler = "0.11.0"
|
||||
semver = "2.8.1"
|
||||
sentry-sdk = "1.28.1"
|
||||
simplejson = "3.16.0"
|
||||
sqlalchemy = "1.3.24"
|
||||
sqlalchemy-searchable = "1.2.0"
|
||||
sqlalchemy-utils = "0.38.3"
|
||||
sqlparse = "0.5.0"
|
||||
sqlalchemy-utils = "0.34.2"
|
||||
sqlparse = "0.4.4"
|
||||
sshtunnel = "0.1.5"
|
||||
statsd = "3.3.0"
|
||||
supervisor = "4.1.0"
|
||||
supervisor-checks = "0.8.1"
|
||||
ua-parser = "0.18.0"
|
||||
urllib3 = "1.26.19"
|
||||
urllib3 = "1.26.18"
|
||||
user-agents = "2.0"
|
||||
werkzeug = "2.3.8"
|
||||
wtforms = "2.2.1"
|
||||
xlsxwriter = "1.2.2"
|
||||
tzlocal = "4.3.1"
|
||||
pyodbc = "5.1.0"
|
||||
|
||||
[tool.poetry.group.all_ds]
|
||||
optional = true
|
||||
@@ -111,7 +111,7 @@ nzalchemy = "^11.0.2"
|
||||
nzpy = ">=1.15"
|
||||
oauth2client = "4.1.3"
|
||||
openpyxl = "3.0.7"
|
||||
oracledb = "2.1.2"
|
||||
oracledb = "1.4.0"
|
||||
pandas = "1.3.4"
|
||||
phoenixdb = "0.7"
|
||||
pinotdb = ">=0.4.5"
|
||||
@@ -122,11 +122,12 @@ pydruid = "0.5.7"
|
||||
pyexasol = "0.12.0"
|
||||
pyhive = "0.6.1"
|
||||
pyignite = "0.6.1"
|
||||
pymongo = { version = "4.6.3", extras = ["srv", "tls"] }
|
||||
pymongo = { version = "4.3.3", extras = ["srv", "tls"] }
|
||||
pymssql = "2.2.8"
|
||||
pyodbc = "5.1.0"
|
||||
pyodbc = "4.0.28"
|
||||
python-arango = "6.1.0"
|
||||
python-rapidjson = "1.1.0"
|
||||
qds-sdk = ">=1.9.6"
|
||||
requests-aws-sign = "0.1.5"
|
||||
sasl = ">=0.1.3"
|
||||
simple-salesforce = "0.74.3"
|
||||
@@ -152,7 +153,7 @@ optional = true
|
||||
pytest = "7.4.0"
|
||||
coverage = "7.2.7"
|
||||
freezegun = "1.2.1"
|
||||
jwcrypto = "1.5.6"
|
||||
jwcrypto = "1.5.1"
|
||||
mock = "5.0.2"
|
||||
pre-commit = "3.3.3"
|
||||
ptpython = "3.0.23"
|
||||
@@ -168,7 +169,7 @@ build-backend = "poetry.core.masonry.api"
|
||||
[tool.ruff]
|
||||
exclude = [".git", "viz-lib", "node_modules", "migrations"]
|
||||
ignore = ["E501"]
|
||||
select = ["C9", "E", "F", "W", "I001", "UP004"]
|
||||
select = ["C9", "E", "F", "W", "I001"]
|
||||
|
||||
[tool.ruff.mccabe]
|
||||
max-complexity = 15
|
||||
|
||||
@@ -14,7 +14,7 @@ from redash.app import create_app # noqa
|
||||
from redash.destinations import import_destinations
|
||||
from redash.query_runner import import_query_runners
|
||||
|
||||
__version__ = "24.07.0-dev"
|
||||
__version__ = "24.01.0-dev"
|
||||
|
||||
|
||||
if os.environ.get("REMOTE_DEBUG"):
|
||||
|
||||
@@ -36,10 +36,14 @@ def create_app():
|
||||
from .metrics import request as request_metrics
|
||||
from .models import db, users
|
||||
from .utils import sentry
|
||||
from .version_check import reset_new_version_status
|
||||
|
||||
sentry.init()
|
||||
app = Redash()
|
||||
|
||||
# Check and update the cached version for use by the client
|
||||
reset_new_version_status()
|
||||
|
||||
security.init_app(app)
|
||||
request_metrics.init_app(app)
|
||||
db.init_app(app)
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
import json
|
||||
import logging
|
||||
|
||||
import jwt
|
||||
import requests
|
||||
import simplejson
|
||||
|
||||
logger = logging.getLogger("jwt_auth")
|
||||
|
||||
@@ -25,7 +25,7 @@ def get_public_key_from_net(url):
|
||||
if "keys" in data:
|
||||
public_keys = []
|
||||
for key_dict in data["keys"]:
|
||||
public_key = jwt.algorithms.RSAAlgorithm.from_jwk(json.dumps(key_dict))
|
||||
public_key = jwt.algorithms.RSAAlgorithm.from_jwk(simplejson.dumps(key_dict))
|
||||
public_keys.append(public_key)
|
||||
|
||||
get_public_keys.key_cache[url] = public_keys
|
||||
|
||||
@@ -8,7 +8,6 @@ from redash import settings
|
||||
|
||||
try:
|
||||
from ldap3 import Connection, Server
|
||||
from ldap3.utils.conv import escape_filter_chars
|
||||
except ImportError:
|
||||
if settings.LDAP_LOGIN_ENABLED:
|
||||
sys.exit(
|
||||
@@ -70,7 +69,6 @@ def login(org_slug=None):
|
||||
|
||||
|
||||
def auth_ldap_user(username, password):
|
||||
clean_username = escape_filter_chars(username)
|
||||
server = Server(settings.LDAP_HOST_URL, use_ssl=settings.LDAP_SSL)
|
||||
if settings.LDAP_BIND_DN is not None:
|
||||
conn = Connection(
|
||||
@@ -85,7 +83,7 @@ def auth_ldap_user(username, password):
|
||||
|
||||
conn.search(
|
||||
settings.LDAP_SEARCH_DN,
|
||||
settings.LDAP_SEARCH_TEMPLATE % {"username": clean_username},
|
||||
settings.LDAP_SEARCH_TEMPLATE % {"username": username},
|
||||
attributes=[settings.LDAP_DISPLAY_NAME_KEY, settings.LDAP_EMAIL_KEY],
|
||||
)
|
||||
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import json
|
||||
|
||||
import click
|
||||
import simplejson
|
||||
from flask import current_app
|
||||
from flask.cli import FlaskGroup, run_command, with_appcontext
|
||||
from rq import Connection
|
||||
@@ -54,7 +53,7 @@ def version():
|
||||
@manager.command()
|
||||
def status():
|
||||
with Connection(rq_redis_connection):
|
||||
print(json.dumps(get_status(), indent=2))
|
||||
print(simplejson.dumps(get_status(), indent=2))
|
||||
|
||||
|
||||
@manager.command()
|
||||
|
||||
@@ -5,7 +5,7 @@ logger = logging.getLogger(__name__)
|
||||
__all__ = ["BaseDestination", "register", "get_destination", "import_destinations"]
|
||||
|
||||
|
||||
class BaseDestination:
|
||||
class BaseDestination(object):
|
||||
deprecated = False
|
||||
|
||||
def __init__(self, configuration):
|
||||
|
||||
@@ -42,8 +42,8 @@ class Discord(BaseDestination):
|
||||
"inline": True,
|
||||
},
|
||||
]
|
||||
if alert.custom_body:
|
||||
fields.append({"name": "Description", "value": alert.custom_body})
|
||||
if alert.options.get("custom_body"):
|
||||
fields.append({"name": "Description", "value": alert.options["custom_body"]})
|
||||
if new_state == Alert.TRIGGERED_STATE:
|
||||
if alert.options.get("custom_subject"):
|
||||
text = alert.options["custom_subject"]
|
||||
|
||||
@@ -26,13 +26,13 @@ class Slack(BaseDestination):
|
||||
fields = [
|
||||
{
|
||||
"title": "Query",
|
||||
"type": "mrkdwn",
|
||||
"value": "{host}/queries/{query_id}".format(host=host, query_id=query.id),
|
||||
"short": True,
|
||||
},
|
||||
{
|
||||
"title": "Alert",
|
||||
"type": "mrkdwn",
|
||||
"value": "{host}/alerts/{alert_id}".format(host=host, alert_id=alert.id),
|
||||
"short": True,
|
||||
},
|
||||
]
|
||||
if alert.custom_body:
|
||||
@@ -50,7 +50,7 @@ class Slack(BaseDestination):
|
||||
payload = {"attachments": [{"text": text, "color": color, "fields": fields}]}
|
||||
|
||||
try:
|
||||
resp = requests.post(options.get("url"), data=json_dumps(payload).encode("utf-8"), timeout=5.0)
|
||||
resp = requests.post(options.get("url"), data=json_dumps(payload), timeout=5.0)
|
||||
logging.warning(resp.text)
|
||||
if resp.status_code != 200:
|
||||
logging.error("Slack send ERROR. status_code => {status}".format(status=resp.status_code))
|
||||
|
||||
@@ -15,6 +15,7 @@ from redash.authentication.account import (
)
from redash.handlers import routes
from redash.handlers.base import json_response, org_scoped_rule
from redash.version_check import get_latest_version

logger = logging.getLogger(__name__)

@@ -255,11 +256,15 @@ def number_format_config():

def client_config():
if not current_user.is_api_user() and current_user.is_authenticated:
client_config_inner = {
client_config = {
"newVersionAvailable": bool(get_latest_version()),
"version": __version__,
}
else:
client_config_inner = {}
client_config = {}

if current_user.has_permission("admin") and current_org.get_setting("beacon_consent") is None:
client_config["showBeaconConsentMessage"] = True

defaults = {
"allowScriptsInUserInput": settings.ALLOW_SCRIPTS_IN_USER_INPUT,
@@ -279,12 +284,12 @@ def client_config():
"tableCellMaxJSONSize": settings.TABLE_CELL_MAX_JSON_SIZE,
}

client_config_inner.update(defaults)
client_config_inner.update({"basePath": base_href()})
client_config_inner.update(date_time_format_config())
client_config_inner.update(number_format_config())
client_config.update(defaults)
client_config.update({"basePath": base_href()})
client_config.update(date_time_format_config())
client_config.update(number_format_config())

return client_config_inner
return client_config


def messages():

@@ -5,15 +5,15 @@ from flask import Blueprint, current_app, request
from flask_login import current_user, login_required
from flask_restful import Resource, abort
from sqlalchemy import cast
from sqlalchemy.dialects.postgresql import ARRAY
from sqlalchemy.dialects import postgresql
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy_utils.functions import sort_query

from redash import settings
from redash.authentication import current_org
from redash.models import db
from redash.tasks import record_event as record_event_task
from redash.utils import json_dumps
from redash.utils.query_order import sort_query

routes = Blueprint("redash", __name__, template_folder=settings.fix_assets_path("templates"))

@@ -114,7 +114,7 @@ def json_response(response):
def filter_by_tags(result_set, column):
if request.args.getlist("tags"):
tags = request.args.getlist("tags")
result_set = result_set.filter(cast(column, ARRAY(db.Text)).contains(tags))
result_set = result_set.filter(cast(column, postgresql.ARRAY(db.Text)).contains(tags))
return result_set


@@ -96,7 +96,7 @@ class DashboardListResource(BaseResource):
org=self.current_org,
user=self.current_user,
is_draft=True,
layout=[],
layout="[]",
)
models.db.session.add(dashboard)
models.db.session.commit()

@@ -1,12 +1,13 @@
from flask import g, redirect, render_template, request, url_for
from flask_login import login_user
from wtforms import Form, PasswordField, StringField, validators
from wtforms import BooleanField, Form, PasswordField, StringField, validators
from wtforms.fields.html5 import EmailField

from redash import settings
from redash.authentication.org_resolving import current_org
from redash.handlers.base import routes
from redash.models import Group, Organization, User, db
from redash.tasks.general import subscribe


class SetupForm(Form):
@@ -14,6 +15,8 @@ class SetupForm(Form):
email = EmailField("Email Address", validators=[validators.Email()])
password = PasswordField("Password", validators=[validators.Length(6)])
org_name = StringField("Organization Name", validators=[validators.InputRequired()])
security_notifications = BooleanField()
newsletter = BooleanField()


def create_org(org_name, user_name, email, password):
@@ -54,6 +57,8 @@ def setup():
return redirect("/")

form = SetupForm(request.form)
form.newsletter.data = True
form.security_notifications.data = True

if request.method == "POST" and form.validate():
default_org, user = create_org(form.org_name.data, form.name.data, form.email.data, form.password.data)
@@ -61,6 +66,10 @@ def setup():
g.org = default_org
login_user(user)

# signup to newsletter if needed
if form.newsletter.data or form.security_notifications:
subscribe.delay(form.data)

return redirect(url_for("redash.index", org_slug=None))

return render_template("setup.html", form=form)

@@ -7,6 +7,7 @@ from redash.permissions import (
require_permission,
)
from redash.serializers import serialize_visualization
from redash.utils import json_dumps


class VisualizationListResource(BaseResource):
@@ -17,6 +18,7 @@ class VisualizationListResource(BaseResource):
query = get_object_or_404(models.Query.get_by_id_and_org, kwargs.pop("query_id"), self.current_org)
require_object_modify_permission(query, self.current_user)

kwargs["options"] = json_dumps(kwargs["options"])
kwargs["query_rel"] = query

vis = models.Visualization(**kwargs)
@@ -32,6 +34,8 @@ class VisualizationResource(BaseResource):
require_object_modify_permission(vis.query_rel, self.current_user)

kwargs = request.get_json(force=True)
if "options" in kwargs:
kwargs["options"] = json_dumps(kwargs["options"])

kwargs.pop("id", None)
kwargs.pop("query_id", None)

@@ -1,6 +1,6 @@
import json
import os

import simplejson
from flask import url_for

WEBPACK_MANIFEST_PATH = os.path.join(os.path.dirname(__file__), "../../client/dist/", "asset-manifest.json")
@@ -15,7 +15,7 @@ def configure_webpack(app):
if assets is None or app.debug:
try:
with open(WEBPACK_MANIFEST_PATH) as fp:
assets = json.load(fp)
assets = simplejson.load(fp)
except IOError:
app.logger.exception("Unable to load webpack manifest")
assets = {}

@@ -9,6 +9,7 @@ from redash.permissions import (
view_only,
)
from redash.serializers import serialize_widget
from redash.utils import json_dumps


class WidgetListResource(BaseResource):
@@ -29,6 +30,7 @@ class WidgetListResource(BaseResource):
dashboard = models.Dashboard.get_by_id_and_org(widget_properties.get("dashboard_id"), self.current_org)
require_object_modify_permission(dashboard, self.current_user)

widget_properties["options"] = json_dumps(widget_properties["options"])
widget_properties.pop("id", None)

visualization_id = widget_properties.pop("visualization_id")
@@ -42,6 +44,7 @@ class WidgetListResource(BaseResource):

widget = models.Widget(**widget_properties)
models.db.session.add(widget)
models.db.session.commit()

models.db.session.commit()
return serialize_widget(widget)
@@ -62,7 +65,7 @@ class WidgetResource(BaseResource):
require_object_modify_permission(widget.dashboard, self.current_user)
widget_properties = request.get_json(force=True)
widget.text = widget_properties["text"]
widget.options = widget_properties["options"]
widget.options = json_dumps(widget_properties["options"])
models.db.session.commit()
return serialize_widget(widget)


@@ -6,7 +6,7 @@ import time

import pytz
from sqlalchemy import UniqueConstraint, and_, cast, distinct, func, or_
from sqlalchemy.dialects.postgresql import ARRAY, DOUBLE_PRECISION, JSONB
from sqlalchemy.dialects import postgresql
from sqlalchemy.event import listens_for
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import (
@@ -40,17 +40,14 @@ from redash.models.base import (
from redash.models.changes import Change, ChangeTrackingMixin # noqa
from redash.models.mixins import BelongsToOrgMixin, TimestampMixin
from redash.models.organizations import Organization
from redash.models.parameterized_query import (
InvalidParameterError,
ParameterizedQuery,
QueryDetachedFromDataSourceError,
)
from redash.models.parameterized_query import ParameterizedQuery
from redash.models.types import (
Configuration,
EncryptedConfiguration,
JSONText,
MutableDict,
MutableList,
json_cast_property,
PseudoJSON,
pseudo_json_cast_property,
)
from redash.models.users import ( # noqa
AccessPermission,
@@ -83,7 +80,7 @@ from redash.utils.configuration import ConfigurationContainer
logger = logging.getLogger(__name__)


class ScheduledQueriesExecutions:
class ScheduledQueriesExecutions(object):
KEY_NAME = "sq:executed_at"

def __init__(self):
@@ -126,10 +123,7 @@ class DataSource(BelongsToOrgMixin, db.Model):

data_source_groups = db.relationship("DataSourceGroup", back_populates="data_source", cascade="all")
__tablename__ = "data_sources"
__table_args__ = (
db.Index("data_sources_org_id_name", "org_id", "name"),
{"extend_existing": True},
)
__table_args__ = (db.Index("data_sources_org_id_name", "org_id", "name"),)

def __eq__(self, other):
return self.id == other.id
@@ -303,11 +297,34 @@ class DataSourceGroup(db.Model):
view_only = Column(db.Boolean, default=False)

__tablename__ = "data_source_groups"
__table_args__ = ({"extend_existing": True},)


DESERIALIZED_DATA_ATTR = "_deserialized_data"


class DBPersistence(object):
@property
def data(self):
if self._data is None:
return None

if not hasattr(self, DESERIALIZED_DATA_ATTR):
setattr(self, DESERIALIZED_DATA_ATTR, json_loads(self._data))

return self._deserialized_data

@data.setter
def data(self, data):
if hasattr(self, DESERIALIZED_DATA_ATTR):
delattr(self, DESERIALIZED_DATA_ATTR)
self._data = data


QueryResultPersistence = settings.dynamic_settings.QueryResultPersistence or DBPersistence


@generic_repr("id", "org_id", "data_source_id", "query_hash", "runtime", "retrieved_at")
class QueryResult(db.Model, BelongsToOrgMixin):
class QueryResult(db.Model, QueryResultPersistence, BelongsToOrgMixin):
id = primary_key("QueryResult")
org_id = Column(key_type("Organization"), db.ForeignKey("organizations.id"))
org = db.relationship(Organization)
@@ -315,8 +332,8 @@ class QueryResult(db.Model, BelongsToOrgMixin):
data_source = db.relationship(DataSource, backref=backref("query_results"))
query_hash = Column(db.String(32), index=True)
query_text = Column("query", db.Text)
data = Column(JSONText, nullable=True)
runtime = Column(DOUBLE_PRECISION)
_data = Column("data", db.Text)
runtime = Column(postgresql.DOUBLE_PRECISION)
retrieved_at = Column(db.DateTime(True))

__tablename__ = "query_results"
@@ -457,11 +474,11 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
last_modified_by = db.relationship(User, backref="modified_queries", foreign_keys=[last_modified_by_id])
is_archived = Column(db.Boolean, default=False, index=True)
is_draft = Column(db.Boolean, default=True, index=True)
schedule = Column(MutableDict.as_mutable(JSONB), nullable=True)
interval = json_cast_property(db.Integer, "schedule", "interval", default=0)
schedule = Column(MutableDict.as_mutable(PseudoJSON), nullable=True)
interval = pseudo_json_cast_property(db.Integer, "schedule", "interval", default=0)
schedule_failures = Column(db.Integer, default=0)
visualizations = db.relationship("Visualization", cascade="all, delete-orphan")
options = Column(MutableDict.as_mutable(JSONB), default={})
options = Column(MutableDict.as_mutable(PseudoJSON), default={})
search_vector = Column(
TSVectorType(
"id",
@@ -472,7 +489,7 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
),
nullable=True,
)
tags = Column("tags", MutableList.as_mutable(ARRAY(db.Unicode)), nullable=True)
tags = Column("tags", MutableList.as_mutable(postgresql.ARRAY(db.Unicode)), nullable=True)

query_class = SearchBaseQuery
__tablename__ = "queries"
@@ -508,7 +525,7 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
name="Table",
description="",
type="TABLE",
options={},
options="{}",
)
)
return query
@@ -574,12 +591,11 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
@classmethod
def past_scheduled_queries(cls):
now = utils.utcnow()
queries = Query.query.filter(func.jsonb_typeof(Query.schedule) != "null").order_by(Query.id)
queries = Query.query.filter(Query.schedule.isnot(None)).order_by(Query.id)
return [
query
for query in queries
if "until" in query.schedule
and query.schedule["until"] is not None
if query.schedule["until"] is not None
and pytz.utc.localize(datetime.datetime.strptime(query.schedule["until"], "%Y-%m-%d")) <= now
]

@@ -587,7 +603,7 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
def outdated_queries(cls):
queries = (
Query.query.options(joinedload(Query.latest_query_data).load_only("retrieved_at"))
.filter(func.jsonb_typeof(Query.schedule) != "null")
.filter(Query.schedule.isnot(None))
.order_by(Query.id)
.all()
)
@@ -815,20 +831,7 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
def update_query_hash(self):
should_apply_auto_limit = self.options.get("apply_auto_limit", False) if self.options else False
query_runner = self.data_source.query_runner if self.data_source else BaseQueryRunner({})
query_text = self.query_text

parameters_dict = {p["name"]: p.get("value") for p in self.parameters} if self.options else {}
if any(parameters_dict):
try:
query_text = self.parameterized.apply(parameters_dict).query
except InvalidParameterError as e:
logging.info(f"Unable to update hash for query {self.id} because of invalid parameters: {str(e)}")
except QueryDetachedFromDataSourceError as e:
logging.info(
f"Unable to update hash for query {self.id} because of dropdown query {e.query_id} is unattached from datasource"
)

self.query_hash = query_runner.gen_query_hash(query_text, should_apply_auto_limit)
self.query_hash = query_runner.gen_query_hash(self.query_text, should_apply_auto_limit)


@listens_for(Query, "before_insert")
@@ -933,7 +936,7 @@ class Alert(TimestampMixin, BelongsToOrgMixin, db.Model):
query_rel = db.relationship(Query, backref=backref("alerts", cascade="all"))
user_id = Column(key_type("User"), db.ForeignKey("users.id"))
user = db.relationship(User, backref="alerts")
options = Column(MutableDict.as_mutable(JSONB), nullable=True)
options = Column(MutableDict.as_mutable(PseudoJSON))
state = Column(db.String(255), default=UNKNOWN_STATE)
subscriptions = db.relationship("AlertSubscription", cascade="all, delete-orphan")
last_triggered_at = Column(db.DateTime(True), nullable=True)
@@ -1044,13 +1047,13 @@ class Dashboard(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model
user_id = Column(key_type("User"), db.ForeignKey("users.id"))
user = db.relationship(User)
# layout is no longer used, but kept so we know how to render old dashboards.
layout = Column(MutableList.as_mutable(JSONB), default=[])
layout = Column(db.Text)
dashboard_filters_enabled = Column(db.Boolean, default=False)
is_archived = Column(db.Boolean, default=False, index=True)
is_draft = Column(db.Boolean, default=True, index=True)
widgets = db.relationship("Widget", backref="dashboard", lazy="dynamic")
tags = Column("tags", MutableList.as_mutable(ARRAY(db.Unicode)), nullable=True)
options = Column(MutableDict.as_mutable(JSONB), default={})
tags = Column("tags", MutableList.as_mutable(postgresql.ARRAY(db.Unicode)), nullable=True)
options = Column(MutableDict.as_mutable(postgresql.JSON), server_default="{}", default={})

__tablename__ = "dashboards"
__mapper_args__ = {"version_id_col": version}
@@ -1163,7 +1166,7 @@ class Visualization(TimestampMixin, BelongsToOrgMixin, db.Model):
query_rel = db.relationship(Query, back_populates="visualizations")
name = Column(db.String(255))
description = Column(db.String(4096), nullable=True)
options = Column(MutableDict.as_mutable(JSONB), nullable=True)
options = Column(db.Text)

__tablename__ = "visualizations"

@@ -1190,7 +1193,7 @@ class Widget(TimestampMixin, BelongsToOrgMixin, db.Model):
visualization = db.relationship(Visualization, backref=backref("widgets", cascade="delete"))
text = Column(db.Text, nullable=True)
width = Column(db.Integer)
options = Column(MutableDict.as_mutable(JSONB), default={})
options = Column(db.Text)
dashboard_id = Column(key_type("Dashboard"), db.ForeignKey("dashboards.id"), index=True)

__tablename__ = "widgets"
@@ -1222,7 +1225,7 @@ class Event(db.Model):
action = Column(db.String(255))
object_type = Column(db.String(255))
object_id = Column(db.String(255), nullable=True)
additional_properties = Column(MutableDict.as_mutable(JSONB), nullable=True, default={})
additional_properties = Column(MutableDict.as_mutable(PseudoJSON), nullable=True, default={})
created_at = Column(db.DateTime(True), default=db.func.now())

__tablename__ = "events"

@@ -1,13 +1,13 @@
import functools

from flask_sqlalchemy import BaseQuery, SQLAlchemy
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.dialects import postgresql
from sqlalchemy.orm import object_session
from sqlalchemy.pool import NullPool
from sqlalchemy_searchable import SearchQueryMixin, make_searchable, vectorizer

from redash import settings
from redash.utils import json_dumps, json_loads
from redash.utils import json_dumps


class RedashSQLAlchemy(SQLAlchemy):
@@ -28,10 +28,7 @@ class RedashSQLAlchemy(SQLAlchemy):
return options


db = RedashSQLAlchemy(
session_options={"expire_on_commit": False},
engine_options={"json_serializer": json_dumps, "json_deserializer": json_loads},
)
db = RedashSQLAlchemy(session_options={"expire_on_commit": False})
# Make sure the SQLAlchemy mappers are all properly configured first.
# This is required by SQLAlchemy-Searchable as it adds DDL listeners
# on the configuration phase of models.
@@ -53,7 +50,7 @@ def integer_vectorizer(column):
return db.func.cast(column, db.Text)


@vectorizer(UUID)
@vectorizer(postgresql.UUID)
def uuid_vectorizer(column):
return db.func.cast(column, db.Text)

@@ -71,7 +68,7 @@ def gfk_type(cls):
return cls


class GFKBase:
class GFKBase(object):
"""
Compatibility with 'generic foreign key' approach Peewee used.
"""

@@ -1,8 +1,8 @@
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.inspection import inspect
from sqlalchemy_utils.models import generic_repr

from .base import Column, GFKBase, db, key_type, primary_key
from .types import PseudoJSON


@generic_repr("id", "object_type", "object_id", "created_at")
@@ -13,7 +13,7 @@ class Change(GFKBase, db.Model):
object_version = Column(db.Integer, default=0)
user_id = Column(key_type("User"), db.ForeignKey("users.id"))
user = db.relationship("User", backref="changes")
change = Column(JSONB)
change = Column(PseudoJSON)
created_at = Column(db.DateTime(True), default=db.func.now())

__tablename__ = "changes"
@@ -45,7 +45,7 @@ class Change(GFKBase, db.Model):
)


class ChangeTrackingMixin:
class ChangeTrackingMixin(object):
skipped_fields = ("id", "created_at", "updated_at", "version")
_clean_values = None


@@ -3,7 +3,7 @@ from sqlalchemy.event import listens_for
from .base import Column, db


class TimestampMixin:
class TimestampMixin(object):
updated_at = Column(db.DateTime(True), default=db.func.now(), nullable=False)
created_at = Column(db.DateTime(True), default=db.func.now(), nullable=False)

@@ -17,7 +17,7 @@ def timestamp_before_update(mapper, connection, target):
target.updated_at = db.func.now()


class BelongsToOrgMixin:
class BelongsToOrgMixin(object):
@classmethod
def get_by_id_and_org(cls, object_id, org, org_cls=None):
query = cls.query.filter(cls.id == object_id)

@@ -1,4 +1,3 @@
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.orm.attributes import flag_modified
from sqlalchemy_utils.models import generic_repr

@@ -6,7 +5,7 @@ from redash.settings.organization import settings as org_settings

from .base import Column, db, primary_key
from .mixins import TimestampMixin
from .types import MutableDict
from .types import MutableDict, PseudoJSON
from .users import Group, User


@@ -18,7 +17,7 @@ class Organization(TimestampMixin, db.Model):
id = primary_key("Organization")
name = Column(db.String(255))
slug = Column(db.String(255), unique=True)
settings = Column(MutableDict.as_mutable(JSONB), default={})
settings = Column(MutableDict.as_mutable(PseudoJSON))
groups = db.relationship("Group", lazy="dynamic")
events = db.relationship("Event", lazy="dynamic", order_by="desc(Event.created_at)")


@@ -103,7 +103,7 @@ def _is_value_within_options(value, dropdown_options, allow_list=False):
return str(value) in dropdown_options


class ParameterizedQuery:
class ParameterizedQuery(object):
def __init__(self, template, schema=None, org=None):
self.schema = schema or []
self.org = org

@@ -1,12 +1,25 @@
from sqlalchemy import cast
from sqlalchemy.dialects.postgresql import JSON
from sqlalchemy.ext.indexable import index_property
from sqlalchemy.ext.mutable import Mutable
from sqlalchemy.types import TypeDecorator
from sqlalchemy_utils import EncryptedType

from redash.models.base import db
from redash.utils import json_dumps, json_loads
from redash.utils.configuration import ConfigurationContainer

from .base import db


class Configuration(TypeDecorator):
impl = db.Text

def process_bind_param(self, value, dialect):
return value.to_json()

def process_result_value(self, value, dialect):
return ConfigurationContainer.from_json(value)


class EncryptedConfiguration(EncryptedType):
def process_bind_param(self, value, dialect):
@@ -18,8 +31,8 @@ class EncryptedConfiguration(EncryptedType):
)


# Utilized for cases when JSON size is bigger than JSONB (255MB) or JSON (10MB) limit
class JSONText(TypeDecorator):
# XXX replace PseudoJSON and MutableDict with real JSON field
class PseudoJSON(TypeDecorator):
impl = db.Text

def process_bind_param(self, value, dialect):
@@ -94,3 +107,19 @@ class json_cast_property(index_property):
def expr(self, model):
expr = super(json_cast_property, self).expr(model)
return expr.astext.cast(self.cast_type)


class pseudo_json_cast_property(index_property):
"""
A SQLAlchemy index property that is able to cast the
entity attribute as the specified cast type. Useful
for PseudoJSON colums for easier querying/filtering.
"""

def __init__(self, cast_type, *args, **kwargs):
super().__init__(*args, **kwargs)
self.cast_type = cast_type

def expr(self, model):
expr = cast(getattr(model, self.attr_name), JSON)[self.index]
return expr.astext.cast(self.cast_type)

@@ -8,7 +8,7 @@ from operator import or_
|
||||
from flask import current_app, request_started, url_for
|
||||
from flask_login import AnonymousUserMixin, UserMixin, current_user
|
||||
from passlib.apps import custom_app_context as pwd_context
|
||||
from sqlalchemy.dialects.postgresql import ARRAY, JSONB
|
||||
from sqlalchemy.dialects import postgresql
|
||||
from sqlalchemy_utils import EmailType
|
||||
from sqlalchemy_utils.models import generic_repr
|
||||
|
||||
@@ -60,7 +60,7 @@ def init_app(app):
|
||||
request_started.connect(update_user_active_at, app)
|
||||
|
||||
|
||||
class PermissionsCheckMixin:
|
||||
class PermissionsCheckMixin(object):
|
||||
def has_permission(self, permission):
|
||||
return self.has_permissions((permission,))
|
||||
|
||||
@@ -84,14 +84,14 @@ class User(TimestampMixin, db.Model, BelongsToOrgMixin, UserMixin, PermissionsCh
|
||||
password_hash = Column(db.String(128), nullable=True)
|
||||
group_ids = Column(
|
||||
"groups",
|
||||
MutableList.as_mutable(ARRAY(key_type("Group"))),
|
||||
MutableList.as_mutable(postgresql.ARRAY(key_type("Group"))),
|
||||
nullable=True,
|
||||
)
|
||||
api_key = Column(db.String(40), default=lambda: generate_token(40), unique=True)
|
||||
|
||||
disabled_at = Column(db.DateTime(True), default=None, nullable=True)
|
||||
details = Column(
|
||||
MutableDict.as_mutable(JSONB),
|
||||
MutableDict.as_mutable(postgresql.JSONB),
|
||||
nullable=True,
|
||||
server_default="{}",
|
||||
default={},
|
||||
@@ -267,7 +267,7 @@ class Group(db.Model, BelongsToOrgMixin):
|
||||
org = db.relationship("Organization", back_populates="groups")
|
||||
type = Column(db.String(255), default=REGULAR_GROUP)
|
||||
name = Column(db.String(100))
|
||||
permissions = Column(ARRAY(db.String(255)), default=DEFAULT_PERMISSIONS)
|
||||
permissions = Column(postgresql.ARRAY(db.String(255)), default=DEFAULT_PERMISSIONS)
|
||||
created_at = Column(db.DateTime(True), default=db.func.now())
|
||||
|
||||
__tablename__ = "groups"
|
||||
|
||||
@@ -54,7 +54,7 @@ def require_access(obj, user, need_view_only):
|
||||
abort(403)
|
||||
|
||||
|
||||
class require_permissions:
|
||||
class require_permissions(object):
|
||||
def __init__(self, permissions, allow_one=False):
|
||||
self.permissions = permissions
|
||||
self.allow_one = allow_one
|
||||
|
||||
@@ -9,6 +9,7 @@ from rq.timeouts import JobTimeoutException
|
||||
from sshtunnel import open_tunnel
|
||||
|
||||
from redash import settings, utils
|
||||
from redash.utils import json_loads
|
||||
from redash.utils.requests_session import (
|
||||
UnacceptableAddressException,
|
||||
requests_or_advocate,
|
||||
@@ -113,13 +114,12 @@ class NotSupported(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class BaseQueryRunner:
|
||||
class BaseQueryRunner(object):
|
||||
deprecated = False
|
||||
should_annotate_query = True
|
||||
noop_query = None
|
||||
limit_query = " LIMIT 1000"
|
||||
limit_keywords = ["LIMIT", "OFFSET"]
|
||||
limit_after_select = False
|
||||
|
||||
def __init__(self, configuration):
|
||||
self.syntax = "sql"
|
||||
@@ -243,7 +243,7 @@ class BaseQueryRunner:
|
||||
|
||||
if error is not None:
|
||||
raise Exception("Failed running query [%s]." % query)
|
||||
return results["rows"]
|
||||
return json_loads(results)["rows"]
|
||||
|
||||
@classmethod
|
||||
def to_dict(cls):
|
||||
@@ -302,19 +302,10 @@ class BaseSQLQueryRunner(BaseQueryRunner):
|
||||
parsed_query = sqlparse.parse(query)[0]
|
||||
limit_tokens = sqlparse.parse(self.limit_query)[0].tokens
|
||||
length = len(parsed_query.tokens)
|
||||
if not self.limit_after_select:
|
||||
if parsed_query.tokens[length - 1].ttype == sqlparse.tokens.Punctuation:
|
||||
parsed_query.tokens[length - 1 : length - 1] = limit_tokens
|
||||
else:
|
||||
parsed_query.tokens += limit_tokens
|
||||
if parsed_query.tokens[length - 1].ttype == sqlparse.tokens.Punctuation:
|
||||
parsed_query.tokens[length - 1 : length - 1] = limit_tokens
|
||||
else:
|
||||
for i in range(length - 1, -1, -1):
|
||||
if parsed_query[i].value.upper() == "SELECT":
|
||||
index = parsed_query.token_index(parsed_query[i + 1])
|
||||
parsed_query = sqlparse.sql.Statement(
|
||||
parsed_query.tokens[:index] + limit_tokens + parsed_query.tokens[index:]
|
||||
)
|
||||
break
|
||||
parsed_query.tokens += limit_tokens
|
||||
return str(parsed_query)
|
||||
|
||||
def apply_auto_limit(self, query_text, should_apply_auto_limit):
|
||||
|
||||
@@ -63,8 +63,5 @@ class AmazonElasticsearchService(ElasticSearch2):
|
||||
|
||||
self.auth = AWSV4Sign(cred, region, "es")
|
||||
|
||||
def get_auth(self):
|
||||
return self.auth
|
||||
|
||||
|
||||
register(AmazonElasticsearchService)
|
||||
|
||||
@@ -7,6 +7,7 @@ from redash.query_runner import (
|
||||
BaseQueryRunner,
|
||||
register,
|
||||
)
|
||||
from redash.utils import json_dumps
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -80,11 +81,12 @@ class Arango(BaseQueryRunner):
|
||||
"rows": result,
|
||||
}
|
||||
|
||||
json_data = json_dumps(data, ignore_nan=True)
|
||||
error = None
|
||||
except Exception:
|
||||
raise
|
||||
|
||||
return data, error
|
||||
return json_data, error
|
||||
|
||||
|
||||
register(Arango)
|
||||
|
||||
@@ -12,6 +12,7 @@ from redash.query_runner import (
|
||||
register,
|
||||
)
|
||||
from redash.settings import parse_boolean
|
||||
from redash.utils import json_dumps, json_loads
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
ANNOTATE_QUERY = parse_boolean(os.environ.get("ATHENA_ANNOTATE_QUERY", "true"))
|
||||
@@ -46,7 +47,7 @@ _TYPE_MAPPINGS = {
|
||||
}
|
||||
|
||||
|
||||
class SimpleFormatter:
|
||||
class SimpleFormatter(object):
|
||||
def format(self, operation, parameters=None):
|
||||
return operation
|
||||
|
||||
@@ -209,6 +210,7 @@ class Athena(BaseQueryRunner):
|
||||
if error is not None:
|
||||
self._handle_run_query_error(error)
|
||||
|
||||
results = json_loads(results)
|
||||
for row in results["rows"]:
|
||||
table_name = "{0}.{1}".format(row["table_schema"], row["table_name"])
|
||||
if table_name not in schema:
|
||||
@@ -255,13 +257,14 @@ class Athena(BaseQueryRunner):
|
||||
},
|
||||
}
|
||||
|
||||
json_data = json_dumps(data, ignore_nan=True)
|
||||
error = None
|
||||
except Exception:
|
||||
if cursor.query_id:
|
||||
cursor.cancel()
|
||||
raise
|
||||
|
||||
return data, error
|
||||
return json_data, error
|
||||
|
||||
|
||||
register(Athena)
|
||||
|
||||
@@ -13,7 +13,7 @@ from redash.query_runner import (
|
||||
JobTimeoutException,
|
||||
register,
|
||||
)
|
||||
from redash.utils import json_loads
|
||||
from redash.utils import json_dumps, json_loads
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -157,16 +157,17 @@ class AxibaseTSD(BaseQueryRunner):
|
||||
columns, rows = generate_rows_and_columns(data)
|
||||
|
||||
data = {"columns": columns, "rows": rows}
|
||||
json_data = json_dumps(data)
|
||||
error = None
|
||||
|
||||
except SQLException as e:
|
||||
data = None
|
||||
json_data = None
|
||||
error = e.content
|
||||
except (KeyboardInterrupt, InterruptException, JobTimeoutException):
|
||||
sql.cancel_query(query_id)
|
||||
raise
|
||||
|
||||
return data, error
|
||||
return json_data, error
|
||||
|
||||
def get_schema(self, get_stats=False):
|
||||
connection = atsd_client.connect_url(
|
||||
|
||||
@@ -8,7 +8,7 @@ from redash.query_runner import (
|
||||
BaseQueryRunner,
|
||||
register,
|
||||
)
|
||||
from redash.utils import json_loads
|
||||
from redash.utils import json_dumps, json_loads
|
||||
|
||||
try:
|
||||
from azure.kusto.data.exceptions import KustoServiceError
|
||||
@@ -124,15 +124,16 @@ class AzureKusto(BaseQueryRunner):
|
||||
|
||||
error = None
|
||||
data = {"columns": columns, "rows": rows}
|
||||
json_data = json_dumps(data)
|
||||
|
||||
except KustoServiceError as err:
|
||||
data = None
|
||||
json_data = None
|
||||
try:
|
||||
error = err.args[1][0]["error"]["@message"]
|
||||
except (IndexError, KeyError):
|
||||
error = err.args[1]
|
||||
|
||||
return data, error
|
||||
return json_data, error
|
||||
|
||||
def get_schema(self, get_stats=False):
|
||||
query = ".show database schema as json"
|
||||
@@ -142,6 +143,8 @@ class AzureKusto(BaseQueryRunner):
|
||||
if error is not None:
|
||||
self._handle_run_query_error(error)
|
||||
|
||||
results = json_loads(results)
|
||||
|
||||
schema_as_json = json_loads(results["rows"][0]["DatabaseSchema"])
|
||||
tables_list = schema_as_json["Databases"][self.configuration["database"]]["Tables"].values()
|
||||
|
||||
|
||||
@@ -16,7 +16,7 @@ from redash.query_runner import (
|
||||
JobTimeoutException,
|
||||
register,
|
||||
)
|
||||
from redash.utils import json_loads
|
||||
from redash.utils import json_dumps, json_loads
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -100,7 +100,7 @@ class BigQuery(BaseQueryRunner):
|
||||
|
||||
def __init__(self, configuration):
|
||||
super().__init__(configuration)
|
||||
self.should_annotate_query = configuration.get("useQueryAnnotation", False)
|
||||
self.should_annotate_query = configuration["useQueryAnnotation"]
|
||||
|
||||
@classmethod
|
||||
def enabled(cls):
|
||||
@@ -318,6 +318,7 @@ class BigQuery(BaseQueryRunner):
|
||||
if error is not None:
|
||||
self._handle_run_query_error(error)
|
||||
|
||||
results = json_loads(results)
|
||||
for row in results["rows"]:
|
||||
table_name = "{0}.{1}".format(row["table_schema"], row["table_name"])
|
||||
if table_name not in schema:
|
||||
@@ -345,8 +346,9 @@ class BigQuery(BaseQueryRunner):
|
||||
data = self._get_query_result(jobs, query)
|
||||
error = None
|
||||
|
||||
json_data = json_dumps(data, ignore_nan=True)
|
||||
except apiclient.errors.HttpError as e:
|
||||
data = None
|
||||
json_data = None
|
||||
if e.resp.status in [400, 404]:
|
||||
error = json_loads(e.content)["error"]["message"]
|
||||
else:
|
||||
@@ -361,7 +363,7 @@ class BigQuery(BaseQueryRunner):
|
||||
|
||||
raise
|
||||
|
||||
return data, error
|
||||
return json_data, error
|
||||
|
||||
|
||||
register(BigQuery)
|
||||
|
||||
@@ -5,6 +5,7 @@ from base64 import b64decode
|
||||
from tempfile import NamedTemporaryFile
|
||||
|
||||
from redash.query_runner import BaseQueryRunner, register
|
||||
from redash.utils import JSONEncoder, json_dumps, json_loads
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -26,6 +27,13 @@ def generate_ssl_options_dict(protocol, cert_path=None):
|
||||
return ssl_options
|
||||
|
||||
|
||||
class CassandraJSONEncoder(JSONEncoder):
|
||||
def default(self, o):
|
||||
if isinstance(o, sortedset):
|
||||
return list(o)
|
||||
return super(CassandraJSONEncoder, self).default(o)
|
||||
|
||||
|
||||
class Cassandra(BaseQueryRunner):
|
||||
noop_query = "SELECT dateof(now()) FROM system.local"
|
||||
|
||||
@@ -33,12 +41,6 @@ class Cassandra(BaseQueryRunner):
|
||||
def enabled(cls):
|
||||
return enabled
|
||||
|
||||
@classmethod
|
||||
def custom_json_encoder(cls, dec, o):
|
||||
if isinstance(o, sortedset):
|
||||
return list(o)
|
||||
return None
|
||||
|
||||
@classmethod
|
||||
def configuration_schema(cls):
|
||||
return {
|
||||
@@ -84,6 +86,7 @@ class Cassandra(BaseQueryRunner):
|
||||
select release_version from system.local;
|
||||
"""
|
||||
results, error = self.run_query(query, None)
|
||||
results = json_loads(results)
|
||||
release_version = results["rows"][0]["release_version"]
|
||||
|
||||
query = """
|
||||
@@ -104,6 +107,7 @@ class Cassandra(BaseQueryRunner):
|
||||
)
|
||||
|
||||
results, error = self.run_query(query, None)
|
||||
results = json_loads(results)
|
||||
|
||||
schema = {}
|
||||
for row in results["rows"]:
|
||||
@@ -151,8 +155,9 @@ class Cassandra(BaseQueryRunner):
|
||||
rows = [dict(zip(column_names, row)) for row in result]
|
||||
|
||||
data = {"columns": columns, "rows": rows}
|
||||
json_data = json_dumps(data, cls=CassandraJSONEncoder)
|
||||
|
||||
return data, None
|
||||
return json_data, None
|
||||
|
||||
def _generate_cert_file(self):
|
||||
cert_encoded_bytes = self.configuration.get("sslCertificateFile", None)
|
||||
|
||||
@@ -15,6 +15,7 @@ from redash.query_runner import (
|
||||
register,
|
||||
split_sql_statements,
|
||||
)
|
||||
from redash.utils import json_dumps, json_loads
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -84,6 +85,8 @@ class ClickHouse(BaseSQLQueryRunner):
|
||||
if error is not None:
|
||||
self._handle_run_query_error(error)
|
||||
|
||||
results = json_loads(results)
|
||||
|
||||
for row in results["rows"]:
|
||||
table_name = "{}.{}".format(row["database"], row["table"])
|
||||
|
||||
@@ -121,7 +124,7 @@ class ClickHouse(BaseSQLQueryRunner):
|
||||
verify=verify,
|
||||
)
|
||||
|
||||
if not r.ok:
|
||||
if r.status_code != 200:
|
||||
raise Exception(r.text)
|
||||
|
||||
# In certain situations the response body can be empty even if the query was successful, for example
|
||||
@@ -129,11 +132,7 @@ class ClickHouse(BaseSQLQueryRunner):
|
||||
if not r.text:
|
||||
return {}
|
||||
|
||||
response = r.json()
|
||||
if "exception" in response:
|
||||
raise Exception(response["exception"])
|
||||
|
||||
return response
|
||||
return r.json()
|
||||
except requests.RequestException as e:
|
||||
if e.response:
|
||||
details = "({}, Status Code: {})".format(e.__class__.__name__, e.response.status_code)
|
||||
@@ -201,24 +200,25 @@ class ClickHouse(BaseSQLQueryRunner):
|
||||
queries = split_multi_query(query)
|
||||
|
||||
if not queries:
|
||||
data = None
|
||||
json_data = None
|
||||
error = "Query is empty"
|
||||
return data, error
|
||||
return json_data, error
|
||||
|
||||
try:
|
||||
# If just one query was given no session is needed
|
||||
if len(queries) == 1:
|
||||
data = self._clickhouse_query(queries[0])
|
||||
results = self._clickhouse_query(queries[0])
|
||||
else:
|
||||
# If more than one query was given, a session is needed. Parameter session_check must be false
|
||||
# for the first query
|
||||
session_id = "redash_{}".format(uuid4().hex)
|
||||
|
||||
data = self._clickhouse_query(queries[0], session_id, session_check=False)
|
||||
results = self._clickhouse_query(queries[0], session_id, session_check=False)
|
||||
|
||||
for query in queries[1:]:
|
||||
data = self._clickhouse_query(query, session_id, session_check=True)
|
||||
results = self._clickhouse_query(query, session_id, session_check=True)
|
||||
|
||||
data = json_dumps(results)
|
||||
error = None
|
||||
except Exception as e:
|
||||
data = None
|
||||
|
||||
@@ -3,7 +3,7 @@ import datetime
|
||||
import yaml
|
||||
|
||||
from redash.query_runner import BaseQueryRunner, register
|
||||
from redash.utils import parse_human_time
|
||||
from redash.utils import json_dumps, parse_human_time
|
||||
|
||||
try:
|
||||
import boto3
|
||||
@@ -121,7 +121,7 @@ class CloudWatch(BaseQueryRunner):
|
||||
|
||||
rows, columns = parse_response(results)
|
||||
|
||||
return {"rows": rows, "columns": columns}, None
|
||||
return json_dumps({"rows": rows, "columns": columns}), None
|
||||
|
||||
|
||||
register(CloudWatch)
|
||||
|
||||
@@ -4,7 +4,7 @@ import time
|
||||
import yaml
|
||||
|
||||
from redash.query_runner import BaseQueryRunner, register
|
||||
from redash.utils import parse_human_time
|
||||
from redash.utils import json_dumps, parse_human_time
|
||||
|
||||
try:
|
||||
import boto3
|
||||
@@ -146,7 +146,7 @@ class CloudWatchInsights(BaseQueryRunner):
|
||||
time.sleep(POLL_INTERVAL)
|
||||
elapsed += POLL_INTERVAL
|
||||
|
||||
return data, None
|
||||
return json_dumps(data), None
|
||||
|
||||
|
||||
register(CloudWatchInsights)
|
||||
|
||||
@@ -9,6 +9,7 @@ import logging
|
||||
from os import environ
|
||||
|
||||
from redash.query_runner import BaseQueryRunner
|
||||
from redash.utils import json_dumps, json_loads
|
||||
|
||||
from . import register
|
||||
|
||||
@@ -114,7 +115,7 @@ class CorporateMemoryQueryRunner(BaseQueryRunner):
|
||||
logger.info("results are: {}".format(results))
|
||||
# Not sure why we do not use the json package here but all other
|
||||
# query runner do it the same way :-)
|
||||
sparql_results = results
|
||||
sparql_results = json_loads(results)
|
||||
# transform all bindings to redash rows
|
||||
rows = []
|
||||
for sparql_row in sparql_results["results"]["bindings"]:
|
||||
@@ -132,7 +133,7 @@ class CorporateMemoryQueryRunner(BaseQueryRunner):
|
||||
columns.append({"name": var, "friendly_name": var, "type": "string"})
|
||||
# Not sure why we do not use the json package here but all other
|
||||
# query runner do it the same way :-)
|
||||
return {"columns": columns, "rows": rows}
|
||||
return json_dumps({"columns": columns, "rows": rows})
|
||||
|
||||
@classmethod
|
||||
def name(cls):
|
||||
|
||||
@@ -10,6 +10,7 @@ from redash.query_runner import (
|
||||
BaseQueryRunner,
|
||||
register,
|
||||
)
|
||||
from redash.utils import json_dumps
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
try:
|
||||
@@ -154,7 +155,7 @@ class Couchbase(BaseQueryRunner):
|
||||
rows, columns = parse_results(result.json()["results"])
|
||||
data = {"columns": columns, "rows": rows}
|
||||
|
||||
return data, None
|
||||
return json_dumps(data), None
|
||||
|
||||
@classmethod
|
||||
def name(cls):
|
||||
|
||||
@@ -4,6 +4,7 @@ import logging
|
||||
import yaml
|
||||
|
||||
from redash.query_runner import BaseQueryRunner, NotSupported, register
|
||||
from redash.utils import json_dumps
|
||||
from redash.utils.requests_session import (
|
||||
UnacceptableAddressException,
|
||||
requests_or_advocate,
|
||||
@@ -95,18 +96,19 @@ class CSV(BaseQueryRunner):
|
||||
break
|
||||
data["rows"] = df[labels].replace({np.nan: None}).to_dict(orient="records")
|
||||
|
||||
json_data = json_dumps(data)
|
||||
error = None
|
||||
except KeyboardInterrupt:
|
||||
error = "Query cancelled by user."
|
||||
data = None
|
||||
json_data = None
|
||||
except UnacceptableAddressException:
|
||||
error = "Can't query private addresses."
|
||||
data = None
|
||||
json_data = None
|
||||
except Exception as e:
|
||||
error = "Error reading {0}. {1}".format(path, str(e))
|
||||
data = None
|
||||
json_data = None
|
||||
|
||||
return data, error
|
||||
return json_data, error
|
||||
|
||||
def get_schema(self):
|
||||
raise NotSupported()
|
||||
|
||||
@@ -16,6 +16,7 @@ from redash.query_runner import (
|
||||
BaseQueryRunner,
|
||||
register,
|
||||
)
|
||||
from redash.utils import json_dumps, json_loads
|
||||
|
||||
|
||||
class Databend(BaseQueryRunner):
|
||||
@@ -84,10 +85,11 @@ class Databend(BaseQueryRunner):
|
||||
|
||||
data = {"columns": columns, "rows": rows}
|
||||
error = None
|
||||
json_data = json_dumps(data)
|
||||
finally:
|
||||
connection.close()
|
||||
|
||||
return data, error
|
||||
return json_data, error
|
||||
|
||||
def get_schema(self, get_stats=False):
|
||||
query = """
|
||||
@@ -104,6 +106,7 @@ class Databend(BaseQueryRunner):
|
||||
self._handle_run_query_error(error)
|
||||
|
||||
schema = {}
|
||||
results = json_loads(results)
|
||||
|
||||
for row in results["rows"]:
|
||||
table_name = "{}.{}".format(row["table_schema"], row["table_name"])
|
||||
@@ -130,6 +133,7 @@ class Databend(BaseQueryRunner):
|
||||
self._handle_run_query_error(error)
|
||||
|
||||
schema = {}
|
||||
results = json_loads(results)
|
||||
|
||||
for row in results["rows"]:
|
||||
table_name = "{}.{}".format(row["table_schema"], row["table_name"])
|
||||
|
||||
@@ -16,6 +16,7 @@ from redash.query_runner import (
|
||||
split_sql_statements,
|
||||
)
|
||||
from redash.settings import cast_int_or_default
|
||||
from redash.utils import json_dumps, json_loads
|
||||
|
||||
try:
|
||||
import pyodbc
|
||||
@@ -114,13 +115,16 @@ class Databricks(BaseSQLQueryRunner):
|
||||
logger.warning("Truncated result set.")
|
||||
statsd_client.incr("redash.query_runner.databricks.truncated")
|
||||
data["truncated"] = True
|
||||
json_data = json_dumps(data)
|
||||
error = None
|
||||
else:
|
||||
error = None
|
||||
data = {
|
||||
"columns": [{"name": "result", "type": TYPE_STRING}],
|
||||
"rows": [{"result": "No data was returned."}],
|
||||
}
|
||||
json_data = json_dumps(
|
||||
{
|
||||
"columns": [{"name": "result", "type": TYPE_STRING}],
|
||||
"rows": [{"result": "No data was returned."}],
|
||||
}
|
||||
)
|
||||
|
||||
cursor.close()
|
||||
except pyodbc.Error as e:
|
||||
@@ -128,9 +132,9 @@ class Databricks(BaseSQLQueryRunner):
|
||||
error = str(e.args[1])
|
||||
else:
|
||||
error = str(e)
|
||||
data = None
|
||||
json_data = None
|
||||
|
||||
return data, error
|
||||
return json_data, error
|
||||
|
||||
def get_schema(self):
|
||||
raise NotSupported()
|
||||
@@ -142,6 +146,8 @@ class Databricks(BaseSQLQueryRunner):
|
||||
if error is not None:
|
||||
self._handle_run_query_error(error)
|
||||
|
||||
results = json_loads(results)
|
||||
|
||||
first_column_name = results["columns"][0]["name"]
|
||||
return [row[first_column_name] for row in results["rows"]]
|
||||
|
||||
|
||||
@@ -11,6 +11,7 @@ from redash.query_runner import (
|
||||
JobTimeoutException,
|
||||
register,
|
||||
)
|
||||
from redash.utils import json_dumps, json_loads
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -77,6 +78,8 @@ class DB2(BaseSQLQueryRunner):
|
||||
if error is not None:
|
||||
self._handle_run_query_error(error)
|
||||
|
||||
results = json_loads(results)
|
||||
|
||||
for row in results["rows"]:
|
||||
if row["TABLE_SCHEMA"] != "public":
|
||||
table_name = "{}.{}".format(row["TABLE_SCHEMA"], row["TABLE_NAME"])
|
||||
@@ -127,22 +130,23 @@ class DB2(BaseSQLQueryRunner):
|
||||
|
||||
data = {"columns": columns, "rows": rows}
|
||||
error = None
|
||||
json_data = json_dumps(data)
|
||||
else:
|
||||
error = "Query completed but it returned no data."
|
||||
data = None
|
||||
json_data = None
|
||||
except (select.error, OSError):
|
||||
error = "Query interrupted. Please retry."
|
||||
data = None
|
||||
json_data = None
|
||||
except ibm_db_dbi.DatabaseError as e:
|
||||
error = str(e)
|
||||
data = None
|
||||
json_data = None
|
||||
except (KeyboardInterrupt, InterruptException, JobTimeoutException):
|
||||
connection.cancel()
|
||||
raise
|
||||
finally:
|
||||
connection.close()
|
||||
|
||||
return data, error
|
||||
return json_data, error
|
||||
|
||||
|
||||
register(DB2)
|
||||
|
||||
@@ -8,6 +8,7 @@ except ImportError:
|
||||
enabled = False
|
||||
|
||||
from redash.query_runner import BaseQueryRunner, register
|
||||
from redash.utils import json_dumps
|
||||
|
||||
|
||||
def reduce_item(reduced_item, key, value):
|
||||
@@ -80,7 +81,7 @@ class Dgraph(BaseQueryRunner):
|
||||
client_stub.close()
|
||||
|
||||
def run_query(self, query, user):
|
||||
data = None
|
||||
json_data = None
|
||||
error = None
|
||||
|
||||
try:
|
||||
@@ -108,10 +109,12 @@ class Dgraph(BaseQueryRunner):
|
||||
|
||||
# finally, assemble both the columns and data
|
||||
data = {"columns": columns, "rows": processed_data}
|
||||
|
||||
json_data = json_dumps(data)
|
||||
except Exception as e:
|
||||
error = e
|
||||
|
||||
return data, error
|
||||
return json_data, error
|
||||
|
||||
def get_schema(self, get_stats=False):
|
||||
"""Queries Dgraph for all the predicates, their types, their tokenizers, etc.
|
||||
|
||||
@@ -13,6 +13,7 @@ from redash.query_runner import (
|
||||
guess_type,
|
||||
register,
|
||||
)
|
||||
from redash.utils import json_dumps, json_loads
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -97,7 +98,9 @@ class Drill(BaseHTTPQueryRunner):
|
||||
if error is not None:
|
||||
return None, error
|
||||
|
||||
return parse_response(response.json()), None
|
||||
results = parse_response(response.json())
|
||||
|
||||
return json_dumps(results), None
|
||||
|
||||
def get_schema(self, get_stats=False):
|
||||
query = """
|
||||
@@ -129,6 +132,8 @@ class Drill(BaseHTTPQueryRunner):
|
||||
if error is not None:
|
||||
self._handle_run_query_error(error)
|
||||
|
||||
results = json_loads(results)
|
||||
|
||||
schema = {}
|
||||
|
||||
for row in results["rows"]:
|
||||
|
||||
@@ -12,6 +12,7 @@ from redash.query_runner import (
|
||||
BaseQueryRunner,
|
||||
register,
|
||||
)
|
||||
from redash.utils import json_dumps, json_loads
|
||||
|
||||
TYPES_MAP = {1: TYPE_STRING, 2: TYPE_INTEGER, 3: TYPE_BOOLEAN}
|
||||
|
||||
@@ -58,10 +59,12 @@ class Druid(BaseQueryRunner):
|
||||
|
||||
data = {"columns": columns, "rows": rows}
|
||||
error = None
|
||||
json_data = json_dumps(data)
|
||||
print(json_data)
|
||||
finally:
|
||||
connection.close()
|
||||
|
||||
return data, error
|
||||
return json_data, error
|
||||
|
||||
def get_schema(self, get_stats=False):
|
||||
query = """
|
||||
@@ -78,6 +81,7 @@ class Druid(BaseQueryRunner):
|
||||
self._handle_run_query_error(error)
|
||||
|
||||
schema = {}
|
||||
results = json_loads(results)
|
||||
|
||||
for row in results["rows"]:
|
||||
table_name = "{}.{}".format(row["TABLE_SCHEMA"], row["TABLE_NAME"])
|
||||
|
||||
@@ -19,6 +19,7 @@ try:
|
||||
except ImportError:
|
||||
enabled = False
|
||||
|
||||
from redash.utils import json_dumps
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -105,17 +106,18 @@ class e6data(BaseQueryRunner):
|
||||
columns.append({"name": column_name, "type": column_type})
|
||||
rows = [dict(zip([c["name"] for c in columns], r)) for r in results]
|
||||
data = {"columns": columns, "rows": rows}
|
||||
json_data = json_dumps(data)
|
||||
error = None
|
||||
|
||||
except Exception as error:
|
||||
logger.debug(error)
|
||||
data = None
|
||||
json_data = None
|
||||
finally:
|
||||
if cursor is not None:
|
||||
cursor.clear()
|
||||
cursor.close()
|
||||
|
||||
return data, error
|
||||
return json_data, error
|
||||
|
||||
def test_connection(self):
|
||||
self.noop_query = "SELECT 1"
|
||||
|
||||
@@ -16,7 +16,7 @@ from redash.query_runner import (
|
||||
JobTimeoutException,
|
||||
register,
|
||||
)
|
||||
from redash.utils import json_loads
|
||||
from redash.utils import json_dumps, json_loads
|
||||
|
||||
try:
|
||||
import http.client as http_client
|
||||
@@ -129,8 +129,6 @@ class BaseElasticSearch(BaseQueryRunner):
|
||||
for index_name in mappings_data:
|
||||
index_mappings = mappings_data[index_name]
|
||||
for m in index_mappings.get("mappings", {}):
|
||||
if not isinstance(index_mappings["mappings"][m], dict):
|
||||
continue
|
||||
if "properties" not in index_mappings["mappings"][m]:
|
||||
continue
|
||||
for property_name in index_mappings["mappings"][m]["properties"]:
|
||||
@@ -408,18 +406,18 @@ class Kibana(BaseElasticSearch):
|
||||
# TODO: Handle complete ElasticSearch queries (JSON based sent over HTTP POST)
|
||||
raise Exception("Advanced queries are not supported")
|
||||
|
||||
data = {"columns": result_columns, "rows": result_rows}
|
||||
json_data = json_dumps({"columns": result_columns, "rows": result_rows})
|
||||
except requests.HTTPError as e:
|
||||
logger.exception(e)
|
||||
r = e.response
|
||||
error = "Failed to execute query. Return Code: {0} Reason: {1}".format(r.status_code, r.text)
|
||||
data = None
|
||||
json_data = None
|
||||
except requests.exceptions.RequestException as e:
|
||||
logger.exception(e)
|
||||
error = "Connection refused"
|
||||
data = None
|
||||
json_data = None
|
||||
|
||||
return data, error
|
||||
return json_data, error
|
||||
|
||||
|
||||
class ElasticSearch(BaseElasticSearch):
|
||||
@@ -462,20 +460,20 @@ class ElasticSearch(BaseElasticSearch):
|
||||
result_rows = []
|
||||
self._parse_results(mappings, result_fields, r.json(), result_columns, result_rows)
|
||||
|
||||
data = {"columns": result_columns, "rows": result_rows}
|
||||
json_data = json_dumps({"columns": result_columns, "rows": result_rows})
|
||||
except (KeyboardInterrupt, JobTimeoutException) as e:
|
||||
logger.exception(e)
|
||||
raise
|
||||
except requests.HTTPError as e:
|
||||
logger.exception(e)
|
||||
error = "Failed to execute query. Return Code: {0} Reason: {1}".format(r.status_code, r.text)
|
||||
data = None
|
||||
json_data = None
|
||||
except requests.exceptions.RequestException as e:
|
||||
logger.exception(e)
|
||||
error = "Connection refused"
|
||||
data = None
|
||||
json_data = None
|
||||
|
||||
return data, error
|
||||
return json_data, error
|
||||
|
||||
|
||||
register(Kibana)
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import json
|
||||
import logging
|
||||
from typing import Optional, Tuple
|
||||
|
||||
@@ -11,6 +10,7 @@ from redash.query_runner import (
|
||||
BaseHTTPQueryRunner,
|
||||
register,
|
||||
)
|
||||
from redash.utils import json_dumps, json_loads
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -46,7 +46,7 @@ class ElasticSearch2(BaseHTTPQueryRunner):
|
||||
self.syntax = "json"
|
||||
|
||||
def get_response(self, url, auth=None, http_method="get", **kwargs):
|
||||
url = "{}{}".format(self.configuration["server"], url)
|
||||
url = "{}{}".format(self.configuration["url"], url)
|
||||
headers = kwargs.pop("headers", {})
|
||||
headers["Accept"] = "application/json"
|
||||
return super().get_response(url, auth, http_method, headers=headers, **kwargs)
|
||||
@@ -62,10 +62,11 @@ class ElasticSearch2(BaseHTTPQueryRunner):
|
||||
query_results = response.json()
|
||||
data = self._parse_results(result_fields, query_results)
|
||||
error = None
|
||||
return data, error
|
||||
json_data = json_dumps(data)
|
||||
return json_data, error
|
||||
|
||||
def _build_query(self, query: str) -> Tuple[dict, str, Optional[list]]:
|
||||
query = json.loads(query)
|
||||
query = json_loads(query)
|
||||
index_name = query.pop("index", "")
|
||||
result_fields = query.pop("result_fields", None)
|
||||
url = "/{}/_search".format(index_name)
|
||||
|
||||
@@ -9,6 +9,7 @@ from redash.query_runner import (
|
||||
BaseQueryRunner,
|
||||
register,
|
||||
)
|
||||
from redash.utils import json_dumps
|
||||
|
||||
|
||||
def _exasol_type_mapper(val, data_type):
|
||||
@@ -108,13 +109,14 @@ class Exasol(BaseQueryRunner):
|
||||
|
||||
rows = [dict(zip(cnames, row)) for row in statement]
|
||||
data = {"columns": columns, "rows": rows}
|
||||
json_data = json_dumps(data)
|
||||
finally:
|
||||
if statement is not None:
|
||||
statement.close()
|
||||
|
||||
connection.close()
|
||||
|
||||
return data, error
|
||||
return json_data, error
|
||||
|
||||
def get_schema(self, get_stats=False):
|
||||
query = """
|
||||
|
||||
@@ -3,6 +3,7 @@ import logging
|
||||
import yaml
|
||||
|
||||
from redash.query_runner import BaseQueryRunner, NotSupported, register
|
||||
from redash.utils import json_dumps
|
||||
from redash.utils.requests_session import (
|
||||
UnacceptableAddressException,
|
||||
requests_or_advocate,
|
||||
@@ -93,18 +94,19 @@ class Excel(BaseQueryRunner):
|
||||
break
|
||||
data["rows"] = df[labels].replace({np.nan: None}).to_dict(orient="records")
|
||||
|
||||
json_data = json_dumps(data)
|
||||
error = None
|
||||
except KeyboardInterrupt:
|
||||
error = "Query cancelled by user."
|
||||
data = None
|
||||
json_data = None
|
||||
except UnacceptableAddressException:
|
||||
error = "Can't query private addresses."
|
||||
data = None
|
||||
json_data = None
|
||||
except Exception as e:
|
||||
error = "Error reading {0}. {1}".format(path, str(e))
|
||||
data = None
|
||||
json_data = None
|
||||
|
||||
return data, error
|
||||
return json_data, error
|
||||
|
||||
def get_schema(self):
|
||||
raise NotSupported()
|
||||
|
||||
@@ -12,7 +12,7 @@ from redash.query_runner import (
BaseSQLQueryRunner,
register,
)
from redash.utils import json_loads
from redash.utils import json_dumps, json_loads

logger = logging.getLogger(__name__)

@@ -180,14 +180,15 @@ class GoogleAnalytics(BaseSQLQueryRunner):
response = api.get(**params).execute()
data = parse_ga_response(response)
error = None
json_data = json_dumps(data)
except HttpError as e:
# Make sure we return a more readable error to the end user
error = e._get_reason()
data = None
json_data = None
else:
error = "Wrong query format."
data = None
return data, error
json_data = None
return json_data, error


register(GoogleAnalytics)

@@ -13,7 +13,7 @@ from redash.query_runner import (
BaseQueryRunner,
register,
)
from redash.utils import json_loads
from redash.utils import json_dumps, json_loads

logger = logging.getLogger(__name__)

@@ -160,8 +160,9 @@ class GoogleAnalytics4(BaseQueryRunner):
data = parse_ga_response(raw_result)

error = None
json_data = json_dumps(data)

return data, error
return json_data, error

def test_connection(self):
try:

@@ -11,7 +11,7 @@ from redash.query_runner import (
BaseSQLQueryRunner,
register,
)
from redash.utils import json_loads
from redash.utils import json_dumps, json_loads

logger = logging.getLogger(__name__)

@@ -151,14 +151,15 @@ class GoogleSearchConsole(BaseSQLQueryRunner):
response = api.searchanalytics().query(siteUrl=site_url, body=params).execute()
data = parse_ga_response(response, params["dimensions"])
error = None
json_data = json_dumps(data)
except HttpError as e:
# Make sure we return a more readable error to the end user
error = e._get_reason()
data = None
json_data = None
else:
error = "Wrong query format."
data = None
return data, error
json_data = None
return json_data, error


register(GoogleSearchConsole)

@@ -16,7 +16,7 @@ from redash.query_runner import (
guess_type,
register,
)
from redash.utils import json_loads
from redash.utils import json_dumps, json_loads

logger = logging.getLogger(__name__)

@@ -257,7 +257,7 @@ class GoogleSpreadsheet(BaseQueryRunner):

data = parse_spreadsheet(SpreadsheetWrapper(spreadsheet), worksheet_num_or_title)

return data, None
return json_dumps(data), None
except gspread.SpreadsheetNotFound:
return (
None,

@@ -10,6 +10,7 @@ from redash.query_runner import (
BaseQueryRunner,
register,
)
from redash.utils import json_dumps

logger = logging.getLogger(__name__)

@@ -34,7 +35,8 @@ def _transform_result(response):
}
)

return {"columns": columns, "rows": rows}
data = {"columns": columns, "rows": rows}
return json_dumps(data)


class Graphite(BaseQueryRunner):

@@ -12,6 +12,7 @@ from redash.query_runner import (
JobTimeoutException,
register,
)
from redash.utils import json_dumps

logger = logging.getLogger(__name__)

@@ -138,6 +139,7 @@ class Hive(BaseSQLQueryRunner):
rows = [dict(zip(column_names, row)) for row in cursor]

data = {"columns": columns, "rows": rows}
json_data = json_dumps(data)
error = None
except (KeyboardInterrupt, JobTimeoutException):
if connection:
@@ -148,12 +150,12 @@ class Hive(BaseSQLQueryRunner):
error = e.args[0].status.errorMessage
except AttributeError:
error = str(e)
data = None
json_data = None
finally:
if connection:
connection.close()

return data, error
return json_data, error


class HiveHttp(Hive):

@@ -12,6 +12,7 @@ from redash.query_runner import (
JobTimeoutException,
register,
)
from redash.utils import json_dumps, json_loads

ignite_available = importlib.util.find_spec("pyignite") is not None
gridgain_available = importlib.util.find_spec("pygridgain") is not None
@@ -80,6 +81,8 @@ class Ignite(BaseSQLQueryRunner):
if error is not None:
raise Exception("Failed getting schema.")

results = json_loads(results)

for row in results["rows"]:
if row["SCHEMA_NAME"] != self.configuration.get("schema", "PUBLIC"):
table_name = "{}.{}".format(row["SCHEMA_NAME"], row["TABLE_NAME"])
@@ -157,8 +160,8 @@ class Ignite(BaseSQLQueryRunner):
)
logger.debug("Ignite running query: %s", query)

result = self._parse_results(cursor)
data = {"columns": result[0], "rows": result[1]}
data = self._parse_results(cursor)
json_data = json_dumps({"columns": data[0], "rows": data[1]})
error = None

except (KeyboardInterrupt, JobTimeoutException):
@@ -168,7 +171,7 @@ class Ignite(BaseSQLQueryRunner):
if connection:
connection.close()

return data, error
return json_data, error


register(Ignite)

@@ -10,6 +10,7 @@ from redash.query_runner import (
JobTimeoutException,
register,
)
from redash.utils import json_dumps

logger = logging.getLogger(__name__)

@@ -119,13 +120,14 @@ class Impala(BaseSQLQueryRunner):
rows = [dict(zip(column_names, row)) for row in cursor]

data = {"columns": columns, "rows": rows}
json_data = json_dumps(data)
error = None
cursor.close()
except DatabaseError as e:
data = None
json_data = None
error = str(e)
except RPCError as e:
data = None
json_data = None
error = "Metastore Error [%s]" % str(e)
except (KeyboardInterrupt, JobTimeoutException):
connection.cancel()
@@ -134,7 +136,7 @@ class Impala(BaseSQLQueryRunner):
if connection:
connection.close()

return data, error
return json_data, error


register(Impala)

@@ -7,6 +7,7 @@ from redash.query_runner import (
BaseQueryRunner,
register,
)
from redash.utils import json_dumps

logger = logging.getLogger(__name__)

@@ -63,7 +64,7 @@ def _transform_result(results):
else:
result_columns = [{"name": c, "type": TYPE_STRING} for c in column_names]

return {"columns": result_columns, "rows": result_rows}
return json_dumps({"columns": result_columns, "rows": result_rows})


class InfluxDB(BaseQueryRunner):

@@ -13,6 +13,7 @@ from redash.query_runner import (
BaseQueryRunner,
register,
)
from redash.utils import json_dumps

try:
from influxdb_client import InfluxDBClient
@@ -187,7 +188,7 @@ class InfluxDBv2(BaseQueryRunner):
2. element: An error message, if an error occured. None, if no
error occurred.
"""
data = None
json_data = None
error = None

try:
@@ -203,12 +204,14 @@ class InfluxDBv2(BaseQueryRunner):
tables = client.query_api().query(query)

data = self._get_data_from_tables(tables)

json_data = json_dumps(data)
except Exception as ex:
error = str(ex)
finally:
self._cleanup_cert_files(influx_kwargs)

return data, error
return json_data, error


register(InfluxDBv2)

@@ -2,11 +2,11 @@ import re
from collections import OrderedDict

from redash.query_runner import TYPE_STRING, BaseHTTPQueryRunner, register
from redash.utils import json_loads
from redash.utils import json_dumps, json_loads


# TODO: make this more general and move into __init__.py
class ResultSet:
class ResultSet(object):
def __init__(self):
self.columns = OrderedDict()
self.rows = []
@@ -26,7 +26,7 @@ class ResultSet:
}

def to_json(self):
return {"rows": self.rows, "columns": list(self.columns.values())}
return json_dumps({"rows": self.rows, "columns": list(self.columns.values())})

def merge(self, set):
self.rows = self.rows + set.rows
Some files were not shown because too many files have changed in this diff
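
The hunks above apply one recurring change across the query runners: instead of returning the result dict directly, run_query serializes it with json_dumps and returns the JSON string alongside the error. The sketch below is illustrative only and is not a file from this diff; ExampleQueryRunner and its hard-coded result are hypothetical, while BaseQueryRunner, TYPE_STRING, register, and json_dumps are the names the hunks themselves import.

# Illustrative sketch (not part of this diff): a hypothetical query runner
# following the json_dumps pattern shown in the hunks above.
from redash.query_runner import TYPE_STRING, BaseQueryRunner, register
from redash.utils import json_dumps


class ExampleQueryRunner(BaseQueryRunner):
    def run_query(self, query, user):
        try:
            # Build the usual {"columns": [...], "rows": [...]} payload...
            columns = [{"name": "echo", "type": TYPE_STRING}]
            rows = [{"echo": query.strip()}]
            data = {"columns": columns, "rows": rows}
            # ...then serialize it before returning, as this branch does.
            json_data = json_dumps(data)
            error = None
        except Exception as e:
            json_data = None
            error = str(e)
        return json_data, error


register(ExampleQueryRunner)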