Mirror of https://github.com/getredash/redash.git (synced 2025-12-19 17:37:19 -05:00)

Compare commits: 28 commits, 24.11.0-dev ... dependabot
| SHA1 |
|---|
| 008d466d34 |
| 71902e5933 |
| 53eab14cef |
| 925bb91d8e |
| ec2ca6f986 |
| 96ea0194e8 |
| 2566229b0e |
| 2776992101 |
| 85f001982e |
| d03a2c4096 |
| 8c5890482a |
| 10ce280a96 |
| 0dd7ac3d2e |
| 4ee53a9445 |
| c08292d90e |
| 3142131cdd |
| 530c1a0734 |
| 52dc1769a1 |
| b9583c0b48 |
| 89d7f54e90 |
| d884da2b0b |
| f7d485082c |
| 130ab1fe1a |
| 2ff83679fe |
| de49b73855 |
| c12e68f5d1 |
| baa9bbd505 |
| 349cd5d031 |
.github/workflows/ci.yml (vendored): 26 lines changed

@@ -3,7 +3,7 @@ on:
   push:
     branches:
       - master
-  pull_request_target:
+  pull_request:
     branches:
       - master
 env:
@@ -60,15 +60,17 @@ jobs:
           mkdir -p /tmp/test-results/unit-tests
           docker cp tests:/app/coverage.xml ./coverage.xml
           docker cp tests:/app/junit.xml /tmp/test-results/unit-tests/results.xml
-      - name: Upload coverage reports to Codecov
-        uses: codecov/codecov-action@v3
+      # - name: Upload coverage reports to Codecov
+      #   uses: codecov/codecov-action@v3
+      #   with:
+      #     token: ${{ secrets.CODECOV_TOKEN }}
       - name: Store Test Results
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
-          name: test-results
+          name: backend-test-results
           path: /tmp/test-results
       - name: Store Coverage Results
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
           name: coverage
           path: coverage.xml
@@ -94,9 +96,9 @@ jobs:
       - name: Run Lint
         run: yarn lint:ci
       - name: Store Test Results
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
        with:
-          name: test-results
+          name: frontend-test-results
           path: /tmp/test-results

   frontend-unit-tests:
@@ -132,9 +134,9 @@ jobs:
       COMPOSE_PROJECT_NAME: cypress
       CYPRESS_INSTALL_BINARY: 0
       PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: 1
-      PERCY_TOKEN: ${{ secrets.PERCY_TOKEN }}
-      CYPRESS_PROJECT_ID: ${{ secrets.CYPRESS_PROJECT_ID }}
-      CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
+      # PERCY_TOKEN: ${{ secrets.PERCY_TOKEN }}
+      # CYPRESS_PROJECT_ID: ${{ secrets.CYPRESS_PROJECT_ID }}
+      # CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
     steps:
       - if: github.event.pull_request.mergeable == 'false'
        name: Exit if PR is not mergeable
@@ -169,7 +171,7 @@ jobs:
       - name: Copy Code Coverage Results
         run: docker cp cypress:/usr/src/app/coverage ./coverage || true
       - name: Store Coverage Results
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
           name: coverage
           path: coverage
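One side effect of the upload-artifact v3 to v4 bump is worth calling out: v4 artifacts are immutable, so two jobs can no longer upload to the same artifact name, which is presumably why the backend and frontend jobs now publish `backend-test-results` and `frontend-test-results` instead of a shared `test-results`. A minimal sketch (not part of this compare) of how a downstream job could still collect both under download-artifact v4, assuming the job names used here:

```yaml
  # Hypothetical aggregation job; the job names in `needs` are assumptions,
  # while the artifact names match the ones introduced in the diff above.
  collect-test-results:
    runs-on: ubuntu-22.04
    needs: [backend-unit-tests, frontend-lint]
    steps:
      - name: Download all test result artifacts
        uses: actions/download-artifact@v4
        with:
          pattern: "*-test-results"   # matches backend-test-results and frontend-test-results
          path: /tmp/test-results
          merge-multiple: true        # flatten both artifacts into one directory
```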
.github/workflows/preview-image.yml (vendored): 114 lines changed

@@ -4,6 +4,15 @@ on:
     tags:
       - '*-dev'
   workflow_dispatch:
+    inputs:
+      dockerRepository:
+        description: 'Docker repository'
+        required: true
+        default: 'preview'
+        type: choice
+        options:
+          - preview
+          - redash

 env:
   NODE_VERSION: 18
@@ -30,7 +39,20 @@ jobs:
         fi

   build-docker-image:
-    runs-on: ubuntu-22.04
+    runs-on: ${{ matrix.os }}
+    strategy:
+      fail-fast: false
+      matrix:
+        arch:
+          - amd64
+          - arm64
+        include:
+          - arch: amd64
+            os: ubuntu-22.04
+          - arch: arm64
+            os: ubuntu-22.04-arm
+    outputs:
+      VERSION_TAG: ${{ steps.version.outputs.VERSION_TAG }}
     needs:
       - build-skip-check
     if: needs.build-skip-check.outputs.skip == 'false'
@@ -45,11 +67,6 @@ jobs:
           node-version: ${{ env.NODE_VERSION }}
           cache: 'yarn'

-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3
-        with:
-          platforms: arm64
-
       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v3

@@ -60,6 +77,8 @@ jobs:
           password: ${{ secrets.DOCKER_PASS }}

       - name: Install Dependencies
+        env:
+          PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: true
         run: |
           npm install --global --force yarn@1.22.22
           yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
@@ -72,23 +91,92 @@ jobs:
           VERSION_TAG=$(jq -r .version package.json)
           echo "VERSION_TAG=$VERSION_TAG" >> "$GITHUB_OUTPUT"

-      # TODO: We can use GitHub Actions's matrix option to reduce the build time.
       - name: Build and push preview image to Docker Hub
+        id: build-preview
         uses: docker/build-push-action@v4
+        if: ${{ github.event.inputs.dockerRepository == 'preview' || !github.event.workflow_run }}
         with:
-          push: true
           tags: |
-            redash/redash:preview
-            redash/preview:${{ steps.version.outputs.VERSION_TAG }}
+            ${{ vars.DOCKER_USER }}/redash
+            ${{ vars.DOCKER_USER }}/preview
           context: .
           build-args: |
             test_all_deps=true
-          cache-from: type=gha,scope=multi-platform
-          cache-to: type=gha,mode=max,scope=multi-platform
-          platforms: linux/amd64,linux/arm64
+          outputs: type=image,push-by-digest=true,push=true
+          cache-from: type=gha,scope=${{ matrix.arch }}
+          cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
+        env:
+          DOCKER_CONTENT_TRUST: true
+
+      - name: Build and push release image to Docker Hub
+        id: build-release
+        uses: docker/build-push-action@v4
+        if: ${{ github.event.inputs.dockerRepository == 'redash' }}
+        with:
+          tags: |
+            ${{ vars.DOCKER_USER }}/redash:${{ steps.version.outputs.VERSION_TAG }}
+          context: .
+          build-args: |
+            test_all_deps=true
+          outputs: type=image,push-by-digest=true,push=true
+          cache-from: type=gha,scope=${{ matrix.arch }}
+          cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
         env:
           DOCKER_CONTENT_TRUST: true

       - name: "Failure: output container logs to console"
         if: failure()
         run: docker compose logs
+
+      - name: Export digest
+        run: |
+          mkdir -p ${{ runner.temp }}/digests
+          if [[ "${{ github.event.inputs.dockerRepository }}" == 'preview' || !github.event.workflow_run ]]; then
+            digest="${{ steps.build-preview.outputs.digest}}"
+          else
+            digest="${{ steps.build-release.outputs.digest}}"
+          fi
+          touch "${{ runner.temp }}/digests/${digest#sha256:}"
+
+      - name: Upload digest
+        uses: actions/upload-artifact@v4
+        with:
+          name: digests-${{ matrix.arch }}
+          path: ${{ runner.temp }}/digests/*
+          if-no-files-found: error
+
+  merge-docker-image:
+    runs-on: ubuntu-22.04
+    needs: build-docker-image
+    steps:
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+
+      - name: Login to DockerHub
+        uses: docker/login-action@v3
+        with:
+          username: ${{ vars.DOCKER_USER }}
+          password: ${{ secrets.DOCKER_PASS }}
+
+      - name: Download digests
+        uses: actions/download-artifact@v4
+        with:
+          path: ${{ runner.temp }}/digests
+          pattern: digests-*
+          merge-multiple: true
+
+      - name: Create and push manifest for the preview image
+        if: ${{ github.event.inputs.dockerRepository == 'preview' || !github.event.workflow_run }}
+        working-directory: ${{ runner.temp }}/digests
+        run: |
+          docker buildx imagetools create -t ${{ vars.DOCKER_USER }}/redash:preview \
+            $(printf '${{ vars.DOCKER_USER }}/redash:preview@sha256:%s ' *)
+          docker buildx imagetools create -t ${{ vars.DOCKER_USER }}/preview:${{ needs.build-docker-image.outputs.VERSION_TAG }} \
+            $(printf '${{ vars.DOCKER_USER }}/preview:${{ needs.build-docker-image.outputs.VERSION_TAG }}@sha256:%s ' *)
+
+      - name: Create and push manifest for the release image
+        if: ${{ github.event.inputs.dockerRepository == 'redash' }}
+        working-directory: ${{ runner.temp }}/digests
+        run: |
+          docker buildx imagetools create -t ${{ vars.DOCKER_USER }}/redash:${{ needs.build-docker-image.outputs.VERSION_TAG }} \
+            $(printf '${{ vars.DOCKER_USER }}/redash:${{ needs.build-docker-image.outputs.VERSION_TAG }}@sha256:%s ' *)
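The reworked workflow builds each architecture separately, pushes by digest, and then the `merge-docker-image` job stitches the per-arch digests into multi-arch manifests with `docker buildx imagetools create`. A quick sanity check on the result could be an extra step like the sketch below (not part of the diff; it assumes it runs after the preview manifest has been pushed and that `vars.DOCKER_USER` is the same repository owner used above):

```yaml
      # Hypothetical verification step: list the platforms in the merged manifest.
      - name: Inspect merged preview manifest
        run: docker buildx imagetools inspect ${{ vars.DOCKER_USER }}/redash:preview
```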
Makefile: 2 lines changed

@@ -34,7 +34,7 @@ clean:

 clean-all: clean
 	docker image rm --force \
-		redash/redash:10.1.0.b50633 redis:7-alpine maildev/maildev:latest \
+		redash/redash:latest redis:7-alpine maildev/maildev:latest \
 		pgautoupgrade/pgautoupgrade:15-alpine3.8 pgautoupgrade/pgautoupgrade:latest

 down:
@@ -67,7 +67,7 @@ help() {
   echo ""
   echo "shell -- open shell"
   echo "dev_server -- start Flask development server with debugger and auto reload"
-  echo "debug -- start Flask development server with remote debugger via ptvsd"
+  echo "debug -- start Flask development server with remote debugger via debugpy"
   echo "create_db -- create database tables"
   echo "manage -- CLI to manage redash"
   echo "tests -- run tests"
@@ -1,5 +1,6 @@
 import React from "react";
-import { clientConfig } from "@/services/auth";
+import Link from "@/components/Link";
+import { clientConfig, currentUser } from "@/services/auth";
 import frontendVersion from "@/version.json";

 export default function VersionInfo() {
@@ -9,6 +10,15 @@ export default function VersionInfo() {
         Version: {clientConfig.version}
         {frontendVersion !== clientConfig.version && ` (${frontendVersion.substring(0, 8)})`}
       </div>
+      {clientConfig.newVersionAvailable && currentUser.hasPermission("super_admin") && (
+        <div className="m-t-10">
+          {/* eslint-disable react/jsx-no-target-blank */}
+          <Link href="https://version.redash.io/" className="update-available" target="_blank" rel="noopener">
+            Update Available <i className="fa fa-external-link m-l-5" aria-hidden="true" />
+            <span className="sr-only">(opens in a new tab)</span>
+          </Link>
+        </div>
+      )}
     </React.Fragment>
   );
 }
client/app/components/BeaconConsent.jsx (new file): 79 lines

@@ -0,0 +1,79 @@
+import React, { useState } from "react";
+import Card from "antd/lib/card";
+import Button from "antd/lib/button";
+import Typography from "antd/lib/typography";
+import { clientConfig } from "@/services/auth";
+import Link from "@/components/Link";
+import HelpTrigger from "@/components/HelpTrigger";
+import DynamicComponent from "@/components/DynamicComponent";
+import OrgSettings from "@/services/organizationSettings";
+
+const Text = Typography.Text;
+
+function BeaconConsent() {
+  const [hide, setHide] = useState(false);
+
+  if (!clientConfig.showBeaconConsentMessage || hide) {
+    return null;
+  }
+
+  const hideConsentCard = () => {
+    clientConfig.showBeaconConsentMessage = false;
+    setHide(true);
+  };
+
+  const confirmConsent = (confirm) => {
+    let message = "🙏 Thank you.";
+
+    if (!confirm) {
+      message = "Settings Saved.";
+    }
+
+    OrgSettings.save({ beacon_consent: confirm }, message)
+      // .then(() => {
+      //   // const settings = get(response, 'settings');
+      //   // this.setState({ settings, formValues: { ...settings } });
+      // })
+      .finally(hideConsentCard);
+  };
+
+  return (
+    <DynamicComponent name="BeaconConsent">
+      <div className="m-t-10 tiled">
+        <Card
+          title={
+            <>
+              Would you be ok with sharing anonymous usage data with the Redash team?{" "}
+              <HelpTrigger type="USAGE_DATA_SHARING" />
+            </>
+          }
+          bordered={false}
+        >
+          <Text>Help Redash improve by automatically sending anonymous usage data:</Text>
+          <div className="m-t-5">
+            <ul>
+              <li> Number of users, queries, dashboards, alerts, widgets and visualizations.</li>
+              <li> Types of data sources, alert destinations and visualizations.</li>
+            </ul>
+          </div>
+          <Text>All data is aggregated and will never include any sensitive or private data.</Text>
+          <div className="m-t-5">
+            <Button type="primary" className="m-r-5" onClick={() => confirmConsent(true)}>
+              Yes
+            </Button>
+            <Button type="default" onClick={() => confirmConsent(false)}>
+              No
+            </Button>
+          </div>
+          <div className="m-t-15">
+            <Text type="secondary">
+              You can change this setting anytime from the <Link href="settings/general">Settings</Link> page.
+            </Text>
+          </div>
+        </Card>
+      </div>
+    </DynamicComponent>
+  );
+}
+
+export default BeaconConsent;
@@ -23,6 +23,7 @@ export const TYPES = mapValues(
   VALUE_SOURCE_OPTIONS: ["/user-guide/querying/query-parameters#Value-Source-Options", "Guide: Value Source Options"],
   SHARE_DASHBOARD: ["/user-guide/dashboards/sharing-dashboards", "Guide: Sharing and Embedding Dashboards"],
   AUTHENTICATION_OPTIONS: ["/user-guide/users/authentication-options", "Guide: Authentication Options"],
+  USAGE_DATA_SHARING: ["/open-source/admin-guide/usage-data", "Help: Anonymous Usage Data Sharing"],
   DS_ATHENA: ["/data-sources/amazon-athena-setup", "Guide: Help Setting up Amazon Athena"],
   DS_BIGQUERY: ["/data-sources/bigquery-setup", "Guide: Help Setting up BigQuery"],
   DS_URL: ["/data-sources/querying-urls", "Guide: Help Setting up URL"],
@@ -100,7 +101,7 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
     clearTimeout(this.iframeLoadingTimeout);
   }

-  loadIframe = url => {
+  loadIframe = (url) => {
     clearTimeout(this.iframeLoadingTimeout);
     this.setState({ loading: true, error: false });

@@ -115,8 +116,8 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
     clearTimeout(this.iframeLoadingTimeout);
   };

-  onPostMessageReceived = event => {
-    if (!some(allowedDomains, domain => startsWith(event.origin, domain))) {
+  onPostMessageReceived = (event) => {
+    if (!some(allowedDomains, (domain) => startsWith(event.origin, domain))) {
       return;
     }

@@ -133,7 +134,7 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
     return helpTriggerType ? helpTriggerType[0] : this.props.href;
   };

-  openDrawer = e => {
+  openDrawer = (e) => {
     // keep "open in new tab" behavior
     if (!e.shiftKey && !e.ctrlKey && !e.metaKey) {
       e.preventDefault();
@@ -143,7 +144,7 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
     }
   };

-  closeDrawer = event => {
+  closeDrawer = (event) => {
     if (event) {
       event.preventDefault();
     }
@@ -160,7 +161,7 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
     const tooltip = get(types, `${this.props.type}[1]`, this.props.title);
     const className = cx("help-trigger", this.props.className);
     const url = this.state.currentUrl;
-    const isAllowedDomain = some(allowedDomains, domain => startsWith(url || targetUrl, domain));
+    const isAllowedDomain = some(allowedDomains, (domain) => startsWith(url || targetUrl, domain));
     const shouldRenderAsLink = this.props.renderAsLink || !isAllowedDomain;

     return (
@@ -179,13 +180,15 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
             )}
           </>
         ) : null
-      }>
+      }
+    >
       <Link
         href={url || this.getUrl()}
         className={className}
         rel="noopener noreferrer"
         target="_blank"
-        onClick={shouldRenderAsLink ? () => {} : this.openDrawer}>
+        onClick={shouldRenderAsLink ? () => {} : this.openDrawer}
+      >
         {this.props.children}
       </Link>
     </Tooltip>
@@ -196,7 +199,8 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
       visible={this.state.visible}
       className={cx("help-drawer", drawerClassName)}
       destroyOnClose
-      width={400}>
+      width={400}
+    >
       <div className="drawer-wrapper">
         <div className="drawer-menu">
           {url && (
@@ -69,7 +69,7 @@ UserPreviewCard.defaultProps = {
 // DataSourcePreviewCard

 export function DataSourcePreviewCard({ dataSource, withLink, children, ...props }) {
-  const imageUrl = `static/images/db-logos/${dataSource.type}.png`;
+  const imageUrl = `/static/images/db-logos/${dataSource.type}.png`;
   const title = withLink ? <Link href={"data_sources/" + dataSource.id}>{dataSource.name}</Link> : dataSource.name;
   return (
     <PreviewCard {...props} imageUrl={imageUrl} title={title}>
@@ -96,7 +96,7 @@ function EmptyState({
   }, []);

   // Show if `onboardingMode=false` or any requested step not completed
-  const shouldShow = !onboardingMode || some(keys(isAvailable), step => isAvailable[step] && !isCompleted[step]);
+  const shouldShow = !onboardingMode || some(keys(isAvailable), (step) => isAvailable[step] && !isCompleted[step]);

   if (!shouldShow) {
     return null;
@@ -181,7 +181,7 @@ function EmptyState({
   ];

   const stepsItems = getStepsItems ? getStepsItems(defaultStepsItems) : defaultStepsItems;
-  const imageSource = illustrationPath ? illustrationPath : "static/images/illustrations/" + illustration + ".svg";
+  const imageSource = illustrationPath ? illustrationPath : "/static/images/illustrations/" + illustration + ".svg";

   return (
     <div className="empty-state-wrapper">
@@ -196,7 +196,7 @@ function EmptyState({
       </div>
       <div className="empty-state__steps">
         <h4>Let's get started</h4>
-        <ol>{stepsItems.map(item => item.node)}</ol>
+        <ol>{stepsItems.map((item) => item.node)}</ol>
         {helpMessage}
       </div>
     </div>
@@ -6,6 +6,7 @@ import Link from "@/components/Link";
 import routeWithUserSession from "@/components/ApplicationArea/routeWithUserSession";
 import EmptyState, { EmptyStateHelpMessage } from "@/components/empty-state/EmptyState";
 import DynamicComponent from "@/components/DynamicComponent";
+import BeaconConsent from "@/components/BeaconConsent";
 import PlainButton from "@/components/PlainButton";

 import { axios } from "@/services/axios";
@@ -30,7 +31,8 @@ function DeprecatedEmbedFeatureAlert() {
         <Link
           href="https://discuss.redash.io/t/support-for-parameters-in-embedded-visualizations/3337"
           target="_blank"
-          rel="noopener noreferrer">
+          rel="noopener noreferrer"
+        >
           Read more
         </Link>
         .
@@ -42,7 +44,7 @@ function DeprecatedEmbedFeatureAlert() {

 function EmailNotVerifiedAlert() {
   const verifyEmail = () => {
-    axios.post("verification_email/").then(data => {
+    axios.post("verification_email/").then((data) => {
       notification.success(data.message);
     });
   };
@@ -88,6 +90,7 @@ export default function Home() {
         </DynamicComponent>
         <DynamicComponent name="HomeExtra" />
         <DashboardAndQueryFavoritesList />
+        <BeaconConsent />
       </div>
     </div>
   );
@@ -98,6 +101,6 @@ routes.register(
   routeWithUserSession({
     path: "/",
     title: "Redash",
-    render: pageProps => <Home {...pageProps} />,
+    render: (pageProps) => <Home {...pageProps} />,
   })
 );
@@ -2,7 +2,7 @@ import PropTypes from "prop-types";
 import React from "react";

 export function QuerySourceTypeIcon(props) {
-  return <img src={`static/images/db-logos/${props.type}.png`} width="20" alt={props.alt} />;
+  return <img src={`/static/images/db-logos/${props.type}.png`} width="20" alt={props.alt} />;
 }

 QuerySourceTypeIcon.propTypes = {
@@ -18,7 +18,7 @@ function EmptyState({ title, message, refreshButton }) {
     <div className="query-results-empty-state">
       <div className="empty-state-content">
         <div>
-          <img src="static/images/illustrations/no-query-results.svg" alt="No Query Results Illustration" />
+          <img src="/static/images/illustrations/no-query-results.svg" alt="No Query Results Illustration" />
         </div>
         <h3>{title}</h3>
         <div className="m-b-20">{message}</div>
@@ -40,7 +40,7 @@ EmptyState.defaultProps = {

 function TabWithDeleteButton({ visualizationName, canDelete, onDelete, ...props }) {
   const handleDelete = useCallback(
-    e => {
+    (e) => {
       e.stopPropagation();
       Modal.confirm({
         title: "Delete Visualization",
@@ -111,7 +111,8 @@ export default function QueryVisualizationTabs({
         className="add-visualization-button"
         data-test="NewVisualization"
         type="link"
-        onClick={() => onAddVisualization()}>
+        onClick={() => onAddVisualization()}
+      >
         <i className="fa fa-plus" aria-hidden="true" />
         <span className="m-l-5 hidden-xs">Add Visualization</span>
       </Button>
@@ -119,7 +120,7 @@ export default function QueryVisualizationTabs({
   }

   const orderedVisualizations = useMemo(() => orderBy(visualizations, ["id"]), [visualizations]);
-  const isFirstVisualization = useCallback(visId => visId === orderedVisualizations[0].id, [orderedVisualizations]);
+  const isFirstVisualization = useCallback((visId) => visId === orderedVisualizations[0].id, [orderedVisualizations]);
   const isMobile = useMedia({ maxWidth: 768 });

   const [filters, setFilters] = useState([]);
@@ -132,9 +133,10 @@ export default function QueryVisualizationTabs({
       data-test="QueryPageVisualizationTabs"
       animated={false}
       tabBarGutter={0}
-      onChange={activeKey => onChangeTab(+activeKey)}
-      destroyInactiveTabPane>
-      {orderedVisualizations.map(visualization => (
+      onChange={(activeKey) => onChangeTab(+activeKey)}
+      destroyInactiveTabPane
+    >
+      {orderedVisualizations.map((visualization) => (
         <TabPane
           key={`${visualization.id}`}
           tab={
@@ -144,7 +146,8 @@ export default function QueryVisualizationTabs({
               visualizationName={visualization.name}
               onDelete={() => onDeleteVisualization(visualization.id)}
             />
-          }>
+          }
+        >
           {queryResult ? (
             <VisualizationRenderer
               visualization={visualization}
@@ -1,16 +1,11 @@
 import { useCallback, useMemo, useState } from "react";
-import { reduce } from "lodash";
 import localOptions from "@/lib/localOptions";

-function calculateTokensCount(schema) {
-  return reduce(schema, (totalLength, table) => totalLength + table.columns.length, 0);
-}
-
 export default function useAutocompleteFlags(schema) {
-  const isAvailable = useMemo(() => calculateTokensCount(schema) <= 5000, [schema]);
+  const isAvailable = true;
   const [isEnabled, setIsEnabled] = useState(localOptions.get("liveAutocomplete", true));

-  const toggleAutocomplete = useCallback(state => {
+  const toggleAutocomplete = useCallback((state) => {
     setIsEnabled(state);
     localOptions.set("liveAutocomplete", state);
   }, []);
@@ -0,0 +1,40 @@
+import React from "react";
+import Form from "antd/lib/form";
+import Checkbox from "antd/lib/checkbox";
+import Skeleton from "antd/lib/skeleton";
+import HelpTrigger from "@/components/HelpTrigger";
+import DynamicComponent from "@/components/DynamicComponent";
+import { SettingsEditorPropTypes, SettingsEditorDefaultProps } from "../prop-types";
+
+export default function BeaconConsentSettings(props) {
+  const { values, onChange, loading } = props;
+
+  return (
+    <DynamicComponent name="OrganizationSettings.BeaconConsentSettings" {...props}>
+      <Form.Item
+        label={
+          <span>
+            Anonymous Usage Data Sharing
+            <HelpTrigger className="m-l-5 m-r-5" type="USAGE_DATA_SHARING" />
+          </span>
+        }
+      >
+        {loading ? (
+          <Skeleton title={{ width: 300 }} paragraph={false} active />
+        ) : (
+          <Checkbox
+            name="beacon_consent"
+            checked={values.beacon_consent}
+            onChange={(e) => onChange({ beacon_consent: e.target.checked })}
+          >
+            Help Redash improve by automatically sending anonymous usage data
+          </Checkbox>
+        )}
+      </Form.Item>
+    </DynamicComponent>
+  );
+}
+
+BeaconConsentSettings.propTypes = SettingsEditorPropTypes;
+
+BeaconConsentSettings.defaultProps = SettingsEditorDefaultProps;
@@ -4,6 +4,7 @@ import DynamicComponent from "@/components/DynamicComponent";
 import FormatSettings from "./FormatSettings";
 import PlotlySettings from "./PlotlySettings";
 import FeatureFlagsSettings from "./FeatureFlagsSettings";
+import BeaconConsentSettings from "./BeaconConsentSettings";

 export default function GeneralSettings(props) {
   return (
@@ -13,6 +14,7 @@ export default function GeneralSettings(props) {
       <FormatSettings {...props} />
       <PlotlySettings {...props} />
       <FeatureFlagsSettings {...props} />
+      <BeaconConsentSettings {...props} />
     </DynamicComponent>
   );
 }
@@ -4,19 +4,19 @@ import { fetchDataFromJob } from "@/services/query-result";

 export const SCHEMA_NOT_SUPPORTED = 1;
 export const SCHEMA_LOAD_ERROR = 2;
-export const IMG_ROOT = "static/images/db-logos";
+export const IMG_ROOT = "/static/images/db-logos";

 function mapSchemaColumnsToObject(columns) {
-  return map(columns, column => (isObject(column) ? column : { name: column }));
+  return map(columns, (column) => (isObject(column) ? column : { name: column }));
 }

 const DataSource = {
   query: () => axios.get("api/data_sources"),
   get: ({ id }) => axios.get(`api/data_sources/${id}`),
   types: () => axios.get("api/data_sources/types"),
-  create: data => axios.post(`api/data_sources`, data),
-  save: data => axios.post(`api/data_sources/${data.id}`, data),
-  test: data => axios.post(`api/data_sources/${data.id}/test`),
+  create: (data) => axios.post(`api/data_sources`, data),
+  save: (data) => axios.post(`api/data_sources/${data.id}`, data),
+  test: (data) => axios.post(`api/data_sources/${data.id}/test`),
   delete: ({ id }) => axios.delete(`api/data_sources/${id}`),
   fetchSchema: (data, refresh = false) => {
     const params = {};
@@ -27,15 +27,15 @@ const DataSource = {

     return axios
       .get(`api/data_sources/${data.id}/schema`, { params })
-      .then(data => {
+      .then((data) => {
         if (has(data, "job")) {
-          return fetchDataFromJob(data.job.id).catch(error =>
+          return fetchDataFromJob(data.job.id).catch((error) =>
             error.code === SCHEMA_NOT_SUPPORTED ? [] : Promise.reject(new Error(data.job.error))
           );
         }
         return has(data, "schema") ? data.schema : Promise.reject();
       })
-      .then(tables => map(tables, table => ({ ...table, columns: mapSchemaColumnsToObject(table.columns) })));
+      .then((tables) => map(tables, (table) => ({ ...table, columns: mapSchemaColumnsToObject(table.columns) })));
   },
 };

@@ -63,7 +63,7 @@ function runCypressCI() {
     CYPRESS_OPTIONS, // eslint-disable-line no-unused-vars
   } = process.env;

-  if (GITHUB_REPOSITORY === "getredash/redash") {
+  if (GITHUB_REPOSITORY === "getredash/redash" && process.env.CYPRESS_RECORD_KEY) {
     process.env.CYPRESS_OPTIONS = "--record";
   }

@@ -1,6 +1,6 @@
 {
   "name": "redash-client",
-  "version": "24.11.0-dev",
+  "version": "25.02.0-dev",
   "description": "The frontend part of Redash.",
   "main": "index.js",
   "scripts": {
210
poetry.lock
generated
210
poetry.lock
generated
@@ -1,4 +1,4 @@
|
|||||||
# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
|
# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand.
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "adal"
|
name = "adal"
|
||||||
@@ -974,6 +974,41 @@ sqlalchemy = "*"
|
|||||||
sqlalchemy = ["sqlalchemy (>1.3.21,<2.0)"]
|
sqlalchemy = ["sqlalchemy (>1.3.21,<2.0)"]
|
||||||
superset = ["apache-superset (>=1.4.1)"]
|
superset = ["apache-superset (>=1.4.1)"]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "debugpy"
|
||||||
|
version = "1.8.9"
|
||||||
|
description = "An implementation of the Debug Adapter Protocol for Python"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.8"
|
||||||
|
files = [
|
||||||
|
{file = "debugpy-1.8.9-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:cfe1e6c6ad7178265f74981edf1154ffce97b69005212fbc90ca22ddfe3d017e"},
|
||||||
|
{file = "debugpy-1.8.9-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ada7fb65102a4d2c9ab62e8908e9e9f12aed9d76ef44880367bc9308ebe49a0f"},
|
||||||
|
{file = "debugpy-1.8.9-cp310-cp310-win32.whl", hash = "sha256:c36856343cbaa448171cba62a721531e10e7ffb0abff838004701454149bc037"},
|
||||||
|
{file = "debugpy-1.8.9-cp310-cp310-win_amd64.whl", hash = "sha256:17c5e0297678442511cf00a745c9709e928ea4ca263d764e90d233208889a19e"},
|
||||||
|
{file = "debugpy-1.8.9-cp311-cp311-macosx_14_0_universal2.whl", hash = "sha256:b74a49753e21e33e7cf030883a92fa607bddc4ede1aa4145172debc637780040"},
|
||||||
|
{file = "debugpy-1.8.9-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62d22dacdb0e296966d7d74a7141aaab4bec123fa43d1a35ddcb39bf9fd29d70"},
|
||||||
|
{file = "debugpy-1.8.9-cp311-cp311-win32.whl", hash = "sha256:8138efff315cd09b8dcd14226a21afda4ca582284bf4215126d87342bba1cc66"},
|
||||||
|
{file = "debugpy-1.8.9-cp311-cp311-win_amd64.whl", hash = "sha256:ff54ef77ad9f5c425398efb150239f6fe8e20c53ae2f68367eba7ece1e96226d"},
|
||||||
|
{file = "debugpy-1.8.9-cp312-cp312-macosx_14_0_universal2.whl", hash = "sha256:957363d9a7a6612a37458d9a15e72d03a635047f946e5fceee74b50d52a9c8e2"},
|
||||||
|
{file = "debugpy-1.8.9-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e565fc54b680292b418bb809f1386f17081d1346dca9a871bf69a8ac4071afe"},
|
||||||
|
{file = "debugpy-1.8.9-cp312-cp312-win32.whl", hash = "sha256:3e59842d6c4569c65ceb3751075ff8d7e6a6ada209ceca6308c9bde932bcef11"},
|
||||||
|
{file = "debugpy-1.8.9-cp312-cp312-win_amd64.whl", hash = "sha256:66eeae42f3137eb428ea3a86d4a55f28da9bd5a4a3d369ba95ecc3a92c1bba53"},
|
||||||
|
{file = "debugpy-1.8.9-cp313-cp313-macosx_14_0_universal2.whl", hash = "sha256:957ecffff80d47cafa9b6545de9e016ae8c9547c98a538ee96ab5947115fb3dd"},
|
||||||
|
{file = "debugpy-1.8.9-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1efbb3ff61487e2c16b3e033bc8595aea578222c08aaf3c4bf0f93fadbd662ee"},
|
||||||
|
{file = "debugpy-1.8.9-cp313-cp313-win32.whl", hash = "sha256:7c4d65d03bee875bcb211c76c1d8f10f600c305dbd734beaed4077e902606fee"},
|
||||||
|
{file = "debugpy-1.8.9-cp313-cp313-win_amd64.whl", hash = "sha256:e46b420dc1bea64e5bbedd678148be512442bc589b0111bd799367cde051e71a"},
|
||||||
|
{file = "debugpy-1.8.9-cp38-cp38-macosx_14_0_x86_64.whl", hash = "sha256:472a3994999fe6c0756945ffa359e9e7e2d690fb55d251639d07208dbc37caea"},
|
||||||
|
{file = "debugpy-1.8.9-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:365e556a4772d7d0d151d7eb0e77ec4db03bcd95f26b67b15742b88cacff88e9"},
|
||||||
|
{file = "debugpy-1.8.9-cp38-cp38-win32.whl", hash = "sha256:54a7e6d3014c408eb37b0b06021366ee985f1539e12fe49ca2ee0d392d9ceca5"},
|
||||||
|
{file = "debugpy-1.8.9-cp38-cp38-win_amd64.whl", hash = "sha256:8e99c0b1cc7bf86d83fb95d5ccdc4ad0586d4432d489d1f54e4055bcc795f693"},
|
||||||
|
{file = "debugpy-1.8.9-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:7e8b079323a56f719977fde9d8115590cb5e7a1cba2fcee0986ef8817116e7c1"},
|
||||||
|
{file = "debugpy-1.8.9-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6953b335b804a41f16a192fa2e7851bdcfd92173cbb2f9f777bb934f49baab65"},
|
||||||
|
{file = "debugpy-1.8.9-cp39-cp39-win32.whl", hash = "sha256:7e646e62d4602bb8956db88b1e72fe63172148c1e25c041e03b103a25f36673c"},
|
||||||
|
{file = "debugpy-1.8.9-cp39-cp39-win_amd64.whl", hash = "sha256:3d9755e77a2d680ce3d2c5394a444cf42be4a592caaf246dbfbdd100ffcf7ae5"},
|
||||||
|
{file = "debugpy-1.8.9-py2.py3-none-any.whl", hash = "sha256:cc37a6c9987ad743d9c3a14fa1b1a14b7e4e6041f9dd0c8abf8895fe7a97b899"},
|
||||||
|
{file = "debugpy-1.8.9.zip", hash = "sha256:1339e14c7d980407248f09824d1b25ff5c5616651689f1e0f0e51bdead3ea13e"},
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "defusedxml"
|
name = "defusedxml"
|
||||||
version = "0.7.1"
|
version = "0.7.1"
|
||||||
@@ -1316,6 +1351,45 @@ files = [
|
|||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
python-dateutil = ">=2.7"
|
python-dateutil = ">=2.7"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "fsspec"
|
||||||
|
version = "2024.10.0"
|
||||||
|
description = "File-system specification"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.8"
|
||||||
|
files = [
|
||||||
|
{file = "fsspec-2024.10.0-py3-none-any.whl", hash = "sha256:03b9a6785766a4de40368b88906366755e2819e758b83705c88cd7cb5fe81871"},
|
||||||
|
{file = "fsspec-2024.10.0.tar.gz", hash = "sha256:eda2d8a4116d4f2429db8550f2457da57279247dd930bb12f821b58391359493"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
abfs = ["adlfs"]
|
||||||
|
adl = ["adlfs"]
|
||||||
|
arrow = ["pyarrow (>=1)"]
|
||||||
|
dask = ["dask", "distributed"]
|
||||||
|
dev = ["pre-commit", "ruff"]
|
||||||
|
doc = ["numpydoc", "sphinx", "sphinx-design", "sphinx-rtd-theme", "yarl"]
|
||||||
|
dropbox = ["dropbox", "dropboxdrivefs", "requests"]
|
||||||
|
full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"]
|
||||||
|
fuse = ["fusepy"]
|
||||||
|
gcs = ["gcsfs"]
|
||||||
|
git = ["pygit2"]
|
||||||
|
github = ["requests"]
|
||||||
|
gs = ["gcsfs"]
|
||||||
|
gui = ["panel"]
|
||||||
|
hdfs = ["pyarrow (>=1)"]
|
||||||
|
http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)"]
|
||||||
|
libarchive = ["libarchive-c"]
|
||||||
|
oci = ["ocifs"]
|
||||||
|
s3 = ["s3fs"]
|
||||||
|
sftp = ["paramiko"]
|
||||||
|
smb = ["smbprotocol"]
|
||||||
|
ssh = ["paramiko"]
|
||||||
|
test = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "numpy", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "requests"]
|
||||||
|
test-downstream = ["aiobotocore (>=2.5.4,<3.0.0)", "dask-expr", "dask[dataframe,test]", "moto[server] (>4,<5)", "pytest-timeout", "xarray"]
|
||||||
|
test-full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "cloudpickle", "dask", "distributed", "dropbox", "dropboxdrivefs", "fastparquet", "fusepy", "gcsfs", "jinja2", "kerchunk", "libarchive-c", "lz4", "notebook", "numpy", "ocifs", "pandas", "panel", "paramiko", "pyarrow", "pyarrow (>=1)", "pyftpdlib", "pygit2", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "python-snappy", "requests", "smbprotocol", "tqdm", "urllib3", "zarr", "zstandard"]
|
||||||
|
tqdm = ["tqdm"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "funcy"
|
name = "funcy"
|
||||||
version = "1.13"
|
version = "1.13"
|
||||||
@@ -1988,13 +2062,13 @@ testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"]
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "jinja2"
|
name = "jinja2"
|
||||||
version = "3.1.4"
|
version = "3.1.5"
|
||||||
description = "A very fast and expressive template engine."
|
description = "A very fast and expressive template engine."
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.7"
|
python-versions = ">=3.7"
|
||||||
files = [
|
files = [
|
||||||
{file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"},
|
{file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"},
|
||||||
{file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"},
|
{file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
@@ -2756,13 +2830,13 @@ test = ["hypothesis (>=3.58)", "pytest (>=6.0)", "pytest-xdist"]
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "paramiko"
|
name = "paramiko"
|
||||||
version = "3.4.0"
|
version = "3.4.1"
|
||||||
description = "SSH2 protocol library"
|
description = "SSH2 protocol library"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.6"
|
python-versions = ">=3.6"
|
||||||
files = [
|
files = [
|
||||||
{file = "paramiko-3.4.0-py3-none-any.whl", hash = "sha256:43f0b51115a896f9c00f59618023484cb3a14b98bbceab43394a39c6739b7ee7"},
|
{file = "paramiko-3.4.1-py3-none-any.whl", hash = "sha256:8e49fd2f82f84acf7ffd57c64311aa2b30e575370dc23bdb375b10262f7eac32"},
|
||||||
{file = "paramiko-3.4.0.tar.gz", hash = "sha256:aac08f26a31dc4dffd92821527d1682d99d52f9ef6851968114a8728f3c274d3"},
|
{file = "paramiko-3.4.1.tar.gz", hash = "sha256:8b15302870af7f6652f2e038975c1d2973f06046cb5d7d65355668b3ecbece0c"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
@@ -3076,40 +3150,6 @@ pygments = "*"
|
|||||||
all = ["black"]
|
all = ["black"]
|
||||||
ptipython = ["ipython"]
|
ptipython = ["ipython"]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "ptvsd"
|
|
||||||
version = "4.3.2"
|
|
||||||
description = "Remote debugging server for Python support in Visual Studio and Visual Studio Code"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*"
|
|
||||||
files = [
|
|
||||||
{file = "ptvsd-4.3.2-cp27-cp27m-macosx_10_13_x86_64.whl", hash = "sha256:22b699369a18ff28d4d1aa6a452739e50c7b7790cb16c6312d766e023c12fe27"},
|
|
||||||
{file = "ptvsd-4.3.2-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:3f839fe91d9ddca0d6a3a0afd6a1c824be1768498a737ab9333d084c5c3f3591"},
|
|
||||||
{file = "ptvsd-4.3.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:70260b4591c07bff95566d49b6a5dc3051d8558035c43c847bad9a954def46bb"},
|
|
||||||
{file = "ptvsd-4.3.2-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d2662ec37ee049c0f8f2f9a378abeb7e570d9215c19eaf0a6d7189464195009f"},
|
|
||||||
{file = "ptvsd-4.3.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d9337ebba4d099698982e090b203e85670086c4b29cf1185b2e45cd353a8053e"},
|
|
||||||
{file = "ptvsd-4.3.2-cp34-cp34m-macosx_10_13_x86_64.whl", hash = "sha256:cf09fd4d90c4c42ddd9bf853290f1a80bc2128993a3923bd3b96b68cc1acd03f"},
|
|
||||||
{file = "ptvsd-4.3.2-cp34-cp34m-manylinux1_i686.whl", hash = "sha256:ccc5c533135305709461f545feed5061c608714db38fa0f58e3f848a127b7fde"},
|
|
||||||
{file = "ptvsd-4.3.2-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:de5234bec74c47da668e1a1a21bcc9821af0cbb28b5153df78cd5abc744b29a2"},
|
|
||||||
{file = "ptvsd-4.3.2-cp35-cp35m-macosx_10_13_x86_64.whl", hash = "sha256:c893fb9d1c2ef8f980cc00ced3fd90356f86d9f59b58ee97e0e7e622b8860f76"},
|
|
||||||
{file = "ptvsd-4.3.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2bbc121bce3608501998afbe742f02b80e7d26b8fecd38f78b903f22f52a81d9"},
|
|
||||||
{file = "ptvsd-4.3.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:fad06de012a78f277318d0c308dd3d7cc1f67167f3b2e1e2f7c6caf04c03440c"},
|
|
||||||
{file = "ptvsd-4.3.2-cp35-cp35m-win32.whl", hash = "sha256:92d26aa7c8f7ffe41cb4b50a00846027027fa17acdf2d9dd8c24de77b25166c6"},
|
|
||||||
{file = "ptvsd-4.3.2-cp35-cp35m-win_amd64.whl", hash = "sha256:eda10ecd43daacc180a6fbe524992be76a877c3559e2b78016b4ada8fec10273"},
|
|
||||||
{file = "ptvsd-4.3.2-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:c01204e3f025c3f7252c79c1a8a028246d29e3ef339e1a01ddf652999f47bdea"},
|
|
||||||
{file = "ptvsd-4.3.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c97c71835dde7e67fc7b06398bee1c012559a0784ebda9cf8acaf176c7ae766c"},
|
|
||||||
{file = "ptvsd-4.3.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:612948a045fcf9c8931cd306972902440278f34de7ca684b49d4caeec9f1ec62"},
|
|
||||||
{file = "ptvsd-4.3.2-cp36-cp36m-win32.whl", hash = "sha256:72d114baa5737baf29c8068d1ccdd93cbb332d2030601c888eed0e3761b588d7"},
|
|
||||||
{file = "ptvsd-4.3.2-cp36-cp36m-win_amd64.whl", hash = "sha256:58508485a1609a495dd45829bd6d219303cf9edef5ca1f01a9ed8ffaa87f390c"},
|
|
||||||
{file = "ptvsd-4.3.2-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:20f48ffed42a6beb879c250d82662e175ad59cc46a29c95c6a4472ae413199c5"},
|
|
||||||
{file = "ptvsd-4.3.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:b9970e3dc987eb2a6001af6c9d2f726dd6455cfc6d47e0f51925cbdee7ea2157"},
|
|
||||||
{file = "ptvsd-4.3.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1d3d82ecc82186d099992a748556e6e54037f5c5e4d3fc9bba3e2302354be0d4"},
|
|
||||||
{file = "ptvsd-4.3.2-cp37-cp37m-win32.whl", hash = "sha256:10745fbb788001959b4de405198d8bd5243611a88fb5a2e2c6800245bc0ddd74"},
|
|
||||||
{file = "ptvsd-4.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:90cbd082e7a9089664888d0d94aca760202f080133fca8f3fe65c48ed6b9e39d"},
|
|
||||||
{file = "ptvsd-4.3.2-py2.py3-none-any.whl", hash = "sha256:459137736068bb02515040b2ed2738169cb30d69a38e0fd5dffcba255f41e68d"},
|
|
||||||
{file = "ptvsd-4.3.2.zip", hash = "sha256:3b05c06018fdbce5943c50fb0baac695b5c11326f9e21a5266c854306bda28ab"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pure-sasl"
|
name = "pure-sasl"
|
||||||
version = "0.6.2"
|
version = "0.6.2"
|
||||||
@@ -3151,23 +3191,25 @@ pyasn1 = ">=0.4.6,<0.6.0"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pyathena"
|
name = "pyathena"
|
||||||
version = "1.11.5"
|
version = "2.25.2"
|
||||||
description = "Python DB API 2.0 (PEP 249) client for Amazon Athena"
|
description = "Python DB API 2.0 (PEP 249) client for Amazon Athena"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
|
python-versions = ">=3.7.1,<4.0.0"
|
||||||
files = [
|
files = [
|
||||||
{file = "PyAthena-1.11.5-py2.py3-none-any.whl", hash = "sha256:8cc5d40236993fe5241bb625e78d0a0a149e629b74569a9636b49168448a7ac8"},
|
{file = "pyathena-2.25.2-py3-none-any.whl", hash = "sha256:df7855fec5cc675511431d7c72b814346ebd7e51ed32181ec95847154f79210b"},
|
||||||
{file = "PyAthena-1.11.5.tar.gz", hash = "sha256:86c0f4d10528de44fcd63222506949b010dff36ad57116e4c1274c1cfa9477d0"},
|
{file = "pyathena-2.25.2.tar.gz", hash = "sha256:aebb8254dd7b2a450841ee3552bf443002a2deaed93fae0ae6f4258b5eb2d367"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
boto3 = ">=1.4.4"
|
boto3 = ">=1.26.4"
|
||||||
botocore = ">=1.5.52"
|
botocore = ">=1.29.4"
|
||||||
future = "*"
|
fsspec = "*"
|
||||||
tenacity = ">=4.1.0"
|
tenacity = ">=4.1.0"
|
||||||
|
|
||||||
[package.extras]
|
[package.extras]
|
||||||
pandas = ["pandas (>=0.24.0)", "pyarrow (>=0.15.0)"]
|
arrow = ["pyarrow (>=7.0.0)"]
|
||||||
|
fastparquet = ["fastparquet (>=0.4.0)"]
|
||||||
|
pandas = ["pandas (>=1.3.0)"]
|
||||||
sqlalchemy = ["sqlalchemy (>=1.0.0,<2.0.0)"]
|
sqlalchemy = ["sqlalchemy (>=1.0.0,<2.0.0)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -4602,56 +4644,56 @@ files = [

 [[package]]
 name = "snowflake-connector-python"
-version = "3.12.3"
+version = "3.13.1"
 description = "Snowflake Connector for Python"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "snowflake_connector_python-3.12.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:497a096fc379ef0846b2f1cf11a8d7620f0d090f08a77d9e93473845014d57d1"},
+    {file = "snowflake_connector_python-3.13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b636641df38b7b951f62a7f53a6444576bbbadddd2d73615f7ceade4e79b32d7"},
-    {file = "snowflake_connector_python-3.12.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:055c5808d524497213e4cc9ae91ec3e46cb8342b314e78bc3e139d733dc16741"},
+    {file = "snowflake_connector_python-3.13.1-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:bb7b617de91a74ba69057f4b78ef685dfd14c18fc5208861c0bf4d733fb80b7a"},
-    {file = "snowflake_connector_python-3.12.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a5dc512d62ef693041ed2ad82931231caddc16e14ffc2842da3e3dd4240b83d"},
+    {file = "snowflake_connector_python-3.13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ab4707f2d7bf2e4202c50b5c6250bff1fcbab471b1d82c2608c0adafd3970dc"},
-    {file = "snowflake_connector_python-3.12.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a46448f7279d444084eb84a9cddea67662e80ccfaddf41713b9e9aab2b1242e9"},
+    {file = "snowflake_connector_python-3.13.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c15e00c0972233884d6b881efcdb5467a415fea1aa094a55985dc5aad66a3711"},
-    {file = "snowflake_connector_python-3.12.3-cp310-cp310-win_amd64.whl", hash = "sha256:821b774b77129ce9f03729456ac1f21d69fedb50e5ce957178131c7bb3d8279f"},
+    {file = "snowflake_connector_python-3.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:05be6d96a4b771c1c81be947f6ca5c8b22925f4e415cc9d0c05dd0bdfaaee5cd"},
-    {file = "snowflake_connector_python-3.12.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:82290134978d11628026b447052219ce8d880e36937204f1f0332dfc3f2e92e9"},
+    {file = "snowflake_connector_python-3.13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e51c3b0ca8b964c96e8592112422cb038d22bc4f94c9443a17871225df9f0de4"},
-    {file = "snowflake_connector_python-3.12.3-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:20b5c8000ee9cee11b0f9a6ae26640f0d498ce77f7e2ec649a2f0d306523792d"},
+    {file = "snowflake_connector_python-3.13.1-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:4f34b805cae831ab8791599c513b0bd65186981ae5b7d0e22001922dcb3a29c5"},
-    {file = "snowflake_connector_python-3.12.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca6500d16bdbd37da88e589cc3e82b90272471d3aabfe4a79ec1cf4696675acf"},
+    {file = "snowflake_connector_python-3.13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bdbf58f625bb126a15112d3e6e35c68c2b5a7309a8faa173b3e80a284c4499e"},
-    {file = "snowflake_connector_python-3.12.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b455ba117a68da436e253899674fae1a93669eaefdde8a903c03eb65b7e87c86"},
+    {file = "snowflake_connector_python-3.13.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51938948af9fda9d53acfce2e440963479c8b9cd0d05f8cbed06bd42bdf9c7ac"},
-    {file = "snowflake_connector_python-3.12.3-cp311-cp311-win_amd64.whl", hash = "sha256:205219fcaeee2d33db5d0d023d60518e3bd8272ce1679be2199d7f362d255054"},
+    {file = "snowflake_connector_python-3.13.1-cp311-cp311-win_amd64.whl", hash = "sha256:8e4bb3f3a9dd5b5c516e6414c5991787715da5067a3d6d5ccdd2d124c56cdfef"},
-    {file = "snowflake_connector_python-3.12.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3d830ca32c864b730cba5d92900d850752199635c4fb0ae0a70ee677f62aee70"},
+    {file = "snowflake_connector_python-3.13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f856fc29e7bacdec6ffa449de1d91da50637ba8d9fa675cec640f5853b2a79ba"},
-    {file = "snowflake_connector_python-3.12.3-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:597b0c74ec57ba693191ae2de8db9536e349ee32cab152df657473e498b6fd87"},
+    {file = "snowflake_connector_python-3.13.1-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:ba9bf43425e8938d7bdc0f0d9488783846e810af8173a1a35e642b6795180ca0"},
-    {file = "snowflake_connector_python-3.12.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2215d8a4c5e25ea0d2183fe693c3fdf058cd6035e5c84710d532dc04ab4ffd31"},
+    {file = "snowflake_connector_python-3.13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1540099913bafbccf7f2ca4b0e49893ee44f0645c94ec59f7c7c147085e64d5"},
-    {file = "snowflake_connector_python-3.12.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8ba9c261904c1ba7cae6035c7881224cf979da39c8b7c7cb10236fdfc57e505"},
+    {file = "snowflake_connector_python-3.13.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8ee93ed3e89452b411cfcbe22efcf4796910ca014add2aa9bae0e8648e3b207"},
-    {file = "snowflake_connector_python-3.12.3-cp312-cp312-win_amd64.whl", hash = "sha256:f0d0fcb948ef0812ab162ec9767622f345554043a07439c0c1a9474c86772320"},
+    {file = "snowflake_connector_python-3.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:0a55795f2d737adf89dbbc7829c971d44742fd3c0bf0401314a26178cdcde700"},
-    {file = "snowflake_connector_python-3.12.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fe742a0b2fb1c79a21e95b97c49a05783bc00314d1184d227c5fe5b57688af12"},
+    {file = "snowflake_connector_python-3.13.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2435e630d9cec64574045c38d3f1c795ee91b157dae12187c585318dcb42572d"},
-    {file = "snowflake_connector_python-3.12.3-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:a8584a44a6bb41d2056cf1b833e629c76e28c5303d2c875c1a23bda46a1cd43a"},
+    {file = "snowflake_connector_python-3.13.1-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:ad2646fb1704f43867718fa1b2b5d5b5ca8352b4fef2ef7f19bb18bd5b9ec5bd"},
-    {file = "snowflake_connector_python-3.12.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd990db8e4886c32ba5c63758e8dc4814e2e75f5fd3fe79d43f7e5ee0fc46793"},
+    {file = "snowflake_connector_python-3.13.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d56d43e5ddacc63b3b5e87bd98ee206da52388aac4a6e0d460e225430c5f1897"},
-    {file = "snowflake_connector_python-3.12.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4fe7f91f6e44bda877e77403a586d7487ca2c52dc1a32a705b2fea33f9c763a"},
+    {file = "snowflake_connector_python-3.13.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f32e0dcce63ff9ebdcad0a3c24465232a9667d38071c048b116b0bef85812a5"},
-    {file = "snowflake_connector_python-3.12.3-cp38-cp38-win_amd64.whl", hash = "sha256:4994e95eff593dc44c28243ef0ae8d27b8b1aeb96dd64cbcea5bcf0e4dfb77fb"},
+    {file = "snowflake_connector_python-3.13.1-cp38-cp38-win_amd64.whl", hash = "sha256:8dfa6398a9424fad23fe32b0db5e27553453f8062e2ea498b8d81ff825791e6c"},
-    {file = "snowflake_connector_python-3.12.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ac33a7dd54b35f94c4b91369971dbd6467a914dff4b01c46e77e7e6901d7eca4"},
+    {file = "snowflake_connector_python-3.13.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:622484c3eedb9f4deb2a4f5124e25580bc43c39220e3c7ec01e18898e9a8020e"},
-    {file = "snowflake_connector_python-3.12.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:a26876322811fe2b93f6d814dcfe016f1df680a12624026ecf57a6bcdf20f969"},
+    {file = "snowflake_connector_python-3.13.1-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:8fdf757ca07ef4241bcc9d82123d104ab1cdb8525421ff56dab6d86802a59b81"},
-    {file = "snowflake_connector_python-3.12.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c0bb390be2e15b6b7cccab7fbe1ef94e1e9ab13790c974aa44761298cdc2641"},
+    {file = "snowflake_connector_python-3.13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcce282b3ac906063df7e16678b0c829b44384070b63c01a643fe67cd82343a"},
-    {file = "snowflake_connector_python-3.12.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7340f73af4ae72e6af8fe28a1b8e196a0c99943071afc96ce419efb4da80035"},
+    {file = "snowflake_connector_python-3.13.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60401ab607be786081a0ba02128b230ab5c6afa7babaa215a327943dc0e293ac"},
-    {file = "snowflake_connector_python-3.12.3-cp39-cp39-win_amd64.whl", hash = "sha256:c314749bd0151218b654a7d4646a39067ab650bdc86dfebb1884b056b0bdb4b4"},
+    {file = "snowflake_connector_python-3.13.1-cp39-cp39-win_amd64.whl", hash = "sha256:afcae3b8a38ba24ec0ca65208f282c234add7624cecbf968a564ebd2324a3287"},
-    {file = "snowflake_connector_python-3.12.3.tar.gz", hash = "sha256:02873c7f7a3b10322e28dddc2be6907f8ab8ecad93d6d6af14c77c2f53091b88"},
+    {file = "snowflake_connector_python-3.13.1.tar.gz", hash = "sha256:6d0f515f24efb58c7dae26ac681dac032cb4d0c94a1cb676a50b5d41c812b5b2"},
 ]

 [package.dependencies]
 asn1crypto = ">0.24.0,<2.0.0"
 certifi = ">=2017.4.17"
 cffi = ">=1.9,<2.0.0"
-charset-normalizer = ">=2,<4"
+charset_normalizer = ">=2,<4"
 cryptography = ">=3.1.0"
 filelock = ">=3.5,<4"
 idna = ">=2.5,<4"
 packaging = "*"
 platformdirs = ">=2.6.0,<5.0.0"
 pyjwt = "<3.0.0"
-pyOpenSSL = ">=16.2.0,<25.0.0"
+pyOpenSSL = ">=22.0.0,<25.0.0"
 pytz = "*"
 requests = "<3.0.0"
 sortedcontainers = ">=2.4.0"
 tomlkit = "*"
-typing-extensions = ">=4.3,<5"
+typing_extensions = ">=4.3,<5"
 urllib3 = {version = ">=1.21.1,<2.0.0", markers = "python_version < \"3.10\""}

 [package.extras]
@@ -5112,13 +5154,13 @@ six = ">=1.10.0"

 [[package]]
 name = "virtualenv"
-version = "20.25.0"
+version = "20.26.6"
 description = "Virtual Python Environment builder"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "virtualenv-20.25.0-py3-none-any.whl", hash = "sha256:4238949c5ffe6876362d9c0180fc6c3a824a7b12b80604eeb8085f2ed7460de3"},
+    {file = "virtualenv-20.26.6-py3-none-any.whl", hash = "sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2"},
-    {file = "virtualenv-20.25.0.tar.gz", hash = "sha256:bf51c0d9c7dd63ea8e44086fa1e4fb1093a31e963b86959257378aef020e1f1b"},
+    {file = "virtualenv-20.26.6.tar.gz", hash = "sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48"},
 ]

 [package.dependencies]
@@ -5127,7 +5169,7 @@ filelock = ">=3.12.2,<4"
 platformdirs = ">=3.9.1,<5"

 [package.extras]
-docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
+docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
 test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"]

 [[package]]
@@ -5451,4 +5493,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.8,<3.11"
-content-hash = "00eb72e7f054606807de9f5dc727b446684a22ec7d450e18b5be9592ef017924"
+content-hash = "3ca1687df8b492af64ad73038d700cdabbd9ff4e3adda001ed7ca5976492de5e"
@@ -12,7 +12,7 @@ force-exclude = '''

 [tool.poetry]
 name = "redash"
-version = "24.11.0-dev"
+version = "25.02.0-dev"
 description = "Make Your Company Data Driven. Connect to any data source, easily visualize, dashboard and share your data."
 authors = ["Arik Fraimovich <arik@redash.io>"]
 # to be added to/removed from the mailing list, please reach out to Arik via the above email or Discord
@@ -46,7 +46,7 @@ greenlet = "2.0.2"
 gunicorn = "22.0.0"
 httplib2 = "0.19.0"
 itsdangerous = "2.1.2"
-jinja2 = "3.1.4"
+jinja2 = "3.1.5"
 jsonschema = "3.1.1"
 markupsafe = "2.1.1"
 maxminddb-geolite2 = "2018.703"
@@ -86,6 +86,8 @@ wtforms = "2.2.1"
 xlsxwriter = "1.2.2"
 tzlocal = "4.3.1"
 pyodbc = "5.1.0"
+debugpy = "^1.8.9"
+paramiko = "3.4.1"

 [tool.poetry.group.all_ds]
 optional = true
@@ -116,7 +118,7 @@ pandas = "1.3.4"
 phoenixdb = "0.7"
 pinotdb = ">=0.4.5"
 protobuf = "3.20.2"
-pyathena = ">=1.5.0,<=1.11.5"
+pyathena = "2.25.2"
 pydgraph = "2.0.2"
 pydruid = "0.5.7"
 pyexasol = "0.12.0"
@@ -130,7 +132,7 @@ python-rapidjson = "1.20"
 requests-aws-sign = "0.1.5"
 sasl = ">=0.1.3"
 simple-salesforce = "0.74.3"
-snowflake-connector-python = "3.12.3"
+snowflake-connector-python = "3.13.1"
 td-client = "1.0.0"
 thrift = ">=0.8.0"
 thrift-sasl = ">=0.1.0"
@@ -156,7 +158,6 @@ jwcrypto = "1.5.6"
 mock = "5.0.2"
 pre-commit = "3.3.3"
 ptpython = "3.0.23"
-ptvsd = "4.3.2"
 pytest-cov = "4.1.0"
 watchdog = "3.0.0"
 ruff = "0.0.289"
@@ -14,13 +14,14 @@ from redash.app import create_app # noqa
 from redash.destinations import import_destinations
 from redash.query_runner import import_query_runners

-__version__ = "24.11.0-dev"
+__version__ = "25.02.0-dev"


 if os.environ.get("REMOTE_DEBUG"):
-    import ptvsd
+    import debugpy

-    ptvsd.enable_attach(address=("0.0.0.0", 5678))
+    debugpy.listen(("0.0.0.0", 5678))
+    debugpy.wait_for_client()


 def setup_logging():
@@ -36,10 +36,14 @@ def create_app():
     from .metrics import request as request_metrics
     from .models import db, users
     from .utils import sentry
+    from .version_check import reset_new_version_status

     sentry.init()
     app = Redash()

+    # Check and update the cached version for use by the client
+    reset_new_version_status()
+
     security.init_app(app)
     request_metrics.init_app(app)
     db.init_app(app)
@@ -1,3 +1,5 @@
+import html
+import json
 import logging
 from copy import deepcopy

@@ -37,6 +39,129 @@ class Webex(BaseDestination):

     @staticmethod
     def formatted_attachments_template(subject, description, query_link, alert_link):
+        # Attempt to parse the description to find a 2D array
+        try:
+            # Extract the part of the description that looks like a JSON array
+            start_index = description.find("[")
+            end_index = description.rfind("]") + 1
+            json_array_str = description[start_index:end_index]
+
+            # Decode HTML entities
+            json_array_str = html.unescape(json_array_str)
+
+            # Replace single quotes with double quotes for valid JSON
+            json_array_str = json_array_str.replace("'", '"')
+
+            # Load the JSON array
+            data_array = json.loads(json_array_str)
+
+            # Check if it's a 2D array
+            if isinstance(data_array, list) and all(isinstance(i, list) for i in data_array):
+                # Create a table for the Adaptive Card
+                table_rows = []
+                for row in data_array:
+                    table_rows.append(
+                        {
+                            "type": "ColumnSet",
+                            "columns": [
+                                {"type": "Column", "items": [{"type": "TextBlock", "text": str(item), "wrap": True}]}
+                                for item in row
+                            ],
+                        }
+                    )
+
+                # Create the body of the card with the table
+                body = (
+                    [
+                        {
+                            "type": "TextBlock",
+                            "text": f"{subject}",
+                            "weight": "bolder",
+                            "size": "medium",
+                            "wrap": True,
+                        },
+                        {
+                            "type": "TextBlock",
+                            "text": f"{description[:start_index]}",
+                            "isSubtle": True,
+                            "wrap": True,
+                        },
+                    ]
+                    + table_rows
+                    + [
+                        {
+                            "type": "TextBlock",
+                            "text": f"Click [here]({query_link}) to check your query!",
+                            "wrap": True,
+                            "isSubtle": True,
+                        },
+                        {
+                            "type": "TextBlock",
+                            "text": f"Click [here]({alert_link}) to check your alert!",
+                            "wrap": True,
+                            "isSubtle": True,
+                        },
+                    ]
+                )
+            else:
+                # Fallback to the original description if no valid 2D array is found
+                body = [
+                    {
+                        "type": "TextBlock",
+                        "text": f"{subject}",
+                        "weight": "bolder",
+                        "size": "medium",
+                        "wrap": True,
+                    },
+                    {
+                        "type": "TextBlock",
+                        "text": f"{description}",
+                        "isSubtle": True,
+                        "wrap": True,
+                    },
+                    {
+                        "type": "TextBlock",
+                        "text": f"Click [here]({query_link}) to check your query!",
+                        "wrap": True,
+                        "isSubtle": True,
+                    },
+                    {
+                        "type": "TextBlock",
+                        "text": f"Click [here]({alert_link}) to check your alert!",
+                        "wrap": True,
+                        "isSubtle": True,
+                    },
+                ]
+        except json.JSONDecodeError:
+            # If parsing fails, fallback to the original description
+            body = [
+                {
+                    "type": "TextBlock",
+                    "text": f"{subject}",
+                    "weight": "bolder",
+                    "size": "medium",
+                    "wrap": True,
+                },
+                {
+                    "type": "TextBlock",
+                    "text": f"{description}",
+                    "isSubtle": True,
+                    "wrap": True,
+                },
+                {
+                    "type": "TextBlock",
+                    "text": f"Click [here]({query_link}) to check your query!",
+                    "wrap": True,
+                    "isSubtle": True,
+                },
+                {
+                    "type": "TextBlock",
+                    "text": f"Click [here]({alert_link}) to check your alert!",
+                    "wrap": True,
+                    "isSubtle": True,
+                },
+            ]
+
         return [
             {
                 "contentType": "application/vnd.microsoft.card.adaptive",
@@ -44,44 +169,7 @@ class Webex(BaseDestination):
                     "$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
                     "type": "AdaptiveCard",
                     "version": "1.0",
-                    "body": [
-                        {
-                            "type": "ColumnSet",
-                            "columns": [
-                                {
-                                    "type": "Column",
-                                    "width": 4,
-                                    "items": [
-                                        {
-                                            "type": "TextBlock",
-                                            "text": {subject},
-                                            "weight": "bolder",
-                                            "size": "medium",
-                                            "wrap": True,
-                                        },
-                                        {
-                                            "type": "TextBlock",
-                                            "text": {description},
-                                            "isSubtle": True,
-                                            "wrap": True,
-                                        },
-                                        {
-                                            "type": "TextBlock",
-                                            "text": f"Click [here]({query_link}) to check your query!",
-                                            "wrap": True,
-                                            "isSubtle": True,
-                                        },
-                                        {
-                                            "type": "TextBlock",
-                                            "text": f"Click [here]({alert_link}) to check your alert!",
-                                            "wrap": True,
-                                            "isSubtle": True,
-                                        },
-                                    ],
-                                },
-                            ],
-                        }
-                    ],
+                    "body": body,
                 },
             }
         ]
@@ -116,6 +204,10 @@ class Webex(BaseDestination):

         # destinations is guaranteed to be a comma-separated string
         for destination_id in destinations.split(","):
+            destination_id = destination_id.strip()  # Remove any leading or trailing whitespace
+            if not destination_id:  # Check if the destination_id is empty or blank
+                continue  # Skip to the next iteration if it's empty or blank
+
             payload = deepcopy(template_payload)
             payload[payload_tag] = destination_id
             self.post_message(payload, headers)
@@ -15,6 +15,7 @@ from redash.authentication.account import (
 )
 from redash.handlers import routes
 from redash.handlers.base import json_response, org_scoped_rule
+from redash.version_check import get_latest_version

 logger = logging.getLogger(__name__)

@@ -256,11 +257,15 @@ def number_format_config():

 def client_config():
     if not current_user.is_api_user() and current_user.is_authenticated:
-        client_config_inner = {
+        client_config = {
+            "newVersionAvailable": bool(get_latest_version()),
             "version": __version__,
         }
     else:
-        client_config_inner = {}
+        client_config = {}
+
+    if current_user.has_permission("admin") and current_org.get_setting("beacon_consent") is None:
+        client_config["showBeaconConsentMessage"] = True

     defaults = {
         "allowScriptsInUserInput": settings.ALLOW_SCRIPTS_IN_USER_INPUT,
@@ -280,12 +285,12 @@ def client_config():
         "tableCellMaxJSONSize": settings.TABLE_CELL_MAX_JSON_SIZE,
     }

-    client_config_inner.update(defaults)
+    client_config.update(defaults)
-    client_config_inner.update({"basePath": base_href()})
+    client_config.update({"basePath": base_href()})
-    client_config_inner.update(date_time_format_config())
+    client_config.update(date_time_format_config())
-    client_config_inner.update(number_format_config())
+    client_config.update(number_format_config())

-    return client_config_inner
+    return client_config


 def messages():
@@ -1,12 +1,13 @@
 from flask import g, redirect, render_template, request, url_for
 from flask_login import login_user
-from wtforms import Form, PasswordField, StringField, validators
+from wtforms import BooleanField, Form, PasswordField, StringField, validators
 from wtforms.fields.html5 import EmailField

 from redash import settings
 from redash.authentication.org_resolving import current_org
 from redash.handlers.base import routes
 from redash.models import Group, Organization, User, db
+from redash.tasks.general import subscribe


 class SetupForm(Form):
@@ -14,6 +15,8 @@ class SetupForm(Form):
     email = EmailField("Email Address", validators=[validators.Email()])
     password = PasswordField("Password", validators=[validators.Length(6)])
     org_name = StringField("Organization Name", validators=[validators.InputRequired()])
+    security_notifications = BooleanField()
+    newsletter = BooleanField()


 def create_org(org_name, user_name, email, password):
@@ -54,6 +57,8 @@ def setup():
         return redirect("/")

     form = SetupForm(request.form)
+    form.newsletter.data = True
+    form.security_notifications.data = True

     if request.method == "POST" and form.validate():
         default_org, user = create_org(form.org_name.data, form.name.data, form.email.data, form.password.data)
@@ -61,6 +66,10 @@ def setup():
         g.org = default_org
         login_user(user)

+        # signup to newsletter if needed
+        if form.newsletter.data or form.security_notifications:
+            subscribe.delay(form.data)
+
         return redirect(url_for("redash.index", org_slug=None))

     return render_template("setup.html", form=form)
@@ -969,6 +969,7 @@ class Alert(TimestampMixin, BelongsToOrgMixin, db.Model):

     def evaluate(self):
         data = self.query_rel.latest_query_data.data if self.query_rel.latest_query_data else None
+        new_state = self.UNKNOWN_STATE

         if data and data["rows"] and self.options["column"] in data["rows"][0]:
             op = OPERATORS.get(self.options["op"], lambda v, t: False)
@@ -997,9 +998,8 @@ class Alert(TimestampMixin, BelongsToOrgMixin, db.Model):

             threshold = self.options["value"]

-            new_state = next_state(op, value, threshold)
-        else:
-            new_state = self.UNKNOWN_STATE
+            if value is not None:
+                new_state = next_state(op, value, threshold)

         return new_state

@@ -90,15 +90,26 @@ class Athena(BaseQueryRunner):
                 "title": "Athena cost per Tb scanned (USD)",
                 "default": 5,
             },
+            "result_reuse_enable": {
+                "type": "boolean",
+                "title": "Reuse Athena query results",
+            },
+            "result_reuse_minutes": {
+                "type": "number",
+                "title": "Minutes to reuse Athena query results",
+                "default": 60,
+            },
         },
         "required": ["region", "s3_staging_dir"],
-        "extra_options": ["glue", "catalog_ids", "cost_per_tb"],
+        "extra_options": ["glue", "catalog_ids", "cost_per_tb", "result_reuse_enable", "result_reuse_minutes"],
         "order": [
             "region",
             "s3_staging_dir",
             "schema",
             "work_group",
             "cost_per_tb",
+            "result_reuse_enable",
+            "result_reuse_minutes",
         ],
         "secret": ["aws_secret_key"],
     }
@@ -247,6 +258,8 @@ class Athena(BaseQueryRunner):
             kms_key=self.configuration.get("kms_key", None),
             work_group=self.configuration.get("work_group", "primary"),
             formatter=SimpleFormatter(),
+            result_reuse_enable=self.configuration.get("result_reuse_enable", False),
+            result_reuse_minutes=self.configuration.get("result_reuse_minutes", 60),
             **self._get_iam_credentials(user=user),
         ).cursor()

@@ -7,6 +7,7 @@ from base64 import b64decode
 from redash import settings
 from redash.query_runner import (
     TYPE_BOOLEAN,
+    TYPE_DATE,
     TYPE_DATETIME,
     TYPE_FLOAT,
     TYPE_INTEGER,
@@ -37,6 +38,8 @@ types_map = {
     "BOOLEAN": TYPE_BOOLEAN,
     "STRING": TYPE_STRING,
     "TIMESTAMP": TYPE_DATETIME,
+    "DATETIME": TYPE_DATETIME,
+    "DATE": TYPE_DATE,
 }


@@ -301,7 +304,7 @@ class BigQuery(BaseQueryRunner):
         datasets = self._get_project_datasets(project_id)

         query_base = """
-        SELECT table_schema, table_name, field_path
+        SELECT table_schema, table_name, field_path, data_type
         FROM `{dataset_id}`.INFORMATION_SCHEMA.COLUMN_FIELD_PATHS
         WHERE table_schema NOT IN ('information_schema')
        """
@@ -322,7 +325,7 @@ class BigQuery(BaseQueryRunner):
             table_name = "{0}.{1}".format(row["table_schema"], row["table_name"])
             if table_name not in schema:
                 schema[table_name] = {"name": table_name, "columns": []}
-            schema[table_name]["columns"].append(row["field_path"])
+            schema[table_name]["columns"].append({"name": row["field_path"], "type": row["data_type"]})

         return list(schema.values())

@@ -91,8 +91,8 @@ class BaseElasticSearch(BaseQueryRunner):

         logger.setLevel(logging.DEBUG)

-        self.server_url = self.configuration["server"]
+        self.server_url = self.configuration.get("server", "")
-        if self.server_url[-1] == "/":
+        if self.server_url and self.server_url[-1] == "/":
             self.server_url = self.server_url[:-1]

         basic_auth_user = self.configuration.get("basic_auth_user", None)
@@ -188,7 +188,7 @@ class MongoDB(BaseQueryRunner):

         self.syntax = "json"

-        self.db_name = self.configuration["dbName"]
+        self.db_name = self.configuration.get("dbName", "")

         self.is_replica_set = (
             True if "replicaSetName" in self.configuration and self.configuration["replicaSetName"] else False
@@ -55,12 +55,13 @@ class Script(BaseQueryRunner):
     def __init__(self, configuration):
         super(Script, self).__init__(configuration)

+        path = self.configuration.get("path", "")
         # If path is * allow any execution path
-        if self.configuration["path"] == "*":
+        if path == "*":
             return

         # Poor man's protection against running scripts from outside the scripts directory
-        if self.configuration["path"].find("../") > -1:
+        if path.find("../") > -1:
             raise ValueError("Scripts can only be run from the configured scripts directory")

     def test_connection(self):
@@ -28,7 +28,7 @@ class Sqlite(BaseSQLQueryRunner):
     def __init__(self, configuration):
         super(Sqlite, self).__init__(configuration)

-        self._dbpath = self.configuration["dbpath"]
+        self._dbpath = self.configuration.get("dbpath", "")

     def _get_tables(self, schema):
         query_table = "select tbl_name from sqlite_master where type='table'"
@@ -413,6 +413,7 @@ PAGE_SIZE_OPTIONS = list(
 TABLE_CELL_MAX_JSON_SIZE = int(os.environ.get("REDASH_TABLE_CELL_MAX_JSON_SIZE", 50000))

 # Features:
+VERSION_CHECK = parse_boolean(os.environ.get("REDASH_VERSION_CHECK", "true"))
 FEATURE_DISABLE_REFRESH_QUERIES = parse_boolean(os.environ.get("REDASH_FEATURE_DISABLE_REFRESH_QUERIES", "false"))
 FEATURE_SHOW_QUERY_RESULTS_COUNT = parse_boolean(os.environ.get("REDASH_FEATURE_SHOW_QUERY_RESULTS_COUNT", "true"))
 FEATURE_ALLOW_CUSTOM_JS_VISUALIZATIONS = parse_boolean(
@@ -45,6 +45,7 @@ HIDE_PLOTLY_MODE_BAR = parse_boolean(os.environ.get("HIDE_PLOTLY_MODE_BAR", "fal
 DISABLE_PUBLIC_URLS = parse_boolean(os.environ.get("REDASH_DISABLE_PUBLIC_URLS", "false"))

 settings = {
+    "beacon_consent": None,
     "auth_password_login_enabled": PASSWORD_LOGIN_ENABLED,
     "auth_saml_enabled": SAML_LOGIN_ENABLED,
     "auth_saml_type": SAML_LOGIN_TYPE,
@@ -7,6 +7,7 @@ from redash.tasks.general import (
     record_event,
     send_mail,
     sync_user_details,
+    version_check,
 )
 from redash.tasks.queries import (
     cleanup_query_results,
@@ -5,6 +5,7 @@ from redash import mail, models, settings
 from redash.models import users
 from redash.query_runner import NotSupported
 from redash.tasks.worker import Queue
+from redash.version_check import run_version_check
 from redash.worker import get_job_logger, job

 logger = get_job_logger(__name__)
@@ -29,6 +30,27 @@ def record_event(raw_event):
             logger.exception("Failed posting to %s", hook)


+def version_check():
+    run_version_check()
+
+
+@job("default")
+def subscribe(form):
+    logger.info(
+        "Subscribing to: [security notifications=%s], [newsletter=%s]",
+        form["security_notifications"],
+        form["newsletter"],
+    )
+    data = {
+        "admin_name": form["name"],
+        "admin_email": form["email"],
+        "org_name": form["org_name"],
+        "security_notifications": form["security_notifications"],
+        "newsletter": form["newsletter"],
+    }
+    requests.post("https://version.redash.io/subscribe", json=data)
+
+
 @job("emails")
 def send_mail(to, subject, html, text):
     try:
@@ -8,7 +8,7 @@ from rq_scheduler import Scheduler

 from redash import rq_redis_connection, settings
 from redash.tasks.failure_report import send_aggregated_errors
-from redash.tasks.general import sync_user_details
+from redash.tasks.general import sync_user_details, version_check
 from redash.tasks.queries import (
     cleanup_query_results,
     empty_schedules,
@@ -79,6 +79,9 @@ def periodic_job_definitions():
         },
     ]

+    if settings.VERSION_CHECK:
+        jobs.append({"func": version_check, "interval": timedelta(days=1)})
+
     if settings.QUERY_RESULTS_CLEANUP_ENABLED:
         jobs.append({"func": cleanup_query_results, "interval": timedelta(minutes=5)})

@@ -42,6 +42,20 @@
             {{ render_field(form.email) }}
             {{ render_field(form.password) }}

+            <div class="checkbox">
+              <label>
+                {{ form.security_notifications() }}
+                Subscribe to Security Notifications
+              </label>
+            </div>
+
+            <div class="checkbox">
+              <label>
+                {{ form.newsletter() }}
+                Subscribe to newsletter (version updates, no more than once a month)
+              </label>
+            </div>
+
             <h4 class="m-t-25">General</h4>

             {{ render_field(form.org_name, help_block="Used in email notifications and the UI.") }}
103
redash/version_check.py
Normal file
@@ -0,0 +1,103 @@
+import logging
+
+import requests
+import semver
+
+from redash import __version__ as current_version
+from redash import redis_connection
+from redash.models import Organization, db
+
+REDIS_KEY = "new_version_available"
+
+
+def usage_data():
+    counts_query = """
+    SELECT 'users_count' as name, count(0) as value
+    FROM users
+    WHERE disabled_at is null
+
+    UNION ALL
+
+    SELECT 'queries_count' as name, count(0) as value
+    FROM queries
+    WHERE is_archived is false
+
+    UNION ALL
+
+    SELECT 'alerts_count' as name, count(0) as value
+    FROM alerts
+
+    UNION ALL
+
+    SELECT 'dashboards_count' as name, count(0) as value
+    FROM dashboards
+    WHERE is_archived is false
+
+    UNION ALL
+
+    SELECT 'widgets_count' as name, count(0) as value
+    FROM widgets
+    WHERE visualization_id is not null
+
+    UNION ALL
+
+    SELECT 'textbox_count' as name, count(0) as value
+    FROM widgets
+    WHERE visualization_id is null
+    """
+
+    data_sources_query = "SELECT type, count(0) FROM data_sources GROUP by 1"
+    visualizations_query = "SELECT type, count(0) FROM visualizations GROUP by 1"
+    destinations_query = "SELECT type, count(0) FROM notification_destinations GROUP by 1"
+
+    data = {name: value for (name, value) in db.session.execute(counts_query)}
+    data["data_sources"] = {name: value for (name, value) in db.session.execute(data_sources_query)}
+    data["visualization_types"] = {name: value for (name, value) in db.session.execute(visualizations_query)}
+    data["destination_types"] = {name: value for (name, value) in db.session.execute(destinations_query)}
+
+    return data
+
+
+def run_version_check():
+    logging.info("Performing version check.")
+    logging.info("Current version: %s", current_version)
+
+    data = {"current_version": current_version}
+
+    if Organization.query.first().get_setting("beacon_consent"):
+        data["usage"] = usage_data()
+
+    try:
+        response = requests.post(
+            "https://version.redash.io/api/report?channel=stable",
+            json=data,
+            timeout=3.0,
+        )
+        latest_version = response.json()["release"]["version"]
+
+        _compare_and_update(latest_version)
+    except requests.RequestException:
+        logging.exception("Failed checking for new version.")
+    except (ValueError, KeyError):
+        logging.exception("Failed checking for new version (probably bad/non-JSON response).")
+
+
+def reset_new_version_status():
+    latest_version = get_latest_version()
+    if latest_version:
+        _compare_and_update(latest_version)
+
+
+def get_latest_version():
+    return redis_connection.get(REDIS_KEY)
+
+
+def _compare_and_update(latest_version):
+    # TODO: support alpha channel (allow setting which channel to check & parse build number)
+    is_newer = semver.compare(current_version, latest_version) == -1
+    logging.info("Latest version: %s (newer: %s)", latest_version, is_newer)
+
+    if is_newer:
+        redis_connection.set(REDIS_KEY, latest_version)
+    else:
+        redis_connection.delete(REDIS_KEY)
@@ -261,15 +261,19 @@ def test_webex_notify_calls_requests_post():
    alert.name = "Test Alert"
    alert.custom_subject = "Test custom subject"
    alert.custom_body = "Test custom body"

    alert.render_template = mock.Mock(return_value={"Rendered": "template"})

    query = mock.Mock()
    query.id = 1

    user = mock.Mock()
    app = mock.Mock()
    host = "https://localhost:5000"
-    options = {"webex_bot_token": "abcd", "to_room_ids": "1234"}
+    options = {
+        "webex_bot_token": "abcd",
+        "to_room_ids": "1234,5678",
+        "to_person_emails": "example1@test.com,example2@test.com",
+    }
    metadata = {"Scheduled": False}

    new_state = Alert.TRIGGERED_STATE
@@ -277,7 +281,7 @@ def test_webex_notify_calls_requests_post():

    with mock.patch("redash.destinations.webex.requests.post") as mock_post:
        mock_response = mock.Mock()
-        mock_response.status_code = 204
+        mock_response.status_code = 200
        mock_post.return_value = mock_response

        destination.notify(alert, query, user, new_state, app, host, metadata, options)
@@ -285,13 +289,111 @@ def test_webex_notify_calls_requests_post():
        query_link = f"{host}/queries/{query.id}"
        alert_link = f"{host}/alerts/{alert.id}"

-        formatted_attachments = Webex.formatted_attachments_template(
+        expected_attachments = Webex.formatted_attachments_template(
+            alert.custom_subject, alert.custom_body, query_link, alert_link
+        )
+
+        expected_payload_room = {
+            "markdown": alert.custom_subject + "\n" + alert.custom_body,
+            "attachments": expected_attachments,
+            "roomId": "1234",
+        }
+
+        expected_payload_email = {
+            "markdown": alert.custom_subject + "\n" + alert.custom_body,
+            "attachments": expected_attachments,
+            "toPersonEmail": "example1@test.com",
+        }
+
+        # Check that requests.post was called for both roomId and toPersonEmail destinations
+        mock_post.assert_any_call(
+            destination.api_base_url,
+            json=expected_payload_room,
+            headers={"Authorization": "Bearer abcd"},
+            timeout=5.0,
+        )
+
+        mock_post.assert_any_call(
+            destination.api_base_url,
+            json=expected_payload_email,
+            headers={"Authorization": "Bearer abcd"},
+            timeout=5.0,
+        )
+
+        assert mock_response.status_code == 200
+
+
+def test_webex_notify_handles_blank_entries():
+    alert = mock.Mock(spec_set=["id", "name", "custom_subject", "custom_body", "render_template"])
+    alert.id = 1
+    alert.name = "Test Alert"
+    alert.custom_subject = "Test custom subject"
+    alert.custom_body = "Test custom body"
+    alert.render_template = mock.Mock(return_value={"Rendered": "template"})
+
+    query = mock.Mock()
+    query.id = 1
+
+    user = mock.Mock()
+    app = mock.Mock()
+    host = "https://localhost:5000"
+    options = {
+        "webex_bot_token": "abcd",
+        "to_room_ids": "",
+        "to_person_emails": "",
+    }
+    metadata = {"Scheduled": False}
+
+    new_state = Alert.TRIGGERED_STATE
+    destination = Webex(options)
+
+    with mock.patch("redash.destinations.webex.requests.post") as mock_post:
+        destination.notify(alert, query, user, new_state, app, host, metadata, options)
+
+        # Ensure no API calls are made when destinations are blank
+        mock_post.assert_not_called()
+
+
+def test_webex_notify_handles_2d_array():
+    alert = mock.Mock(spec_set=["id", "name", "custom_subject", "custom_body", "render_template"])
+    alert.id = 1
+    alert.name = "Test Alert"
+    alert.custom_subject = "Test custom subject"
+    alert.custom_body = "Test custom body with table [['Col1', 'Col2'], ['Val1', 'Val2']]"
+    alert.render_template = mock.Mock(return_value={"Rendered": "template"})
+
+    query = mock.Mock()
+    query.id = 1
+
+    user = mock.Mock()
+    app = mock.Mock()
+    host = "https://localhost:5000"
+    options = {
+        "webex_bot_token": "abcd",
+        "to_room_ids": "1234",
+    }
+    metadata = {"Scheduled": False}
+
+    new_state = Alert.TRIGGERED_STATE
+    destination = Webex(options)
+
+    with mock.patch("redash.destinations.webex.requests.post") as mock_post:
+        mock_response = mock.Mock()
+        mock_response.status_code = 200
+        mock_post.return_value = mock_response
+
+        destination.notify(alert, query, user, new_state, app, host, metadata, options)
+
+        query_link = f"{host}/queries/{query.id}"
+        alert_link = f"{host}/alerts/{alert.id}"
+
+        expected_attachments = Webex.formatted_attachments_template(
            alert.custom_subject, alert.custom_body, query_link, alert_link
        )

        expected_payload = {
            "markdown": alert.custom_subject + "\n" + alert.custom_body,
-            "attachments": formatted_attachments,
+            "attachments": expected_attachments,
            "roomId": "1234",
        }

@@ -302,7 +404,60 @@ def test_webex_notify_calls_requests_post():
            timeout=5.0,
        )

-        assert mock_response.status_code == 204
+        assert mock_response.status_code == 200
+
+
+def test_webex_notify_handles_1d_array():
+    alert = mock.Mock(spec_set=["id", "name", "custom_subject", "custom_body", "render_template"])
+    alert.id = 1
+    alert.name = "Test Alert"
+    alert.custom_subject = "Test custom subject"
+    alert.custom_body = "Test custom body with 1D array, however unlikely ['Col1', 'Col2']"
+    alert.render_template = mock.Mock(return_value={"Rendered": "template"})
+
+    query = mock.Mock()
+    query.id = 1
+
+    user = mock.Mock()
+    app = mock.Mock()
+    host = "https://localhost:5000"
+    options = {
+        "webex_bot_token": "abcd",
+        "to_room_ids": "1234",
+    }
+    metadata = {"Scheduled": False}
+
+    new_state = Alert.TRIGGERED_STATE
+    destination = Webex(options)
+
+    with mock.patch("redash.destinations.webex.requests.post") as mock_post:
+        mock_response = mock.Mock()
+        mock_response.status_code = 200
+        mock_post.return_value = mock_response
+
+        destination.notify(alert, query, user, new_state, app, host, metadata, options)
+
+        query_link = f"{host}/queries/{query.id}"
+        alert_link = f"{host}/alerts/{alert.id}"
+
+        expected_attachments = Webex.formatted_attachments_template(
+            alert.custom_subject, alert.custom_body, query_link, alert_link
+        )
+
+        expected_payload = {
+            "markdown": alert.custom_subject + "\n" + alert.custom_body,
+            "attachments": expected_attachments,
+            "roomId": "1234",
+        }
+
+        mock_post.assert_called_once_with(
+            destination.api_base_url,
+            json=expected_payload,
+            headers={"Authorization": "Bearer abcd"},
+            timeout=5.0,
+        )
+
+        assert mock_response.status_code == 200


 def test_datadog_notify_calls_requests_post():
@@ -118,6 +118,10 @@ class TestAlertEvaluate(BaseTestCase):
         )
         self.assertEqual(alert.evaluate(), Alert.UNKNOWN_STATE)

+    def test_evaluate_return_unknown_when_value_is_none(self):
+        alert = self.create_alert(get_results(None))
+        self.assertEqual(alert.evaluate(), Alert.UNKNOWN_STATE)
+

 class TestNextState(TestCase):
     def test_numeric_value(self):
@@ -5,7 +5,7 @@ Object {
   "columns": Array [
     Object {
       "alignContent": "right",
-      "allowHTML": true,
+      "allowHTML": false,
       "allowSearch": false,
       "booleanValues": Array [
         "false",
@@ -38,7 +38,7 @@ Object {
   "columns": Array [
     Object {
       "alignContent": "left",
-      "allowHTML": true,
+      "allowHTML": false,
       "allowSearch": false,
       "booleanValues": Array [
         "false",
@@ -71,7 +71,7 @@ Object {
   "columns": Array [
     Object {
       "alignContent": "left",
-      "allowHTML": true,
+      "allowHTML": false,
       "allowSearch": false,
       "booleanValues": Array [
         "false",
@@ -104,7 +104,7 @@ Object {
   "columns": Array [
     Object {
       "alignContent": "left",
-      "allowHTML": true,
+      "allowHTML": false,
       "allowSearch": true,
       "booleanValues": Array [
         "false",
@@ -137,7 +137,7 @@ Object {
   "columns": Array [
     Object {
       "alignContent": "left",
-      "allowHTML": true,
+      "allowHTML": false,
       "allowSearch": false,
       "booleanValues": Array [
         "false",
@@ -54,7 +54,7 @@ function getDefaultColumnsOptions(columns: any) {
     allowSearch: false,
     alignContent: getColumnContentAlignment(col.type),
     // `string` cell options
-    allowHTML: true,
+    allowHTML: false,
     highlightLinks: false,
   }));
 }
@@ -10479,9 +10479,9 @@ nan@^2.12.1:
   integrity sha512-2ZTgtl0nJsO0KQCjEpxcIr5D+Yv90plTitZt9JBfQvVJDS5seMl3FOvsh3+9CoYWXf/1l5OaZzzF6nDm4cagaQ==

 nanoid@^3.3.6:
-  version "3.3.6"
+  version "3.3.8"
-  resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.6.tgz#443380c856d6e9f9824267d960b4236ad583ea4c"
+  resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.8.tgz#b1be3030bee36aaff18bacb375e5cce521684baf"
-  integrity sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA==
+  integrity sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w==

 nanomatch@^1.2.9:
   version "1.2.13"