Compare commits

...

35 Commits

Author SHA1 Message Date
dependabot[bot]
af71e0ec13 Bump serialize-javascript from 6.0.1 to 6.0.2 in /viz-lib
Bumps [serialize-javascript](https://github.com/yahoo/serialize-javascript) from 6.0.1 to 6.0.2.
- [Release notes](https://github.com/yahoo/serialize-javascript/releases)
- [Commits](https://github.com/yahoo/serialize-javascript/compare/v6.0.1...v6.0.2)

---
updated-dependencies:
- dependency-name: serialize-javascript
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-03-05 14:32:00 +00:00
Tsuneo Yoshioka
594e2f24ef Upgrade plotly.js to version 2 to fix the UI crashing issue (#7359)
* Upgrade plotly.js to version 2

* Fix styling error reported by Restyled
2025-03-05 14:30:28 +00:00
github-actions[bot]
3275a9e459 Snapshot: 25.03.0-dev 2025-03-01 00:35:44 +00:00
Shunki
3bad8c8e8c TiDB: Exclude INFORMATION_SCHEMA (#7352)
Co-authored-by: snickerjp <snickerjp@gmail.com>
2025-02-28 11:09:46 +09:00
Tsuneo Yoshioka
d0af4499d6 Sanitize NaN, Infinity, -Infinity values that cause errors when saving as PostgreSQL JSON #7339 (2nd try) (#7348)
* Sanitize NaN, Infinity, -Infinity values that cause errors when saving as PostgreSQL JSON #7339 (2nd try)

* Move JSON sanitize to the top of json_dumps

* Fix comment
2025-02-27 01:40:43 -08:00
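As a hedged illustration of the fix in #7348 (names are hypothetical, not Redash's actual implementation): Python's json.dumps emits non-standard NaN/Infinity tokens by default, which PostgreSQL's JSON type rejects, so the values must be replaced before serialization.

```
import json
import math

def sanitize_json_values(value):
    # Hypothetical sketch: recursively replace NaN/Infinity/-Infinity with
    # None so json.dumps output stays valid for PostgreSQL's JSON type.
    if isinstance(value, float) and (math.isnan(value) or math.isinf(value)):
        return None
    if isinstance(value, dict):
        return {k: sanitize_json_values(v) for k, v in value.items()}
    if isinstance(value, list):
        return [sanitize_json_values(v) for v in value]
    return value

print(json.dumps(sanitize_json_values({"avg": float("nan"), "max": float("inf")})))
# {"avg": null, "max": null}
```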
Ran Benita
4357ea56ae Fix UnboundLocalError when checking alerts for query (#7346)
This fixes the following exception:

```
UnboundLocalError: local variable 'value_is_number' referenced before assignment
  File "rq/worker.py", line 1431, in perform_job
    rv = job.perform()
  File "rq/job.py", line 1280, in perform
    self._result = self._execute()
  File "rq/job.py", line 1317, in _execute
    result = self.func(*self.args, **self.kwargs)
  File "redash/tasks/alerts.py", line 36, in check_alerts_for_query
    new_state = alert.evaluate()
  File "redash/models/__init__.py", line 1002, in evaluate
    new_state = next_state(op, value, threshold)
  File "redash/models/__init__.py", line 928, in next_state
    elif not value_is_number and op not in [OPERATORS.get("!="), OPERATORS.get("=="), OPERATORS.get("equals")]:
```
2025-02-25 09:15:20 -05:00
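The bug class here is generic Python: a local variable bound on only one code path is read on another. A reduced sketch of the pattern and the usual fix (the real next_state in redash/models is more involved):

```
def next_state_buggy(value):
    try:
        value = float(value)
        value_is_number = True          # bound only when float() succeeds...
    except (ValueError, TypeError):
        pass
    if not value_is_number:             # ...so this line can raise UnboundLocalError
        return "unknown"
    return "ok"

def next_state_fixed(value):
    value_is_number = False             # bind unconditionally before branching
    try:
        value = float(value)
        value_is_number = True
    except (ValueError, TypeError):
        pass
    if not value_is_number:
        return "unknown"
    return "ok"
```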
Tsuneo Yoshioka
5df5ca87a2 Add NULLS LAST option for query ordering (#7341) 2025-02-25 10:58:48 +08:00
Tsuneo Yoshioka
8387fe6fcb Fix the issue where charts (scatter, line, bubble, ...) with the same x-value get the wrong y-value (#7330) 2025-02-18 20:04:12 +00:00
snickerjp
e95de2ee4c Update oracledb package to version 2.5.1 and adjust Python version compatibility (#7316) 2025-02-18 23:00:09 +09:00
Lee2532
71902e5933 Fix: Redash Docker image tag (#7280)
Co-authored-by: snickerjp <snickerjp@gmail.com>
2025-02-15 01:38:23 +09:00
Tsuneo Yoshioka
53eab14cef Make autocomplete always available (#7326) 2025-02-13 15:25:39 -05:00
Eric Radman
925bb91d8e Use absolute path for image resources (#7322)
When MULTI_ORG is enabled, 'static/' resolves to '<org>/static/'
2025-02-12 08:37:40 -05:00
Tsuneo Yoshioka
ec2ca6f986 BigQuery: show column type on Schema Browser (#7257) 2025-02-05 18:25:39 +00:00
Matt Nelson
96ea0194e8 Fix errors in Webex alert destination. Add formatting support for QUERY_RESULT_TABLE. (#7296)
* Prevent text values in payload from being detected as 'set' on send.
Webex send ERROR:: Object of type set is not JSON serializable

Signed-off-by: Matt Nelson <metheos@gmail.com>

* add support for formatted QUERY_RESULT_TABLE in webex card

Signed-off-by: Matt Nelson <metheos@gmail.com>

* don't try to send to blank destinations

Signed-off-by: Matt Nelson <metheos@gmail.com>

* fix handling of the encoded QUERY_RESULTS_TABLE text

Signed-off-by: Matt Nelson <metheos@gmail.com>

* re-sort imports for ruff

Signed-off-by: Matt Nelson <metheos@gmail.com>

* change formatter to black

Signed-off-by: Matt Nelson <metheos@gmail.com>

* Add additional tests for Webex notification handling

ensure blank entries are handled for room IDs and person emails.
ensure that the API is not called when no valid destinations are provided.
ensure proper attachment formatting for alerts containing 2D arrays.

Signed-off-by: Matt Nelson <metheos@gmail.com>

* Add test for Webex notification with 1D array handling

This commit introduces a new test case to verify that the Webex
notification function correctly handles a 1D array input in the alert body.
The test ensures that the expected payload is constructed properly and that
the requests.post method is called with the correct parameters.

Signed-off-by: Matt Nelson <metheos@gmail.com>

---------

Signed-off-by: Matt Nelson <metheos@gmail.com>
2025-02-04 11:05:13 +00:00
github-actions[bot]
2776992101 Snapshot: 25.02.0-dev 2025-02-01 00:33:52 +00:00
Arik Fraimovich
85f001982e GitHub Actions Workflow updates (#7298)
* Split out workflows that require secrets

* Update target

* Update Cypress run command
2025-01-31 10:20:04 +02:00
Motoi Washida
d03a2c4096 Fix error in rehash DB migration with Elasticsearch queries (#7292)
Fixes #7272
2025-01-22 21:19:59 -05:00
SeongTae Jeong
8c5890482a Use ARM64 runners instead of virtualization for ARM64 image builds (#7291) 2025-01-19 16:00:19 +10:00
Ezra Odio
10ce280a96 Default to not allowing HTML content in tables (#7064)
Co-authored-by: Ezra Odio <eodio@starfishstorage.com>
2025-01-15 10:09:24 -05:00
dependabot[bot]
0dd7ac3d2e Bump virtualenv from 20.25.0 to 20.26.6 (#7276) 2025-01-14 01:45:58 +00:00
github-actions[bot]
4ee53a9445 Snapshot: 25.01.0-dev 2025-01-01 00:35:12 +00:00
SeongTae Jeong
c08292d90e Use Codecov token (#7265) 2024-12-30 21:06:09 +00:00
SeongTae Jeong
3142131cdd Bump actions/upload-artifact from v3 to v4 (#7266)
Related: https://github.blog/changelog/2024-04-16-deprecation-notice-v3-of-the-artifact-actions/
2024-12-30 15:31:03 -05:00
Daisuke Taniwaki
530c1a0734 Support result reuse in Athena data sources (#7202)
* Support result reuse

* Update pyathena to 2.25.2

* Separate options

* Regenerate the Poetry lock file

---------

Co-authored-by: SeongTae Jeong <seongtaejg@gmail.com>
2024-12-28 05:50:16 +09:00
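Result reuse is an Athena-side feature: a repeated query can be served from a previous execution's results instead of re-scanning S3. A hedged sketch of the underlying API via boto3 (the PR wires this through pyathena's options instead; the workgroup and bucket below are hypothetical):

```
import boto3

athena = boto3.client("athena", region_name="us-east-1")

# Ask Athena to reuse a matching result from the last 60 minutes, if one exists.
response = athena.start_query_execution(
    QueryString="SELECT 1",
    WorkGroup="primary",
    ResultConfiguration={"OutputLocation": "s3://my-query-results/"},  # hypothetical bucket
    ResultReuseConfiguration={
        "ResultReuseByAgeConfiguration": {"Enabled": True, "MaxAgeInMinutes": 60}
    },
)
print(response["QueryExecutionId"])
```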
dependabot[bot]
52dc1769a1 Bump jinja2 from 3.1.4 to 3.1.5 (#7262)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-27 13:55:21 +10:00
Eric Radman
b9583c0b48 Create workflow trigger for publishing release image (#7259)
Co-authored-by: Justin Clift <justin@postgresql.org>
2024-12-27 12:19:32 +10:00
Arik Fraimovich
89d7f54e90 Handle the case when query runner configuration is an empty dict. (#7258) 2024-12-24 09:42:39 -05:00
Tsuneo Yoshioka
d884da2b0b BigQuery: add date, datetime type mapping (#7252) 2024-12-18 14:24:45 +02:00
dependabot[bot]
f7d485082c Bump nanoid from 3.3.6 to 3.3.8 (#7249)
Bumps [nanoid](https://github.com/ai/nanoid) from 3.3.6 to 3.3.8.
- [Release notes](https://github.com/ai/nanoid/releases)
- [Changelog](https://github.com/ai/nanoid/blob/main/CHANGELOG.md)
- [Commits](https://github.com/ai/nanoid/compare/3.3.6...3.3.8)

---
updated-dependencies:
- dependency-name: nanoid
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-13 17:57:05 +09:00
Eric Radman
130ab1fe1a Update to paramiko-3.4.1 (#7240)
Solves the deprecation warning for TripleDES
Related: https://github.com/paramiko/paramiko/issues/2419
2024-12-07 11:23:45 +09:00
github-actions[bot]
2ff83679fe Snapshot: 24.12.0-dev 2024-12-01 00:40:40 +00:00
Eric Radman
de49b73855 Replace ptvsd with debugpy to match modern VS Code (#7234) 2024-11-27 08:19:05 +10:00
thiagogds
c12e68f5d1 Only evaluate the next state if there's a value (#7222)
I've experienced this on my Redash instance in production. I'm not sure what can cause the value to exist but be None; I guess it depends on the SQL query.

I followed the same idea of returning self.UNKNOWN_STATE for cases where we can't know what's happening.
2024-11-26 12:57:34 -05:00
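A hedged sketch of the guard this commit describes (illustrative names, not Redash's exact code): bail out to the unknown state before any comparison when the value is None.

```
import operator

UNKNOWN_STATE = "unknown"
OPERATORS = {">": operator.gt, "<": operator.lt, "==": operator.eq}

def evaluate_alert(op_name, value, threshold):
    # A row can exist while its column value is None (depending on the SQL
    # query), so never compare None against the threshold.
    if value is None:
        return UNKNOWN_STATE
    return "triggered" if OPERATORS[op_name](value, threshold) else "ok"

print(evaluate_alert(">", None, 10))  # unknown
print(evaluate_alert(">", 42, 10))    # triggered
```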
Eric Radman
baa9bbd505 Use head.sha for restyled checkout (#7227) 2024-11-22 10:34:16 +10:00
Arik Fraimovich
349cd5d031 Bring back version check & beacon reporting (#7211)
Co-authored-by: Restyled.io <commits@restyled.io>
Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
2024-11-06 01:21:03 +00:00
60 changed files with 2168 additions and 3072 deletions

View File

@@ -3,7 +3,7 @@ on:
push:
branches:
- master
pull_request_target:
pull_request:
branches:
- master
env:
@@ -60,15 +60,17 @@ jobs:
mkdir -p /tmp/test-results/unit-tests
docker cp tests:/app/coverage.xml ./coverage.xml
docker cp tests:/app/junit.xml /tmp/test-results/unit-tests/results.xml
- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@v3
# - name: Upload coverage reports to Codecov
# uses: codecov/codecov-action@v3
# with:
# token: ${{ secrets.CODECOV_TOKEN }}
- name: Store Test Results
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: test-results
name: backend-test-results
path: /tmp/test-results
- name: Store Coverage Results
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: coverage
path: coverage.xml
@@ -94,9 +96,9 @@ jobs:
- name: Run Lint
run: yarn lint:ci
- name: Store Test Results
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: test-results
name: frontend-test-results
path: /tmp/test-results
frontend-unit-tests:
@@ -132,9 +134,9 @@ jobs:
COMPOSE_PROJECT_NAME: cypress
CYPRESS_INSTALL_BINARY: 0
PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: 1
PERCY_TOKEN: ${{ secrets.PERCY_TOKEN }}
CYPRESS_PROJECT_ID: ${{ secrets.CYPRESS_PROJECT_ID }}
CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
# PERCY_TOKEN: ${{ secrets.PERCY_TOKEN }}
# CYPRESS_PROJECT_ID: ${{ secrets.CYPRESS_PROJECT_ID }}
# CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
steps:
- if: github.event.pull_request.mergeable == 'false'
name: Exit if PR is not mergeable
@@ -169,7 +171,7 @@ jobs:
- name: Copy Code Coverage Results
run: docker cp cypress:/usr/src/app/coverage ./coverage || true
- name: Store Coverage Results
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: coverage
path: coverage

View File

@@ -4,6 +4,15 @@ on:
tags:
- '*-dev'
workflow_dispatch:
inputs:
dockerRepository:
description: 'Docker repository'
required: true
default: 'preview'
type: choice
options:
- preview
- redash
env:
NODE_VERSION: 18
@@ -30,7 +39,20 @@ jobs:
fi
build-docker-image:
runs-on: ubuntu-22.04
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
arch:
- amd64
- arm64
include:
- arch: amd64
os: ubuntu-22.04
- arch: arm64
os: ubuntu-22.04-arm
outputs:
VERSION_TAG: ${{ steps.version.outputs.VERSION_TAG }}
needs:
- build-skip-check
if: needs.build-skip-check.outputs.skip == 'false'
@@ -45,11 +67,6 @@ jobs:
node-version: ${{ env.NODE_VERSION }}
cache: 'yarn'
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
with:
platforms: arm64
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
@@ -60,6 +77,8 @@ jobs:
password: ${{ secrets.DOCKER_PASS }}
- name: Install Dependencies
env:
PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: true
run: |
npm install --global --force yarn@1.22.22
yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
@@ -72,23 +91,92 @@ jobs:
VERSION_TAG=$(jq -r .version package.json)
echo "VERSION_TAG=$VERSION_TAG" >> "$GITHUB_OUTPUT"
# TODO: We can use GitHub Actions's matrix option to reduce the build time.
- name: Build and push preview image to Docker Hub
id: build-preview
uses: docker/build-push-action@v4
if: ${{ github.event.inputs.dockerRepository == 'preview' || !github.event.workflow_run }}
with:
push: true
tags: |
redash/redash:preview
redash/preview:${{ steps.version.outputs.VERSION_TAG }}
${{ vars.DOCKER_USER }}/redash
${{ vars.DOCKER_USER }}/preview
context: .
build-args: |
test_all_deps=true
cache-from: type=gha,scope=multi-platform
cache-to: type=gha,mode=max,scope=multi-platform
platforms: linux/amd64,linux/arm64
outputs: type=image,push-by-digest=true,push=true
cache-from: type=gha,scope=${{ matrix.arch }}
cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
env:
DOCKER_CONTENT_TRUST: true
- name: Build and push release image to Docker Hub
id: build-release
uses: docker/build-push-action@v4
if: ${{ github.event.inputs.dockerRepository == 'redash' }}
with:
tags: |
${{ vars.DOCKER_USER }}/redash:${{ steps.version.outputs.VERSION_TAG }}
context: .
build-args: |
test_all_deps=true
outputs: type=image,push-by-digest=true,push=true
cache-from: type=gha,scope=${{ matrix.arch }}
cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
env:
DOCKER_CONTENT_TRUST: true
- name: "Failure: output container logs to console"
if: failure()
run: docker compose logs
- name: Export digest
run: |
mkdir -p ${{ runner.temp }}/digests
if [[ "${{ github.event.inputs.dockerRepository }}" == 'preview' || !github.event.workflow_run ]]; then
digest="${{ steps.build-preview.outputs.digest}}"
else
digest="${{ steps.build-release.outputs.digest}}"
fi
touch "${{ runner.temp }}/digests/${digest#sha256:}"
- name: Upload digest
uses: actions/upload-artifact@v4
with:
name: digests-${{ matrix.arch }}
path: ${{ runner.temp }}/digests/*
if-no-files-found: error
merge-docker-image:
runs-on: ubuntu-22.04
needs: build-docker-image
steps:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to DockerHub
uses: docker/login-action@v3
with:
username: ${{ vars.DOCKER_USER }}
password: ${{ secrets.DOCKER_PASS }}
- name: Download digests
uses: actions/download-artifact@v4
with:
path: ${{ runner.temp }}/digests
pattern: digests-*
merge-multiple: true
- name: Create and push manifest for the preview image
if: ${{ github.event.inputs.dockerRepository == 'preview' || !github.event.workflow_run }}
working-directory: ${{ runner.temp }}/digests
run: |
docker buildx imagetools create -t ${{ vars.DOCKER_USER }}/redash:preview \
$(printf '${{ vars.DOCKER_USER }}/redash:preview@sha256:%s ' *)
docker buildx imagetools create -t ${{ vars.DOCKER_USER }}/preview:${{ needs.build-docker-image.outputs.VERSION_TAG }} \
$(printf '${{ vars.DOCKER_USER }}/preview:${{ needs.build-docker-image.outputs.VERSION_TAG }}@sha256:%s ' *)
- name: Create and push manifest for the release image
if: ${{ github.event.inputs.dockerRepository == 'redash' }}
working-directory: ${{ runner.temp }}/digests
run: |
docker buildx imagetools create -t ${{ vars.DOCKER_USER }}/redash:${{ needs.build-docker-image.outputs.VERSION_TAG }} \
$(printf '${{ vars.DOCKER_USER }}/redash:${{ needs.build-docker-image.outputs.VERSION_TAG }}@sha256:%s ' *)

View File

@@ -34,7 +34,7 @@ clean:
clean-all: clean
docker image rm --force \
redash/redash:10.1.0.b50633 redis:7-alpine maildev/maildev:latest \
redash/redash:latest redis:7-alpine maildev/maildev:latest \
pgautoupgrade/pgautoupgrade:15-alpine3.8 pgautoupgrade/pgautoupgrade:latest
down:

View File

@@ -67,7 +67,7 @@ help() {
echo ""
echo "shell -- open shell"
echo "dev_server -- start Flask development server with debugger and auto reload"
echo "debug -- start Flask development server with remote debugger via ptvsd"
echo "debug -- start Flask development server with remote debugger via debugpy"
echo "create_db -- create database tables"
echo "manage -- CLI to manage redash"
echo "tests -- run tests"

View File

@@ -1,5 +1,6 @@
import React from "react";
import { clientConfig } from "@/services/auth";
import Link from "@/components/Link";
import { clientConfig, currentUser } from "@/services/auth";
import frontendVersion from "@/version.json";
export default function VersionInfo() {
@@ -9,6 +10,15 @@ export default function VersionInfo() {
Version: {clientConfig.version}
{frontendVersion !== clientConfig.version && ` (${frontendVersion.substring(0, 8)})`}
</div>
{clientConfig.newVersionAvailable && currentUser.hasPermission("super_admin") && (
<div className="m-t-10">
{/* eslint-disable react/jsx-no-target-blank */}
<Link href="https://version.redash.io/" className="update-available" target="_blank" rel="noopener">
Update Available <i className="fa fa-external-link m-l-5" aria-hidden="true" />
<span className="sr-only">(opens in a new tab)</span>
</Link>
</div>
)}
</React.Fragment>
);
}

View File

@@ -0,0 +1,79 @@
import React, { useState } from "react";
import Card from "antd/lib/card";
import Button from "antd/lib/button";
import Typography from "antd/lib/typography";
import { clientConfig } from "@/services/auth";
import Link from "@/components/Link";
import HelpTrigger from "@/components/HelpTrigger";
import DynamicComponent from "@/components/DynamicComponent";
import OrgSettings from "@/services/organizationSettings";
const Text = Typography.Text;
function BeaconConsent() {
const [hide, setHide] = useState(false);
if (!clientConfig.showBeaconConsentMessage || hide) {
return null;
}
const hideConsentCard = () => {
clientConfig.showBeaconConsentMessage = false;
setHide(true);
};
const confirmConsent = (confirm) => {
let message = "🙏 Thank you.";
if (!confirm) {
message = "Settings Saved.";
}
OrgSettings.save({ beacon_consent: confirm }, message)
// .then(() => {
// // const settings = get(response, 'settings');
// // this.setState({ settings, formValues: { ...settings } });
// })
.finally(hideConsentCard);
};
return (
<DynamicComponent name="BeaconConsent">
<div className="m-t-10 tiled">
<Card
title={
<>
Would you be ok with sharing anonymous usage data with the Redash team?{" "}
<HelpTrigger type="USAGE_DATA_SHARING" />
</>
}
bordered={false}
>
<Text>Help Redash improve by automatically sending anonymous usage data:</Text>
<div className="m-t-5">
<ul>
<li> Number of users, queries, dashboards, alerts, widgets and visualizations.</li>
<li> Types of data sources, alert destinations and visualizations.</li>
</ul>
</div>
<Text>All data is aggregated and will never include any sensitive or private data.</Text>
<div className="m-t-5">
<Button type="primary" className="m-r-5" onClick={() => confirmConsent(true)}>
Yes
</Button>
<Button type="default" onClick={() => confirmConsent(false)}>
No
</Button>
</div>
<div className="m-t-15">
<Text type="secondary">
You can change this setting anytime from the <Link href="settings/general">Settings</Link> page.
</Text>
</div>
</Card>
</div>
</DynamicComponent>
);
}
export default BeaconConsent;

View File

@@ -23,6 +23,7 @@ export const TYPES = mapValues(
VALUE_SOURCE_OPTIONS: ["/user-guide/querying/query-parameters#Value-Source-Options", "Guide: Value Source Options"],
SHARE_DASHBOARD: ["/user-guide/dashboards/sharing-dashboards", "Guide: Sharing and Embedding Dashboards"],
AUTHENTICATION_OPTIONS: ["/user-guide/users/authentication-options", "Guide: Authentication Options"],
USAGE_DATA_SHARING: ["/open-source/admin-guide/usage-data", "Help: Anonymous Usage Data Sharing"],
DS_ATHENA: ["/data-sources/amazon-athena-setup", "Guide: Help Setting up Amazon Athena"],
DS_BIGQUERY: ["/data-sources/bigquery-setup", "Guide: Help Setting up BigQuery"],
DS_URL: ["/data-sources/querying-urls", "Guide: Help Setting up URL"],
@@ -100,7 +101,7 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
clearTimeout(this.iframeLoadingTimeout);
}
loadIframe = url => {
loadIframe = (url) => {
clearTimeout(this.iframeLoadingTimeout);
this.setState({ loading: true, error: false });
@@ -115,8 +116,8 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
clearTimeout(this.iframeLoadingTimeout);
};
onPostMessageReceived = event => {
if (!some(allowedDomains, domain => startsWith(event.origin, domain))) {
onPostMessageReceived = (event) => {
if (!some(allowedDomains, (domain) => startsWith(event.origin, domain))) {
return;
}
@@ -133,7 +134,7 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
return helpTriggerType ? helpTriggerType[0] : this.props.href;
};
openDrawer = e => {
openDrawer = (e) => {
// keep "open in new tab" behavior
if (!e.shiftKey && !e.ctrlKey && !e.metaKey) {
e.preventDefault();
@@ -143,7 +144,7 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
}
};
closeDrawer = event => {
closeDrawer = (event) => {
if (event) {
event.preventDefault();
}
@@ -160,7 +161,7 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
const tooltip = get(types, `${this.props.type}[1]`, this.props.title);
const className = cx("help-trigger", this.props.className);
const url = this.state.currentUrl;
const isAllowedDomain = some(allowedDomains, domain => startsWith(url || targetUrl, domain));
const isAllowedDomain = some(allowedDomains, (domain) => startsWith(url || targetUrl, domain));
const shouldRenderAsLink = this.props.renderAsLink || !isAllowedDomain;
return (
@@ -179,13 +180,15 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
)}
</>
) : null
}>
}
>
<Link
href={url || this.getUrl()}
className={className}
rel="noopener noreferrer"
target="_blank"
onClick={shouldRenderAsLink ? () => {} : this.openDrawer}>
onClick={shouldRenderAsLink ? () => {} : this.openDrawer}
>
{this.props.children}
</Link>
</Tooltip>
@@ -196,7 +199,8 @@ export function helpTriggerWithTypes(types, allowedDomains = [], drawerClassName
visible={this.state.visible}
className={cx("help-drawer", drawerClassName)}
destroyOnClose
width={400}>
width={400}
>
<div className="drawer-wrapper">
<div className="drawer-menu">
{url && (

View File

@@ -69,7 +69,7 @@ UserPreviewCard.defaultProps = {
// DataSourcePreviewCard
export function DataSourcePreviewCard({ dataSource, withLink, children, ...props }) {
const imageUrl = `static/images/db-logos/${dataSource.type}.png`;
const imageUrl = `/static/images/db-logos/${dataSource.type}.png`;
const title = withLink ? <Link href={"data_sources/" + dataSource.id}>{dataSource.name}</Link> : dataSource.name;
return (
<PreviewCard {...props} imageUrl={imageUrl} title={title}>

View File

@@ -96,7 +96,7 @@ function EmptyState({
}, []);
// Show if `onboardingMode=false` or any requested step not completed
const shouldShow = !onboardingMode || some(keys(isAvailable), step => isAvailable[step] && !isCompleted[step]);
const shouldShow = !onboardingMode || some(keys(isAvailable), (step) => isAvailable[step] && !isCompleted[step]);
if (!shouldShow) {
return null;
@@ -181,7 +181,7 @@ function EmptyState({
];
const stepsItems = getStepsItems ? getStepsItems(defaultStepsItems) : defaultStepsItems;
const imageSource = illustrationPath ? illustrationPath : "static/images/illustrations/" + illustration + ".svg";
const imageSource = illustrationPath ? illustrationPath : "/static/images/illustrations/" + illustration + ".svg";
return (
<div className="empty-state-wrapper">
@@ -196,7 +196,7 @@ function EmptyState({
</div>
<div className="empty-state__steps">
<h4>Let&apos;s get started</h4>
<ol>{stepsItems.map(item => item.node)}</ol>
<ol>{stepsItems.map((item) => item.node)}</ol>
{helpMessage}
</div>
</div>

View File

@@ -6,6 +6,7 @@ import Link from "@/components/Link";
import routeWithUserSession from "@/components/ApplicationArea/routeWithUserSession";
import EmptyState, { EmptyStateHelpMessage } from "@/components/empty-state/EmptyState";
import DynamicComponent from "@/components/DynamicComponent";
import BeaconConsent from "@/components/BeaconConsent";
import PlainButton from "@/components/PlainButton";
import { axios } from "@/services/axios";
@@ -30,7 +31,8 @@ function DeprecatedEmbedFeatureAlert() {
<Link
href="https://discuss.redash.io/t/support-for-parameters-in-embedded-visualizations/3337"
target="_blank"
rel="noopener noreferrer">
rel="noopener noreferrer"
>
Read more
</Link>
.
@@ -42,7 +44,7 @@ function DeprecatedEmbedFeatureAlert() {
function EmailNotVerifiedAlert() {
const verifyEmail = () => {
axios.post("verification_email/").then(data => {
axios.post("verification_email/").then((data) => {
notification.success(data.message);
});
};
@@ -88,6 +90,7 @@ export default function Home() {
</DynamicComponent>
<DynamicComponent name="HomeExtra" />
<DashboardAndQueryFavoritesList />
<BeaconConsent />
</div>
</div>
);
@@ -98,6 +101,6 @@ routes.register(
routeWithUserSession({
path: "/",
title: "Redash",
render: pageProps => <Home {...pageProps} />,
render: (pageProps) => <Home {...pageProps} />,
})
);

View File

@@ -2,7 +2,7 @@ import PropTypes from "prop-types";
import React from "react";
export function QuerySourceTypeIcon(props) {
return <img src={`static/images/db-logos/${props.type}.png`} width="20" alt={props.alt} />;
return <img src={`/static/images/db-logos/${props.type}.png`} width="20" alt={props.alt} />;
}
QuerySourceTypeIcon.propTypes = {

View File

@@ -18,7 +18,7 @@ function EmptyState({ title, message, refreshButton }) {
<div className="query-results-empty-state">
<div className="empty-state-content">
<div>
<img src="static/images/illustrations/no-query-results.svg" alt="No Query Results Illustration" />
<img src="/static/images/illustrations/no-query-results.svg" alt="No Query Results Illustration" />
</div>
<h3>{title}</h3>
<div className="m-b-20">{message}</div>
@@ -40,7 +40,7 @@ EmptyState.defaultProps = {
function TabWithDeleteButton({ visualizationName, canDelete, onDelete, ...props }) {
const handleDelete = useCallback(
e => {
(e) => {
e.stopPropagation();
Modal.confirm({
title: "Delete Visualization",
@@ -111,7 +111,8 @@ export default function QueryVisualizationTabs({
className="add-visualization-button"
data-test="NewVisualization"
type="link"
onClick={() => onAddVisualization()}>
onClick={() => onAddVisualization()}
>
<i className="fa fa-plus" aria-hidden="true" />
<span className="m-l-5 hidden-xs">Add Visualization</span>
</Button>
@@ -119,7 +120,7 @@ export default function QueryVisualizationTabs({
}
const orderedVisualizations = useMemo(() => orderBy(visualizations, ["id"]), [visualizations]);
const isFirstVisualization = useCallback(visId => visId === orderedVisualizations[0].id, [orderedVisualizations]);
const isFirstVisualization = useCallback((visId) => visId === orderedVisualizations[0].id, [orderedVisualizations]);
const isMobile = useMedia({ maxWidth: 768 });
const [filters, setFilters] = useState([]);
@@ -132,9 +133,10 @@ export default function QueryVisualizationTabs({
data-test="QueryPageVisualizationTabs"
animated={false}
tabBarGutter={0}
onChange={activeKey => onChangeTab(+activeKey)}
destroyInactiveTabPane>
{orderedVisualizations.map(visualization => (
onChange={(activeKey) => onChangeTab(+activeKey)}
destroyInactiveTabPane
>
{orderedVisualizations.map((visualization) => (
<TabPane
key={`${visualization.id}`}
tab={
@@ -144,7 +146,8 @@ export default function QueryVisualizationTabs({
visualizationName={visualization.name}
onDelete={() => onDeleteVisualization(visualization.id)}
/>
}>
}
>
{queryResult ? (
<VisualizationRenderer
visualization={visualization}

View File

@@ -1,16 +1,11 @@
import { useCallback, useMemo, useState } from "react";
import { reduce } from "lodash";
import localOptions from "@/lib/localOptions";
function calculateTokensCount(schema) {
return reduce(schema, (totalLength, table) => totalLength + table.columns.length, 0);
}
export default function useAutocompleteFlags(schema) {
const isAvailable = useMemo(() => calculateTokensCount(schema) <= 5000, [schema]);
const isAvailable = true;
const [isEnabled, setIsEnabled] = useState(localOptions.get("liveAutocomplete", true));
const toggleAutocomplete = useCallback(state => {
const toggleAutocomplete = useCallback((state) => {
setIsEnabled(state);
localOptions.set("liveAutocomplete", state);
}, []);

View File

@@ -0,0 +1,40 @@
import React from "react";
import Form from "antd/lib/form";
import Checkbox from "antd/lib/checkbox";
import Skeleton from "antd/lib/skeleton";
import HelpTrigger from "@/components/HelpTrigger";
import DynamicComponent from "@/components/DynamicComponent";
import { SettingsEditorPropTypes, SettingsEditorDefaultProps } from "../prop-types";
export default function BeaconConsentSettings(props) {
const { values, onChange, loading } = props;
return (
<DynamicComponent name="OrganizationSettings.BeaconConsentSettings" {...props}>
<Form.Item
label={
<span>
Anonymous Usage Data Sharing
<HelpTrigger className="m-l-5 m-r-5" type="USAGE_DATA_SHARING" />
</span>
}
>
{loading ? (
<Skeleton title={{ width: 300 }} paragraph={false} active />
) : (
<Checkbox
name="beacon_consent"
checked={values.beacon_consent}
onChange={(e) => onChange({ beacon_consent: e.target.checked })}
>
Help Redash improve by automatically sending anonymous usage data
</Checkbox>
)}
</Form.Item>
</DynamicComponent>
);
}
BeaconConsentSettings.propTypes = SettingsEditorPropTypes;
BeaconConsentSettings.defaultProps = SettingsEditorDefaultProps;

View File

@@ -4,6 +4,7 @@ import DynamicComponent from "@/components/DynamicComponent";
import FormatSettings from "./FormatSettings";
import PlotlySettings from "./PlotlySettings";
import FeatureFlagsSettings from "./FeatureFlagsSettings";
import BeaconConsentSettings from "./BeaconConsentSettings";
export default function GeneralSettings(props) {
return (
@@ -13,6 +14,7 @@ export default function GeneralSettings(props) {
<FormatSettings {...props} />
<PlotlySettings {...props} />
<FeatureFlagsSettings {...props} />
<BeaconConsentSettings {...props} />
</DynamicComponent>
);
}

View File

@@ -4,19 +4,19 @@ import { fetchDataFromJob } from "@/services/query-result";
export const SCHEMA_NOT_SUPPORTED = 1;
export const SCHEMA_LOAD_ERROR = 2;
export const IMG_ROOT = "static/images/db-logos";
export const IMG_ROOT = "/static/images/db-logos";
function mapSchemaColumnsToObject(columns) {
return map(columns, column => (isObject(column) ? column : { name: column }));
return map(columns, (column) => (isObject(column) ? column : { name: column }));
}
const DataSource = {
query: () => axios.get("api/data_sources"),
get: ({ id }) => axios.get(`api/data_sources/${id}`),
types: () => axios.get("api/data_sources/types"),
create: data => axios.post(`api/data_sources`, data),
save: data => axios.post(`api/data_sources/${data.id}`, data),
test: data => axios.post(`api/data_sources/${data.id}/test`),
create: (data) => axios.post(`api/data_sources`, data),
save: (data) => axios.post(`api/data_sources/${data.id}`, data),
test: (data) => axios.post(`api/data_sources/${data.id}/test`),
delete: ({ id }) => axios.delete(`api/data_sources/${id}`),
fetchSchema: (data, refresh = false) => {
const params = {};
@@ -27,15 +27,15 @@ const DataSource = {
return axios
.get(`api/data_sources/${data.id}/schema`, { params })
.then(data => {
.then((data) => {
if (has(data, "job")) {
return fetchDataFromJob(data.job.id).catch(error =>
return fetchDataFromJob(data.job.id).catch((error) =>
error.code === SCHEMA_NOT_SUPPORTED ? [] : Promise.reject(new Error(data.job.error))
);
}
return has(data, "schema") ? data.schema : Promise.reject();
})
.then(tables => map(tables, table => ({ ...table, columns: mapSchemaColumnsToObject(table.columns) })));
.then((tables) => map(tables, (table) => ({ ...table, columns: mapSchemaColumnsToObject(table.columns) })));
},
};

View File

@@ -63,7 +63,7 @@ function runCypressCI() {
CYPRESS_OPTIONS, // eslint-disable-line no-unused-vars
} = process.env;
if (GITHUB_REPOSITORY === "getredash/redash") {
if (GITHUB_REPOSITORY === "getredash/redash" && process.env.CYPRESS_RECORD_KEY) {
process.env.CYPRESS_OPTIONS = "--record";
}

View File

@@ -3,36 +3,26 @@
* @param should Passed to should expression after plot points are captured
*/
export function assertPlotPreview(should = "exist") {
cy.getByTestId("VisualizationPreview")
.find("g.plot")
.should("exist")
.find("g.points")
.should(should);
cy.getByTestId("VisualizationPreview").find("g.overplot").should("exist").find("g.points").should(should);
}
export function createChartThroughUI(chartName, chartSpecificAssertionFn = () => {}) {
cy.getByTestId("NewVisualization").click();
cy.getByTestId("VisualizationType").selectAntdOption("VisualizationType.CHART");
cy.getByTestId("VisualizationName")
.clear()
.type(chartName);
cy.getByTestId("VisualizationName").clear().type(chartName);
chartSpecificAssertionFn();
cy.server();
cy.route("POST", "**/api/visualizations").as("SaveVisualization");
cy.getByTestId("EditVisualizationDialog")
.contains("button", "Save")
.click();
cy.getByTestId("EditVisualizationDialog").contains("button", "Save").click();
cy.getByTestId("QueryPageVisualizationTabs")
.contains("span", chartName)
.should("exist");
cy.getByTestId("QueryPageVisualizationTabs").contains("span", chartName).should("exist");
cy.wait("@SaveVisualization").should("have.property", "status", 200);
return cy.get("@SaveVisualization").then(xhr => {
return cy.get("@SaveVisualization").then((xhr) => {
const { id, name, options } = xhr.response.body;
return cy.wrap({ id, name, options });
});
@@ -42,19 +32,13 @@ export function assertTabbedEditor(chartSpecificTabbedEditorAssertionFn = () =>
cy.getByTestId("Chart.GlobalSeriesType").should("exist");
cy.getByTestId("VisualizationEditor.Tabs.Series").click();
cy.getByTestId("VisualizationEditor")
.find("table")
.should("exist");
cy.getByTestId("VisualizationEditor").find("table").should("exist");
cy.getByTestId("VisualizationEditor.Tabs.Colors").click();
cy.getByTestId("VisualizationEditor")
.find("table")
.should("exist");
cy.getByTestId("VisualizationEditor").find("table").should("exist");
cy.getByTestId("VisualizationEditor.Tabs.DataLabels").click();
cy.getByTestId("VisualizationEditor")
.getByTestId("Chart.DataLabels.ShowDataLabels")
.should("exist");
cy.getByTestId("VisualizationEditor").getByTestId("Chart.DataLabels.ShowDataLabels").should("exist");
chartSpecificTabbedEditorAssertionFn();
@@ -63,39 +47,29 @@ export function assertTabbedEditor(chartSpecificTabbedEditorAssertionFn = () =>
export function assertAxesAndAddLabels(xaxisLabel, yaxisLabel) {
cy.getByTestId("VisualizationEditor.Tabs.XAxis").click();
cy.getByTestId("Chart.XAxis.Type")
.contains(".ant-select-selection-item", "Auto Detect")
.should("exist");
cy.getByTestId("Chart.XAxis.Type").contains(".ant-select-selection-item", "Auto Detect").should("exist");
cy.getByTestId("Chart.XAxis.Name")
.clear()
.type(xaxisLabel);
cy.getByTestId("Chart.XAxis.Name").clear().type(xaxisLabel);
cy.getByTestId("VisualizationEditor.Tabs.YAxis").click();
cy.getByTestId("Chart.LeftYAxis.Type")
.contains(".ant-select-selection-item", "Linear")
.should("exist");
cy.getByTestId("Chart.LeftYAxis.Type").contains(".ant-select-selection-item", "Linear").should("exist");
cy.getByTestId("Chart.LeftYAxis.Name")
.clear()
.type(yaxisLabel);
cy.getByTestId("Chart.LeftYAxis.Name").clear().type(yaxisLabel);
cy.getByTestId("Chart.LeftYAxis.TickFormat")
.clear()
.type("+");
cy.getByTestId("Chart.LeftYAxis.TickFormat").clear().type("+");
cy.getByTestId("VisualizationEditor.Tabs.General").click();
}
export function createDashboardWithCharts(title, chartGetters, widgetsAssertionFn = () => {}) {
cy.createDashboard(title).then(dashboard => {
cy.createDashboard(title).then((dashboard) => {
const dashboardUrl = `/dashboards/${dashboard.id}`;
const widgetGetters = chartGetters.map(chartGetter => `${chartGetter}Widget`);
const widgetGetters = chartGetters.map((chartGetter) => `${chartGetter}Widget`);
chartGetters.forEach((chartGetter, i) => {
const position = { autoHeight: false, sizeY: 8, sizeX: 3, col: (i % 2) * 3 };
cy.get(`@${chartGetter}`)
.then(chart => cy.addWidget(dashboard.id, chart.id, { position }))
.then((chart) => cy.addWidget(dashboard.id, chart.id, { position }))
.as(widgetGetters[i]);
});

View File

@@ -1,6 +1,6 @@
{
"name": "redash-client",
"version": "24.11.0-dev",
"version": "25.03.0-dev",
"description": "The frontend part of Redash.",
"main": "index.js",
"scripts": {

poetry.lock (generated, 216 lines changed)
View File

@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand.
[[package]]
name = "adal"
@@ -974,6 +974,41 @@ sqlalchemy = "*"
sqlalchemy = ["sqlalchemy (>1.3.21,<2.0)"]
superset = ["apache-superset (>=1.4.1)"]
[[package]]
name = "debugpy"
version = "1.8.9"
description = "An implementation of the Debug Adapter Protocol for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "debugpy-1.8.9-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:cfe1e6c6ad7178265f74981edf1154ffce97b69005212fbc90ca22ddfe3d017e"},
{file = "debugpy-1.8.9-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ada7fb65102a4d2c9ab62e8908e9e9f12aed9d76ef44880367bc9308ebe49a0f"},
{file = "debugpy-1.8.9-cp310-cp310-win32.whl", hash = "sha256:c36856343cbaa448171cba62a721531e10e7ffb0abff838004701454149bc037"},
{file = "debugpy-1.8.9-cp310-cp310-win_amd64.whl", hash = "sha256:17c5e0297678442511cf00a745c9709e928ea4ca263d764e90d233208889a19e"},
{file = "debugpy-1.8.9-cp311-cp311-macosx_14_0_universal2.whl", hash = "sha256:b74a49753e21e33e7cf030883a92fa607bddc4ede1aa4145172debc637780040"},
{file = "debugpy-1.8.9-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62d22dacdb0e296966d7d74a7141aaab4bec123fa43d1a35ddcb39bf9fd29d70"},
{file = "debugpy-1.8.9-cp311-cp311-win32.whl", hash = "sha256:8138efff315cd09b8dcd14226a21afda4ca582284bf4215126d87342bba1cc66"},
{file = "debugpy-1.8.9-cp311-cp311-win_amd64.whl", hash = "sha256:ff54ef77ad9f5c425398efb150239f6fe8e20c53ae2f68367eba7ece1e96226d"},
{file = "debugpy-1.8.9-cp312-cp312-macosx_14_0_universal2.whl", hash = "sha256:957363d9a7a6612a37458d9a15e72d03a635047f946e5fceee74b50d52a9c8e2"},
{file = "debugpy-1.8.9-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e565fc54b680292b418bb809f1386f17081d1346dca9a871bf69a8ac4071afe"},
{file = "debugpy-1.8.9-cp312-cp312-win32.whl", hash = "sha256:3e59842d6c4569c65ceb3751075ff8d7e6a6ada209ceca6308c9bde932bcef11"},
{file = "debugpy-1.8.9-cp312-cp312-win_amd64.whl", hash = "sha256:66eeae42f3137eb428ea3a86d4a55f28da9bd5a4a3d369ba95ecc3a92c1bba53"},
{file = "debugpy-1.8.9-cp313-cp313-macosx_14_0_universal2.whl", hash = "sha256:957ecffff80d47cafa9b6545de9e016ae8c9547c98a538ee96ab5947115fb3dd"},
{file = "debugpy-1.8.9-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1efbb3ff61487e2c16b3e033bc8595aea578222c08aaf3c4bf0f93fadbd662ee"},
{file = "debugpy-1.8.9-cp313-cp313-win32.whl", hash = "sha256:7c4d65d03bee875bcb211c76c1d8f10f600c305dbd734beaed4077e902606fee"},
{file = "debugpy-1.8.9-cp313-cp313-win_amd64.whl", hash = "sha256:e46b420dc1bea64e5bbedd678148be512442bc589b0111bd799367cde051e71a"},
{file = "debugpy-1.8.9-cp38-cp38-macosx_14_0_x86_64.whl", hash = "sha256:472a3994999fe6c0756945ffa359e9e7e2d690fb55d251639d07208dbc37caea"},
{file = "debugpy-1.8.9-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:365e556a4772d7d0d151d7eb0e77ec4db03bcd95f26b67b15742b88cacff88e9"},
{file = "debugpy-1.8.9-cp38-cp38-win32.whl", hash = "sha256:54a7e6d3014c408eb37b0b06021366ee985f1539e12fe49ca2ee0d392d9ceca5"},
{file = "debugpy-1.8.9-cp38-cp38-win_amd64.whl", hash = "sha256:8e99c0b1cc7bf86d83fb95d5ccdc4ad0586d4432d489d1f54e4055bcc795f693"},
{file = "debugpy-1.8.9-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:7e8b079323a56f719977fde9d8115590cb5e7a1cba2fcee0986ef8817116e7c1"},
{file = "debugpy-1.8.9-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6953b335b804a41f16a192fa2e7851bdcfd92173cbb2f9f777bb934f49baab65"},
{file = "debugpy-1.8.9-cp39-cp39-win32.whl", hash = "sha256:7e646e62d4602bb8956db88b1e72fe63172148c1e25c041e03b103a25f36673c"},
{file = "debugpy-1.8.9-cp39-cp39-win_amd64.whl", hash = "sha256:3d9755e77a2d680ce3d2c5394a444cf42be4a592caaf246dbfbdd100ffcf7ae5"},
{file = "debugpy-1.8.9-py2.py3-none-any.whl", hash = "sha256:cc37a6c9987ad743d9c3a14fa1b1a14b7e4e6041f9dd0c8abf8895fe7a97b899"},
{file = "debugpy-1.8.9.zip", hash = "sha256:1339e14c7d980407248f09824d1b25ff5c5616651689f1e0f0e51bdead3ea13e"},
]
[[package]]
name = "defusedxml"
version = "0.7.1"
@@ -1316,6 +1351,45 @@ files = [
[package.dependencies]
python-dateutil = ">=2.7"
[[package]]
name = "fsspec"
version = "2024.10.0"
description = "File-system specification"
optional = false
python-versions = ">=3.8"
files = [
{file = "fsspec-2024.10.0-py3-none-any.whl", hash = "sha256:03b9a6785766a4de40368b88906366755e2819e758b83705c88cd7cb5fe81871"},
{file = "fsspec-2024.10.0.tar.gz", hash = "sha256:eda2d8a4116d4f2429db8550f2457da57279247dd930bb12f821b58391359493"},
]
[package.extras]
abfs = ["adlfs"]
adl = ["adlfs"]
arrow = ["pyarrow (>=1)"]
dask = ["dask", "distributed"]
dev = ["pre-commit", "ruff"]
doc = ["numpydoc", "sphinx", "sphinx-design", "sphinx-rtd-theme", "yarl"]
dropbox = ["dropbox", "dropboxdrivefs", "requests"]
full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"]
fuse = ["fusepy"]
gcs = ["gcsfs"]
git = ["pygit2"]
github = ["requests"]
gs = ["gcsfs"]
gui = ["panel"]
hdfs = ["pyarrow (>=1)"]
http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)"]
libarchive = ["libarchive-c"]
oci = ["ocifs"]
s3 = ["s3fs"]
sftp = ["paramiko"]
smb = ["smbprotocol"]
ssh = ["paramiko"]
test = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "numpy", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "requests"]
test-downstream = ["aiobotocore (>=2.5.4,<3.0.0)", "dask-expr", "dask[dataframe,test]", "moto[server] (>4,<5)", "pytest-timeout", "xarray"]
test-full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "cloudpickle", "dask", "distributed", "dropbox", "dropboxdrivefs", "fastparquet", "fusepy", "gcsfs", "jinja2", "kerchunk", "libarchive-c", "lz4", "notebook", "numpy", "ocifs", "pandas", "panel", "paramiko", "pyarrow", "pyarrow (>=1)", "pyftpdlib", "pygit2", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "python-snappy", "requests", "smbprotocol", "tqdm", "urllib3", "zarr", "zstandard"]
tqdm = ["tqdm"]
[[package]]
name = "funcy"
version = "1.13"
@@ -1988,13 +2062,13 @@ testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"]
[[package]]
name = "jinja2"
version = "3.1.4"
version = "3.1.5"
description = "A very fast and expressive template engine."
optional = false
python-versions = ">=3.7"
files = [
{file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"},
{file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"},
{file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"},
{file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"},
]
[package.dependencies]
@@ -2642,42 +2716,42 @@ et-xmlfile = "*"
[[package]]
name = "oracledb"
version = "2.1.2"
version = "2.5.1"
description = "Python interface to Oracle Database"
optional = false
python-versions = ">=3.7"
python-versions = ">=3.8"
files = [
{file = "oracledb-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4ffaba9504c638c29129b484cf547accf750bd0f86df1ca6194646a4d2540691"},
{file = "oracledb-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71d98deb1e3a500920f5460d457925f0c8cef8d037881fdbd16df1c4734453dd"},
{file = "oracledb-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bde2df672704fbe12ab0653f6e808b1ed62de28c6864b17fc3a1fcac9c1fd472"},
{file = "oracledb-2.1.2-cp310-cp310-win32.whl", hash = "sha256:3b3798a1220fc8736a37b9280d0ae4cdf263bb203fc6e2b3a82c33f9a2010702"},
{file = "oracledb-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:92620efd5eb0d23b252d75f2f2ff1deadf25f44546903e3283760cb276d524ed"},
{file = "oracledb-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b913a164e1830d0e955b88d97c5e4da4d2402f8a8b0d38febb6ad5a8ef9e4743"},
{file = "oracledb-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c53827344c6d001f492aee0a3acb6c1b6c0f3030c2f5dc8cb86dc4f0bb4dd1ab"},
{file = "oracledb-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50225074841d5f9b281d620c012ced4b0946ff5a941c8b639be7babda5190709"},
{file = "oracledb-2.1.2-cp311-cp311-win32.whl", hash = "sha256:a043b4df2919411b787bcd24ffa4286249a11d05d29bb20bb076d108c3c6f777"},
{file = "oracledb-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:9edce208c26ee018e43b75323888743031be3e9f0c0e4221abf037129c12d949"},
{file = "oracledb-2.1.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:08aa313b801dda950918168d3962ba59a617adce143e0c2bf1ee9b847695faaa"},
{file = "oracledb-2.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de5c932b04d3bcdd22c71c0e5c5e1d16b6a3a2fc68dc472ee3a12e677461354c"},
{file = "oracledb-2.1.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d590caf39b1901bcba394fcda9815438faff0afaf374025f89ef5d65993d0a4"},
{file = "oracledb-2.1.2-cp312-cp312-win32.whl", hash = "sha256:1e3ffdfe76c97d1ca13a3fecf239c96d3889015bb5b775dc22b947108044b01e"},
{file = "oracledb-2.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:8c1eaf8c74bb6de5772de768f2f3f5eb935ab935c633d3a012ddff7e691a2073"},
{file = "oracledb-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e2ee06e154e08cc5e4037855d74dc6e37dc054c91a7a1a372bb60d4442e2ed3d"},
{file = "oracledb-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a21d84aaf5dddab0cfa8ab7c23272c0295a5c796f212a4ce8a6b499643663dd"},
{file = "oracledb-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b337f7cf30753c3a32302fbc25ca80d7ff5049dd9333e681236a674a90c21caf"},
{file = "oracledb-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:b5d936763a9b26d32c4e460dbb346c2a962fcc98e6df33dd2d81fdc2eb26f1e4"},
{file = "oracledb-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:0ea32b87b7202811d85082f10bf7789747ce45f195be4199c5611e7d76a79e78"},
{file = "oracledb-2.1.2-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:f94b22da87e051e3a8620d2b04d99e1cc9d9abb4da6736d6ae0ca436ba03fb86"},
{file = "oracledb-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:391034ee66717dba514e765263d08d18a2aa7badde373f82599b89e46fa3720a"},
{file = "oracledb-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a2d9891244b9b94465e30af8cc79380bbb41081c5dc0511cbc94cc250e9e26d"},
{file = "oracledb-2.1.2-cp38-cp38-win32.whl", hash = "sha256:9a9a6e0bf61952c2c82614b98fe896d2cda17d81ffca4527556e6607b10e3365"},
{file = "oracledb-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:d9a6447589f203ca846526c99a667537b099d54ddeff09d24f9da59bdcc8f98b"},
{file = "oracledb-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8eb688dd1f8ea2038d17bc84fb651aa1e994b155d3cb8b8387df70ab2a7b4c4c"},
{file = "oracledb-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f22c31b894bb085a33d70e174c9bcd0abafc630c2c941ff0d630ee3852f1aa6"},
{file = "oracledb-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5bc03520b8bd4dbf2ac4d937d298a85a7208ffbeec738eea92ad7bb00e7134a"},
{file = "oracledb-2.1.2-cp39-cp39-win32.whl", hash = "sha256:5d4f6bd1036d7edbb96d8d31f0ca53696a013c00ac82fc19ac0ca374d2265b2c"},
{file = "oracledb-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:69bde9770392c1c859b1e1d767dbb9ca4c57e3f2946ca90c779d9402a7e96111"},
{file = "oracledb-2.1.2.tar.gz", hash = "sha256:3054bcc295d7378834ba7a5aceb865985e954915f9b07a843ea84c3824c6a0b2"},
{file = "oracledb-2.5.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:54ea7b4da179eb3fefad338685b44fed657a9cd733fb0bfc09d344cfb266355e"},
{file = "oracledb-2.5.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:05df7a5a61f4d26c986e235fae6f64a81afaac8f1dbef60e2e9ecf9236218e58"},
{file = "oracledb-2.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d17c80063375a5d87a7ab57c8343e5434a16ea74f7be3b56f9100300ef0b69d6"},
{file = "oracledb-2.5.1-cp310-cp310-win32.whl", hash = "sha256:51b3911ee822319e20f2e19d816351aac747591a59a0a96cf891c62c2a5c0c0d"},
{file = "oracledb-2.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:e4e884625117e50b619c93828affbcffa594029ef8c8b40205394990e6af65a8"},
{file = "oracledb-2.5.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:85318350fa4837b7b637e436fa5f99c17919d6329065e64d1e18e5a7cae52457"},
{file = "oracledb-2.5.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:676c221227159d9cee25030c56ff9782f330115cb86164d92d3360f55b07654b"},
{file = "oracledb-2.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e78c6de57b4b5df7f932337c57e59b62e34fc4527d2460c0cab10c2ab01825f8"},
{file = "oracledb-2.5.1-cp311-cp311-win32.whl", hash = "sha256:0d5974327a1957538a144b073367104cdf8bb39cf056940995b75cb099535589"},
{file = "oracledb-2.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:541bb5a107917b9d9eba1346318b42f8b6024e7dd3bef1451f0745364f03399c"},
{file = "oracledb-2.5.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:970a9420cc351d650cc6716122e9aa50cfb8c27f425ffc9d83651fd3edff6090"},
{file = "oracledb-2.5.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a6788c128af5a3a45689453fc4832f32b4a0dae2696d9917c7631a2e02865148"},
{file = "oracledb-2.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8778daa3f08639232341d802b95ca6da4c0c798c8530e4df331b3286d32e49d5"},
{file = "oracledb-2.5.1-cp312-cp312-win32.whl", hash = "sha256:a44613f3dfacb2b9462c3871ee333fa535fbd0ec21942e14019fcfd572487db0"},
{file = "oracledb-2.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:934d02da80bfc030c644c5c43fbe58119dc170f15b4dfdb6fe04c220a1f8730d"},
{file = "oracledb-2.5.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:0374481329fa873a2af24eb12de4fd597c6c111e148065200562eb75ea0c6be7"},
{file = "oracledb-2.5.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:66e885de106701d1f2a630d19e183e491e4f1ccb8d78855f60396ba15856fb66"},
{file = "oracledb-2.5.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcf446f6250d8edad5367ff03ad73dbbe672a2e4b060c51a774821dd723b0283"},
{file = "oracledb-2.5.1-cp313-cp313-win32.whl", hash = "sha256:b02b93199a7073e9b5687fe2dfa83d25ea102ab261c577f9d55820d5ef193dda"},
{file = "oracledb-2.5.1-cp313-cp313-win_amd64.whl", hash = "sha256:173b6d132b230f0617380272181e14fc53aec65aaffe68b557a9b6040716a267"},
{file = "oracledb-2.5.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:7d5efc94ce5bb657a5f43e2683e23cc4b4c53c4783e817759869472a113dac26"},
{file = "oracledb-2.5.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6919cb69638a7dda45380d6530b6f2f7fd21ea7bdf8d38936653f9ebc4f7e3d6"},
{file = "oracledb-2.5.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:44f5eb220945a6e092975ebcb9afc3f1eb10420d04d6bfeace1207ba86d60431"},
{file = "oracledb-2.5.1-cp38-cp38-win32.whl", hash = "sha256:aa6ce0dfc64dc7b30bcf477f978538ba82fa7060ecd7a1b9227925b471ae3b50"},
{file = "oracledb-2.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:7a3115e4d445e3430d6f34083b7eed607309411f41472b66d145508f7b0c3770"},
{file = "oracledb-2.5.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8a2627a0d29390aaef7211c5b3f7182dfd8e76c969b39d57ee3e43c1057c6fe7"},
{file = "oracledb-2.5.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:730cd03e7fbf05acd32a221ead2a43020b3b91391597eaf728d724548f418b1b"},
{file = "oracledb-2.5.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f42524b586733daa896f675acad8b9f2fc2f4380656d60a22a109a573861fc93"},
{file = "oracledb-2.5.1-cp39-cp39-win32.whl", hash = "sha256:7958c7796df9f8c97484768c88817dec5c6d49220fc4cccdfde12a1a883f3d46"},
{file = "oracledb-2.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:92e0d176e3c76a1916f4e34fc3d84994ad74cce6b8664656c4dbecb8fa7e8c37"},
{file = "oracledb-2.5.1.tar.gz", hash = "sha256:63d17ebb95f9129d0ab9386cb632c9e667e3be2c767278cc11a8e4585468de33"},
]
[package.dependencies]
@@ -2756,13 +2830,13 @@ test = ["hypothesis (>=3.58)", "pytest (>=6.0)", "pytest-xdist"]
[[package]]
name = "paramiko"
version = "3.4.0"
version = "3.4.1"
description = "SSH2 protocol library"
optional = false
python-versions = ">=3.6"
files = [
{file = "paramiko-3.4.0-py3-none-any.whl", hash = "sha256:43f0b51115a896f9c00f59618023484cb3a14b98bbceab43394a39c6739b7ee7"},
{file = "paramiko-3.4.0.tar.gz", hash = "sha256:aac08f26a31dc4dffd92821527d1682d99d52f9ef6851968114a8728f3c274d3"},
{file = "paramiko-3.4.1-py3-none-any.whl", hash = "sha256:8e49fd2f82f84acf7ffd57c64311aa2b30e575370dc23bdb375b10262f7eac32"},
{file = "paramiko-3.4.1.tar.gz", hash = "sha256:8b15302870af7f6652f2e038975c1d2973f06046cb5d7d65355668b3ecbece0c"},
]
[package.dependencies]
@@ -3076,40 +3150,6 @@ pygments = "*"
all = ["black"]
ptipython = ["ipython"]
[[package]]
name = "ptvsd"
version = "4.3.2"
description = "Remote debugging server for Python support in Visual Studio and Visual Studio Code"
optional = false
python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*"
files = [
{file = "ptvsd-4.3.2-cp27-cp27m-macosx_10_13_x86_64.whl", hash = "sha256:22b699369a18ff28d4d1aa6a452739e50c7b7790cb16c6312d766e023c12fe27"},
{file = "ptvsd-4.3.2-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:3f839fe91d9ddca0d6a3a0afd6a1c824be1768498a737ab9333d084c5c3f3591"},
{file = "ptvsd-4.3.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:70260b4591c07bff95566d49b6a5dc3051d8558035c43c847bad9a954def46bb"},
{file = "ptvsd-4.3.2-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d2662ec37ee049c0f8f2f9a378abeb7e570d9215c19eaf0a6d7189464195009f"},
{file = "ptvsd-4.3.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d9337ebba4d099698982e090b203e85670086c4b29cf1185b2e45cd353a8053e"},
{file = "ptvsd-4.3.2-cp34-cp34m-macosx_10_13_x86_64.whl", hash = "sha256:cf09fd4d90c4c42ddd9bf853290f1a80bc2128993a3923bd3b96b68cc1acd03f"},
{file = "ptvsd-4.3.2-cp34-cp34m-manylinux1_i686.whl", hash = "sha256:ccc5c533135305709461f545feed5061c608714db38fa0f58e3f848a127b7fde"},
{file = "ptvsd-4.3.2-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:de5234bec74c47da668e1a1a21bcc9821af0cbb28b5153df78cd5abc744b29a2"},
{file = "ptvsd-4.3.2-cp35-cp35m-macosx_10_13_x86_64.whl", hash = "sha256:c893fb9d1c2ef8f980cc00ced3fd90356f86d9f59b58ee97e0e7e622b8860f76"},
{file = "ptvsd-4.3.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2bbc121bce3608501998afbe742f02b80e7d26b8fecd38f78b903f22f52a81d9"},
{file = "ptvsd-4.3.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:fad06de012a78f277318d0c308dd3d7cc1f67167f3b2e1e2f7c6caf04c03440c"},
{file = "ptvsd-4.3.2-cp35-cp35m-win32.whl", hash = "sha256:92d26aa7c8f7ffe41cb4b50a00846027027fa17acdf2d9dd8c24de77b25166c6"},
{file = "ptvsd-4.3.2-cp35-cp35m-win_amd64.whl", hash = "sha256:eda10ecd43daacc180a6fbe524992be76a877c3559e2b78016b4ada8fec10273"},
{file = "ptvsd-4.3.2-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:c01204e3f025c3f7252c79c1a8a028246d29e3ef339e1a01ddf652999f47bdea"},
{file = "ptvsd-4.3.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c97c71835dde7e67fc7b06398bee1c012559a0784ebda9cf8acaf176c7ae766c"},
{file = "ptvsd-4.3.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:612948a045fcf9c8931cd306972902440278f34de7ca684b49d4caeec9f1ec62"},
{file = "ptvsd-4.3.2-cp36-cp36m-win32.whl", hash = "sha256:72d114baa5737baf29c8068d1ccdd93cbb332d2030601c888eed0e3761b588d7"},
{file = "ptvsd-4.3.2-cp36-cp36m-win_amd64.whl", hash = "sha256:58508485a1609a495dd45829bd6d219303cf9edef5ca1f01a9ed8ffaa87f390c"},
{file = "ptvsd-4.3.2-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:20f48ffed42a6beb879c250d82662e175ad59cc46a29c95c6a4472ae413199c5"},
{file = "ptvsd-4.3.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:b9970e3dc987eb2a6001af6c9d2f726dd6455cfc6d47e0f51925cbdee7ea2157"},
{file = "ptvsd-4.3.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1d3d82ecc82186d099992a748556e6e54037f5c5e4d3fc9bba3e2302354be0d4"},
{file = "ptvsd-4.3.2-cp37-cp37m-win32.whl", hash = "sha256:10745fbb788001959b4de405198d8bd5243611a88fb5a2e2c6800245bc0ddd74"},
{file = "ptvsd-4.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:90cbd082e7a9089664888d0d94aca760202f080133fca8f3fe65c48ed6b9e39d"},
{file = "ptvsd-4.3.2-py2.py3-none-any.whl", hash = "sha256:459137736068bb02515040b2ed2738169cb30d69a38e0fd5dffcba255f41e68d"},
{file = "ptvsd-4.3.2.zip", hash = "sha256:3b05c06018fdbce5943c50fb0baac695b5c11326f9e21a5266c854306bda28ab"},
]
[[package]]
name = "pure-sasl"
version = "0.6.2"
@@ -3151,23 +3191,25 @@ pyasn1 = ">=0.4.6,<0.6.0"
[[package]]
name = "pyathena"
version = "1.11.5"
version = "2.25.2"
description = "Python DB API 2.0 (PEP 249) client for Amazon Athena"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
python-versions = ">=3.7.1,<4.0.0"
files = [
{file = "PyAthena-1.11.5-py2.py3-none-any.whl", hash = "sha256:8cc5d40236993fe5241bb625e78d0a0a149e629b74569a9636b49168448a7ac8"},
{file = "PyAthena-1.11.5.tar.gz", hash = "sha256:86c0f4d10528de44fcd63222506949b010dff36ad57116e4c1274c1cfa9477d0"},
{file = "pyathena-2.25.2-py3-none-any.whl", hash = "sha256:df7855fec5cc675511431d7c72b814346ebd7e51ed32181ec95847154f79210b"},
{file = "pyathena-2.25.2.tar.gz", hash = "sha256:aebb8254dd7b2a450841ee3552bf443002a2deaed93fae0ae6f4258b5eb2d367"},
]
[package.dependencies]
boto3 = ">=1.4.4"
botocore = ">=1.5.52"
future = "*"
boto3 = ">=1.26.4"
botocore = ">=1.29.4"
fsspec = "*"
tenacity = ">=4.1.0"
[package.extras]
pandas = ["pandas (>=0.24.0)", "pyarrow (>=0.15.0)"]
arrow = ["pyarrow (>=7.0.0)"]
fastparquet = ["fastparquet (>=0.4.0)"]
pandas = ["pandas (>=1.3.0)"]
sqlalchemy = ["sqlalchemy (>=1.0.0,<2.0.0)"]
[[package]]
@@ -5112,13 +5154,13 @@ six = ">=1.10.0"
[[package]]
name = "virtualenv"
version = "20.25.0"
version = "20.26.6"
description = "Virtual Python Environment builder"
optional = false
python-versions = ">=3.7"
files = [
{file = "virtualenv-20.25.0-py3-none-any.whl", hash = "sha256:4238949c5ffe6876362d9c0180fc6c3a824a7b12b80604eeb8085f2ed7460de3"},
{file = "virtualenv-20.25.0.tar.gz", hash = "sha256:bf51c0d9c7dd63ea8e44086fa1e4fb1093a31e963b86959257378aef020e1f1b"},
{file = "virtualenv-20.26.6-py3-none-any.whl", hash = "sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2"},
{file = "virtualenv-20.26.6.tar.gz", hash = "sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48"},
]
[package.dependencies]
@@ -5127,7 +5169,7 @@ filelock = ">=3.12.2,<4"
platformdirs = ">=3.9.1,<5"
[package.extras]
docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"]
[[package]]
@@ -5451,4 +5493,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.8,<3.11"
content-hash = "00eb72e7f054606807de9f5dc727b446684a22ec7d450e18b5be9592ef017924"
content-hash = "93b13c8a960e148463fba93cfd826c0f3e7bd822bbda55af7ba708baead293df"


@@ -12,7 +12,7 @@ force-exclude = '''
[tool.poetry]
name = "redash"
version = "24.11.0-dev"
version = "25.03.0-dev"
description = "Make Your Company Data Driven. Connect to any data source, easily visualize, dashboard and share your data."
authors = ["Arik Fraimovich <arik@redash.io>"]
# to be added to/removed from the mailing list, please reach out to Arik via the above email or Discord
@@ -46,7 +46,7 @@ greenlet = "2.0.2"
gunicorn = "22.0.0"
httplib2 = "0.19.0"
itsdangerous = "2.1.2"
jinja2 = "3.1.4"
jinja2 = "3.1.5"
jsonschema = "3.1.1"
markupsafe = "2.1.1"
maxminddb-geolite2 = "2018.703"
@@ -86,6 +86,9 @@ wtforms = "2.2.1"
xlsxwriter = "1.2.2"
tzlocal = "4.3.1"
pyodbc = "5.1.0"
debugpy = "^1.8.9"
paramiko = "3.4.1"
oracledb = "2.5.1"
[tool.poetry.group.all_ds]
optional = true
@@ -111,12 +114,11 @@ nzalchemy = "^11.0.2"
nzpy = ">=1.15"
oauth2client = "4.1.3"
openpyxl = "3.0.7"
oracledb = "2.1.2"
pandas = "1.3.4"
phoenixdb = "0.7"
pinotdb = ">=0.4.5"
protobuf = "3.20.2"
pyathena = ">=1.5.0,<=1.11.5"
pyathena = "2.25.2"
pydgraph = "2.0.2"
pydruid = "0.5.7"
pyexasol = "0.12.0"
@@ -156,7 +158,6 @@ jwcrypto = "1.5.6"
mock = "5.0.2"
pre-commit = "3.3.3"
ptpython = "3.0.23"
ptvsd = "4.3.2"
pytest-cov = "4.1.0"
watchdog = "3.0.0"
ruff = "0.0.289"


@@ -14,13 +14,14 @@ from redash.app import create_app # noqa
from redash.destinations import import_destinations
from redash.query_runner import import_query_runners
__version__ = "24.11.0-dev"
__version__ = "25.03.0-dev"
if os.environ.get("REMOTE_DEBUG"):
import ptvsd
import debugpy
ptvsd.enable_attach(address=("0.0.0.0", 5678))
debugpy.listen(("0.0.0.0", 5678))
debugpy.wait_for_client()
def setup_logging():

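ptvsd has been archived upstream in favor of debugpy, which is what the dependency and hook changes above reflect. One behavioral difference worth knowing: `ptvsd.enable_attach()` returned immediately, while the new `debugpy.wait_for_client()` call blocks startup until a debugger attaches. A minimal sketch of the hook, using the port from the diff:

```python
import os

# Guarded exactly like the diff: only active when REMOTE_DEBUG is set.
if os.environ.get("REMOTE_DEBUG"):
    import debugpy

    debugpy.listen(("0.0.0.0", 5678))  # open the debug adapter on port 5678
    debugpy.wait_for_client()          # block until an IDE (e.g. VS Code) attaches
```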

@@ -36,10 +36,14 @@ def create_app():
from .metrics import request as request_metrics
from .models import db, users
from .utils import sentry
from .version_check import reset_new_version_status
sentry.init()
app = Redash()
# Check and update the cached version for use by the client
reset_new_version_status()
security.init_app(app)
request_metrics.init_app(app)
db.init_app(app)


@@ -1,3 +1,5 @@
import html
import json
import logging
from copy import deepcopy
@@ -37,31 +39,83 @@ class Webex(BaseDestination):
@staticmethod
def formatted_attachments_template(subject, description, query_link, alert_link):
return [
{
"contentType": "application/vnd.microsoft.card.adaptive",
"content": {
"$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
"type": "AdaptiveCard",
"version": "1.0",
"body": [
# Attempt to parse the description to find a 2D array
try:
# Extract the part of the description that looks like a JSON array
start_index = description.find("[")
end_index = description.rfind("]") + 1
json_array_str = description[start_index:end_index]
# Decode HTML entities
json_array_str = html.unescape(json_array_str)
# Replace single quotes with double quotes for valid JSON
json_array_str = json_array_str.replace("'", '"')
# Load the JSON array
data_array = json.loads(json_array_str)
# Check if it's a 2D array
if isinstance(data_array, list) and all(isinstance(i, list) for i in data_array):
# Create a table for the Adaptive Card
table_rows = []
for row in data_array:
table_rows.append(
{
"type": "ColumnSet",
"columns": [
{
"type": "Column",
"width": 4,
"items": [
{"type": "Column", "items": [{"type": "TextBlock", "text": str(item), "wrap": True}]}
for item in row
],
}
],
}
)
# Create the body of the card with the table
body = (
[
{
"type": "TextBlock",
"text": {subject},
"text": f"{subject}",
"weight": "bolder",
"size": "medium",
"wrap": True,
},
{
"type": "TextBlock",
"text": {description},
"text": f"{description[:start_index]}",
"isSubtle": True,
"wrap": True,
},
]
+ table_rows
+ [
{
"type": "TextBlock",
"text": f"Click [here]({query_link}) to check your query!",
"wrap": True,
"isSubtle": True,
},
{
"type": "TextBlock",
"text": f"Click [here]({alert_link}) to check your alert!",
"wrap": True,
"isSubtle": True,
},
]
)
else:
# Fallback to the original description if no valid 2D array is found
body = [
{
"type": "TextBlock",
"text": f"{subject}",
"weight": "bolder",
"size": "medium",
"wrap": True,
},
{
"type": "TextBlock",
"text": f"{description}",
"isSubtle": True,
"wrap": True,
},
@@ -77,11 +131,45 @@ class Webex(BaseDestination):
"wrap": True,
"isSubtle": True,
},
],
]
except json.JSONDecodeError:
# If parsing fails, fallback to the original description
body = [
{
"type": "TextBlock",
"text": f"{subject}",
"weight": "bolder",
"size": "medium",
"wrap": True,
},
],
}
],
{
"type": "TextBlock",
"text": f"{description}",
"isSubtle": True,
"wrap": True,
},
{
"type": "TextBlock",
"text": f"Click [here]({query_link}) to check your query!",
"wrap": True,
"isSubtle": True,
},
{
"type": "TextBlock",
"text": f"Click [here]({alert_link}) to check your alert!",
"wrap": True,
"isSubtle": True,
},
]
return [
{
"contentType": "application/vnd.microsoft.card.adaptive",
"content": {
"$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
"type": "AdaptiveCard",
"version": "1.0",
"body": body,
},
}
]
@@ -116,6 +204,10 @@ class Webex(BaseDestination):
# destinations is guaranteed to be a comma-separated string
for destination_id in destinations.split(","):
destination_id = destination_id.strip() # Remove any leading or trailing whitespace
if not destination_id: # Check if the destination_id is empty or blank
continue # Skip to the next iteration if it's empty or blank
payload = deepcopy(template_payload)
payload[payload_tag] = destination_id
self.post_message(payload, headers)

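The table detection in the Webex destination above hinges on recovering a Python-style 2D list from the rendered alert body. A standalone sketch of that parsing step, using a hypothetical body string:

```python
import html
import json

# Hypothetical alert body containing an embedded QUERY_RESULT_TABLE.
description = "Top rows: [['Col1', 'Col2'], ['Val1', 'Val2']]"

start_index = description.find("[")
end_index = description.rfind("]") + 1
candidate = html.unescape(description[start_index:end_index])
candidate = candidate.replace("'", '"')  # Python-repr quotes -> JSON quotes

try:
    data = json.loads(candidate)
    is_table = isinstance(data, list) and all(isinstance(r, list) for r in data)
except json.JSONDecodeError:
    is_table = False  # fall back to rendering the body as plain text

print(is_table)  # True
```

Note that the quote substitution assumes no cell value contains an apostrophe; a body that does will fail to parse and take the plain-text fallback, same as in the destination code.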

@@ -15,6 +15,7 @@ from redash.authentication.account import (
)
from redash.handlers import routes
from redash.handlers.base import json_response, org_scoped_rule
from redash.version_check import get_latest_version
logger = logging.getLogger(__name__)
@@ -256,11 +257,15 @@ def number_format_config():
def client_config():
if not current_user.is_api_user() and current_user.is_authenticated:
client_config_inner = {
client_config = {
"newVersionAvailable": bool(get_latest_version()),
"version": __version__,
}
else:
client_config_inner = {}
client_config = {}
if current_user.has_permission("admin") and current_org.get_setting("beacon_consent") is None:
client_config["showBeaconConsentMessage"] = True
defaults = {
"allowScriptsInUserInput": settings.ALLOW_SCRIPTS_IN_USER_INPUT,
@@ -280,12 +285,12 @@ def client_config():
"tableCellMaxJSONSize": settings.TABLE_CELL_MAX_JSON_SIZE,
}
client_config_inner.update(defaults)
client_config_inner.update({"basePath": base_href()})
client_config_inner.update(date_time_format_config())
client_config_inner.update(number_format_config())
client_config.update(defaults)
client_config.update({"basePath": base_href()})
client_config.update(date_time_format_config())
client_config.update(number_format_config())
return client_config_inner
return client_config
def messages():


@@ -1,12 +1,13 @@
from flask import g, redirect, render_template, request, url_for
from flask_login import login_user
from wtforms import Form, PasswordField, StringField, validators
from wtforms import BooleanField, Form, PasswordField, StringField, validators
from wtforms.fields.html5 import EmailField
from redash import settings
from redash.authentication.org_resolving import current_org
from redash.handlers.base import routes
from redash.models import Group, Organization, User, db
from redash.tasks.general import subscribe
class SetupForm(Form):
@@ -14,6 +15,8 @@ class SetupForm(Form):
email = EmailField("Email Address", validators=[validators.Email()])
password = PasswordField("Password", validators=[validators.Length(6)])
org_name = StringField("Organization Name", validators=[validators.InputRequired()])
security_notifications = BooleanField()
newsletter = BooleanField()
def create_org(org_name, user_name, email, password):
@@ -54,6 +57,8 @@ def setup():
return redirect("/")
form = SetupForm(request.form)
form.newsletter.data = True
form.security_notifications.data = True
if request.method == "POST" and form.validate():
default_org, user = create_org(form.org_name.data, form.name.data, form.email.data, form.password.data)
@@ -61,6 +66,10 @@ def setup():
g.org = default_org
login_user(user)
# signup to newsletter if needed
if form.newsletter.data or form.security_notifications.data:
subscribe.delay(form.data)
return redirect(url_for("redash.index", org_slug=None))
return render_template("setup.html", form=form)


@@ -908,6 +908,7 @@ def next_state(op, value, threshold):
# boolean value is Python specific and most likely will be confusing to
# users.
value = str(value).lower()
value_is_number = False
else:
try:
value = float(value)
@@ -969,6 +970,7 @@ class Alert(TimestampMixin, BelongsToOrgMixin, db.Model):
def evaluate(self):
data = self.query_rel.latest_query_data.data if self.query_rel.latest_query_data else None
new_state = self.UNKNOWN_STATE
if data and data["rows"] and self.options["column"] in data["rows"][0]:
op = OPERATORS.get(self.options["op"], lambda v, t: False)
@@ -997,9 +999,8 @@ class Alert(TimestampMixin, BelongsToOrgMixin, db.Model):
threshold = self.options["value"]
if value is not None:
new_state = next_state(op, value, threshold)
else:
new_state = self.UNKNOWN_STATE
return new_state

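The added `value_is_number = False` closes exactly the path from the traceback in the commit message: for boolean values the old code never assigned the flag before reading it. A stripped-down reproduction of the shape (illustrative, not the real `next_state`):

```python
def next_state_shape(value):
    if isinstance(value, bool):
        value = str(value).lower()
        value_is_number = False  # the added line
    else:
        try:
            value = float(value)
            value_is_number = True
        except ValueError:
            value_is_number = False
    # Without the added line, a boolean input reaches this read with the
    # local never assigned, raising UnboundLocalError.
    return value_is_number

print(next_state_shape(True))   # False
print(next_state_shape("1.5"))  # True
```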

@@ -90,15 +90,26 @@ class Athena(BaseQueryRunner):
"title": "Athena cost per Tb scanned (USD)",
"default": 5,
},
"result_reuse_enable": {
"type": "boolean",
"title": "Reuse Athena query results",
},
"result_reuse_minutes": {
"type": "number",
"title": "Minutes to reuse Athena query results",
"default": 60,
},
},
"required": ["region", "s3_staging_dir"],
"extra_options": ["glue", "catalog_ids", "cost_per_tb"],
"extra_options": ["glue", "catalog_ids", "cost_per_tb", "result_reuse_enable", "result_reuse_minutes"],
"order": [
"region",
"s3_staging_dir",
"schema",
"work_group",
"cost_per_tb",
"result_reuse_enable",
"result_reuse_minutes",
],
"secret": ["aws_secret_key"],
}
@@ -247,6 +258,8 @@ class Athena(BaseQueryRunner):
kms_key=self.configuration.get("kms_key", None),
work_group=self.configuration.get("work_group", "primary"),
formatter=SimpleFormatter(),
result_reuse_enable=self.configuration.get("result_reuse_enable", False),
result_reuse_minutes=self.configuration.get("result_reuse_minutes", 60),
**self._get_iam_credentials(user=user),
).cursor()

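The two new settings map directly onto PyAthena's query-result-reuse keywords, the same ones forwarded to `connect()` in the hunk above. A minimal sketch with placeholder bucket and region, assuming AWS credentials are available in the environment:

```python
from pyathena import connect

cursor = connect(
    s3_staging_dir="s3://example-bucket/athena-results/",  # placeholder
    region_name="us-east-1",                               # placeholder
    work_group="primary",
    result_reuse_enable=True,   # let Athena serve recently cached results...
    result_reuse_minutes=60,    # ...no older than an hour
).cursor()
cursor.execute("SELECT 1")
print(cursor.fetchall())
```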

@@ -7,6 +7,7 @@ from base64 import b64decode
from redash import settings
from redash.query_runner import (
TYPE_BOOLEAN,
TYPE_DATE,
TYPE_DATETIME,
TYPE_FLOAT,
TYPE_INTEGER,
@@ -37,6 +38,8 @@ types_map = {
"BOOLEAN": TYPE_BOOLEAN,
"STRING": TYPE_STRING,
"TIMESTAMP": TYPE_DATETIME,
"DATETIME": TYPE_DATETIME,
"DATE": TYPE_DATE,
}
@@ -301,7 +304,7 @@ class BigQuery(BaseQueryRunner):
datasets = self._get_project_datasets(project_id)
query_base = """
SELECT table_schema, table_name, field_path
SELECT table_schema, table_name, field_path, data_type
FROM `{dataset_id}`.INFORMATION_SCHEMA.COLUMN_FIELD_PATHS
WHERE table_schema NOT IN ('information_schema')
"""
@@ -322,7 +325,7 @@ class BigQuery(BaseQueryRunner):
table_name = "{0}.{1}".format(row["table_schema"], row["table_name"])
if table_name not in schema:
schema[table_name] = {"name": table_name, "columns": []}
schema[table_name]["columns"].append(row["field_path"])
schema[table_name]["columns"].append({"name": row["field_path"], "type": row["data_type"]})
return list(schema.values())

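With `data_type` added to the `INFORMATION_SCHEMA` query, each schema entry goes from a flat list of column names to name/type pairs that the Schema Browser can display. A sketch of the mapping over fabricated rows:

```python
# Fabricated COLUMN_FIELD_PATHS rows, for illustration only.
rows = [
    {"table_schema": "sales", "table_name": "orders",
     "field_path": "order_id", "data_type": "INT64"},
    {"table_schema": "sales", "table_name": "orders",
     "field_path": "created_at", "data_type": "TIMESTAMP"},
]

schema = {}
for row in rows:
    table_name = "{0}.{1}".format(row["table_schema"], row["table_name"])
    schema.setdefault(table_name, {"name": table_name, "columns": []})
    schema[table_name]["columns"].append(
        {"name": row["field_path"], "type": row["data_type"]}
    )

print(schema["sales.orders"]["columns"])
# [{'name': 'order_id', 'type': 'INT64'},
#  {'name': 'created_at', 'type': 'TIMESTAMP'}]
```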

@@ -91,8 +91,8 @@ class BaseElasticSearch(BaseQueryRunner):
logger.setLevel(logging.DEBUG)
self.server_url = self.configuration["server"]
if self.server_url[-1] == "/":
self.server_url = self.configuration.get("server", "")
if self.server_url and self.server_url[-1] == "/":
self.server_url = self.server_url[:-1]
basic_auth_user = self.configuration.get("basic_auth_user", None)


@@ -188,7 +188,7 @@ class MongoDB(BaseQueryRunner):
self.syntax = "json"
self.db_name = self.configuration["dbName"]
self.db_name = self.configuration.get("dbName", "")
self.is_replica_set = (
True if "replicaSetName" in self.configuration and self.configuration["replicaSetName"] else False


@@ -152,7 +152,7 @@ class Mysql(BaseSQLQueryRunner):
col.table_name as table_name,
col.column_name as column_name
FROM `information_schema`.`columns` col
WHERE col.table_schema NOT IN ('information_schema', 'performance_schema', 'mysql', 'sys');
WHERE LOWER(col.table_schema) NOT IN ('information_schema', 'performance_schema', 'mysql', 'sys');
"""
results, error = self.run_query(query, None)

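For context: MySQL reports its system schemas in lower case, while TiDB reports `INFORMATION_SCHEMA` in upper case, so the case-sensitive exclusion list only matched on MySQL. Lower-casing before the comparison covers both, as this toy check shows:

```python
# The exclusion list from the query above.
system_schemas = {"information_schema", "performance_schema", "mysql", "sys"}

print("information_schema" in system_schemas)          # MySQL: True
print("INFORMATION_SCHEMA" in system_schemas)          # TiDB, unnormalized: False
print("INFORMATION_SCHEMA".lower() in system_schemas)  # TiDB, normalized: True
```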

@@ -55,12 +55,13 @@ class Script(BaseQueryRunner):
def __init__(self, configuration):
super(Script, self).__init__(configuration)
path = self.configuration.get("path", "")
# If path is * allow any execution path
if self.configuration["path"] == "*":
if path == "*":
return
# Poor man's protection against running scripts from outside the scripts directory
if self.configuration["path"].find("../") > -1:
if path.find("../") > -1:
raise ValueError("Scripts can only be run from the configured scripts directory")
def test_connection(self):


@@ -28,7 +28,7 @@ class Sqlite(BaseSQLQueryRunner):
def __init__(self, configuration):
super(Sqlite, self).__init__(configuration)
self._dbpath = self.configuration["dbpath"]
self._dbpath = self.configuration.get("dbpath", "")
def _get_tables(self, schema):
query_table = "select tbl_name from sqlite_master where type='table'"

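This hunk applies the same defensive pattern as the Elasticsearch, MongoDB, and Script changes above: replace direct `configuration[...]` indexing with `configuration.get(...)` so an incomplete saved configuration degrades gracefully instead of raising at construction time. The difference in miniature (key name taken from the SQLite hunk):

```python
configuration = {}  # e.g. a data source saved without the optional key

try:
    configuration["dbpath"]  # old style: raises KeyError immediately
except KeyError:
    pass

dbpath = configuration.get("dbpath", "")  # new style: safe empty default
print(repr(dbpath))  # ''
```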

@@ -413,6 +413,7 @@ PAGE_SIZE_OPTIONS = list(
TABLE_CELL_MAX_JSON_SIZE = int(os.environ.get("REDASH_TABLE_CELL_MAX_JSON_SIZE", 50000))
# Features:
VERSION_CHECK = parse_boolean(os.environ.get("REDASH_VERSION_CHECK", "true"))
FEATURE_DISABLE_REFRESH_QUERIES = parse_boolean(os.environ.get("REDASH_FEATURE_DISABLE_REFRESH_QUERIES", "false"))
FEATURE_SHOW_QUERY_RESULTS_COUNT = parse_boolean(os.environ.get("REDASH_FEATURE_SHOW_QUERY_RESULTS_COUNT", "true"))
FEATURE_ALLOW_CUSTOM_JS_VISUALIZATIONS = parse_boolean(


@@ -45,6 +45,7 @@ HIDE_PLOTLY_MODE_BAR = parse_boolean(os.environ.get("HIDE_PLOTLY_MODE_BAR", "fal
DISABLE_PUBLIC_URLS = parse_boolean(os.environ.get("REDASH_DISABLE_PUBLIC_URLS", "false"))
settings = {
"beacon_consent": None,
"auth_password_login_enabled": PASSWORD_LOGIN_ENABLED,
"auth_saml_enabled": SAML_LOGIN_ENABLED,
"auth_saml_type": SAML_LOGIN_TYPE,


@@ -7,6 +7,7 @@ from redash.tasks.general import (
record_event,
send_mail,
sync_user_details,
version_check,
)
from redash.tasks.queries import (
cleanup_query_results,


@@ -5,6 +5,7 @@ from redash import mail, models, settings
from redash.models import users
from redash.query_runner import NotSupported
from redash.tasks.worker import Queue
from redash.version_check import run_version_check
from redash.worker import get_job_logger, job
logger = get_job_logger(__name__)
@@ -29,6 +30,27 @@ def record_event(raw_event):
logger.exception("Failed posting to %s", hook)
def version_check():
run_version_check()
@job("default")
def subscribe(form):
logger.info(
"Subscribing to: [security notifications=%s], [newsletter=%s]",
form["security_notifications"],
form["newsletter"],
)
data = {
"admin_name": form["name"],
"admin_email": form["email"],
"org_name": form["org_name"],
"security_notifications": form["security_notifications"],
"newsletter": form["newsletter"],
}
requests.post("https://version.redash.io/subscribe", json=data)
@job("emails")
def send_mail(to, subject, html, text):
try:


@@ -8,7 +8,7 @@ from rq_scheduler import Scheduler
from redash import rq_redis_connection, settings
from redash.tasks.failure_report import send_aggregated_errors
from redash.tasks.general import sync_user_details
from redash.tasks.general import sync_user_details, version_check
from redash.tasks.queries import (
cleanup_query_results,
empty_schedules,
@@ -79,6 +79,9 @@ def periodic_job_definitions():
},
]
if settings.VERSION_CHECK:
jobs.append({"func": version_check, "interval": timedelta(days=1)})
if settings.QUERY_RESULTS_CLEANUP_ENABLED:
jobs.append({"func": cleanup_query_results, "interval": timedelta(minutes=5)})


@@ -42,6 +42,20 @@
{{ render_field(form.email) }}
{{ render_field(form.password) }}
<div class="checkbox">
<label>
{{ form.security_notifications() }}
Subscribe to Security Notifications
</label>
</div>
<div class="checkbox">
<label>
{{ form.newsletter() }}
Subscribe to newsletter (version updates, no more than once a month)
</label>
</div>
<h4 class="m-t-25">General</h4>
{{ render_field(form.org_name, help_block="Used in email notifications and the UI.") }}


@@ -6,6 +6,7 @@ import decimal
import hashlib
import io
import json
import math
import os
import random
import re
@@ -120,6 +121,17 @@ def json_loads(data, *args, **kwargs):
return json.loads(data, *args, **kwargs)
# Convert NaN, Inf, and -Inf to None, as they are not valid JSON values.
def _sanitize_data(data):
if isinstance(data, dict):
return {k: _sanitize_data(v) for k, v in data.items()}
if isinstance(data, list):
return [_sanitize_data(v) for v in data]
if isinstance(data, float) and (math.isnan(data) or math.isinf(data)):
return None
return data
def json_dumps(data, *args, **kwargs):
"""A custom JSON dumping function which passes all parameters to the
json.dumps function."""
@@ -128,7 +140,7 @@ def json_dumps(data, *args, **kwargs):
# Float value nan or inf in Python should be render to None or null in json.
# Using allow_nan = True will make Python render nan as NaN, leading to parse error in front-end
kwargs.setdefault("allow_nan", False)
return json.dumps(data, *args, **kwargs)
return json.dumps(_sanitize_data(data), *args, **kwargs)
def mustache_render(template, context=None, **kwargs):

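With `allow_nan=False`, the stdlib encoder rejects NaN/Inf outright, so the sanitizer has to rewrite them to `None` before encoding. A quick demonstration of both halves (the helper mirrors `_sanitize_data` above):

```python
import json
import math

def _sanitize(data):
    if isinstance(data, dict):
        return {k: _sanitize(v) for k, v in data.items()}
    if isinstance(data, list):
        return [_sanitize(v) for v in data]
    if isinstance(data, float) and (math.isnan(data) or math.isinf(data)):
        return None
    return data

row = {"value": float("nan")}
try:
    json.dumps(row, allow_nan=False)
except ValueError:
    pass  # "Out of range float values are not JSON compliant"

print(json.dumps(_sanitize(row), allow_nan=False))  # {"value": null}
```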

@@ -33,7 +33,7 @@ from sqlalchemy.orm import mapperlib
from sqlalchemy.orm.properties import ColumnProperty
from sqlalchemy.orm.query import _ColumnEntity
from sqlalchemy.orm.util import AliasedInsp
from sqlalchemy.sql.expression import asc, desc
from sqlalchemy.sql.expression import asc, desc, nullslast
def get_query_descriptor(query, entity, attr):
@@ -225,7 +225,7 @@ class QuerySorter:
def assign_order_by(self, entity, attr, func):
expr = get_query_descriptor(self.query, entity, attr)
if expr is not None:
return self.query.order_by(func(expr))
return self.query.order_by(nullslast(func(expr)))
if not self.silent:
raise QuerySorterException("Could not sort query with expression '%s'" % attr)
return self.query

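Wrapping every ORDER BY expression in `nullslast()` emits `NULLS LAST`, so rows with NULL sort keys land at the end regardless of sort direction (PostgreSQL otherwise puts NULLs first under `DESC`). A minimal SQLAlchemy sketch with a hypothetical model:

```python
from sqlalchemy import Column, DateTime, Integer, select
from sqlalchemy.orm import declarative_base
from sqlalchemy.sql.expression import desc, nullslast

Base = declarative_base()

class Query(Base):
    # Hypothetical minimal model; `schedule` is NULL for unscheduled queries.
    __tablename__ = "queries"
    id = Column(Integer, primary_key=True)
    schedule = Column(DateTime, nullable=True)

stmt = select(Query).order_by(nullslast(desc(Query.schedule)))
print(stmt)  # ... ORDER BY queries.schedule DESC NULLS LAST
```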
redash/version_check.py (new file, 103 lines)

@@ -0,0 +1,103 @@
import logging
import requests
import semver
from redash import __version__ as current_version
from redash import redis_connection
from redash.models import Organization, db
REDIS_KEY = "new_version_available"
def usage_data():
counts_query = """
SELECT 'users_count' as name, count(0) as value
FROM users
WHERE disabled_at is null
UNION ALL
SELECT 'queries_count' as name, count(0) as value
FROM queries
WHERE is_archived is false
UNION ALL
SELECT 'alerts_count' as name, count(0) as value
FROM alerts
UNION ALL
SELECT 'dashboards_count' as name, count(0) as value
FROM dashboards
WHERE is_archived is false
UNION ALL
SELECT 'widgets_count' as name, count(0) as value
FROM widgets
WHERE visualization_id is not null
UNION ALL
SELECT 'textbox_count' as name, count(0) as value
FROM widgets
WHERE visualization_id is null
"""
data_sources_query = "SELECT type, count(0) FROM data_sources GROUP by 1"
visualizations_query = "SELECT type, count(0) FROM visualizations GROUP by 1"
destinations_query = "SELECT type, count(0) FROM notification_destinations GROUP by 1"
data = {name: value for (name, value) in db.session.execute(counts_query)}
data["data_sources"] = {name: value for (name, value) in db.session.execute(data_sources_query)}
data["visualization_types"] = {name: value for (name, value) in db.session.execute(visualizations_query)}
data["destination_types"] = {name: value for (name, value) in db.session.execute(destinations_query)}
return data
def run_version_check():
logging.info("Performing version check.")
logging.info("Current version: %s", current_version)
data = {"current_version": current_version}
if Organization.query.first().get_setting("beacon_consent"):
data["usage"] = usage_data()
try:
response = requests.post(
"https://version.redash.io/api/report?channel=stable",
json=data,
timeout=3.0,
)
latest_version = response.json()["release"]["version"]
_compare_and_update(latest_version)
except requests.RequestException:
logging.exception("Failed checking for new version.")
except (ValueError, KeyError):
logging.exception("Failed checking for new version (probably bad/non-JSON response).")
def reset_new_version_status():
latest_version = get_latest_version()
if latest_version:
_compare_and_update(latest_version)
def get_latest_version():
return redis_connection.get(REDIS_KEY)
def _compare_and_update(latest_version):
# TODO: support alpha channel (allow setting which channel to check & parse build number)
is_newer = semver.compare(current_version, latest_version) == -1
logging.info("Latest version: %s (newer: %s)", latest_version, is_newer)
if is_newer:
redis_connection.set(REDIS_KEY, latest_version)
else:
redis_connection.delete(REDIS_KEY)

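`_compare_and_update` leans on `semver.compare`, which behaves like a classic comparator and returns -1, 0, or 1. For example:

```python
import semver

print(semver.compare("10.0.0", "10.1.0"))  # -1: a newer release exists
print(semver.compare("10.1.0", "10.1.0"))  #  0: up to date
print(semver.compare("10.1.0", "10.0.0"))  #  1: running ahead of stable
```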

@@ -261,15 +261,19 @@ def test_webex_notify_calls_requests_post():
alert.name = "Test Alert"
alert.custom_subject = "Test custom subject"
alert.custom_body = "Test custom body"
alert.render_template = mock.Mock(return_value={"Rendered": "template"})
query = mock.Mock()
query.id = 1
user = mock.Mock()
app = mock.Mock()
host = "https://localhost:5000"
options = {"webex_bot_token": "abcd", "to_room_ids": "1234"}
options = {
"webex_bot_token": "abcd",
"to_room_ids": "1234,5678",
"to_person_emails": "example1@test.com,example2@test.com",
}
metadata = {"Scheduled": False}
new_state = Alert.TRIGGERED_STATE
@@ -277,7 +281,7 @@ def test_webex_notify_calls_requests_post():
with mock.patch("redash.destinations.webex.requests.post") as mock_post:
mock_response = mock.Mock()
mock_response.status_code = 204
mock_response.status_code = 200
mock_post.return_value = mock_response
destination.notify(alert, query, user, new_state, app, host, metadata, options)
@@ -285,13 +289,111 @@ def test_webex_notify_calls_requests_post():
query_link = f"{host}/queries/{query.id}"
alert_link = f"{host}/alerts/{alert.id}"
formatted_attachments = Webex.formatted_attachments_template(
expected_attachments = Webex.formatted_attachments_template(
alert.custom_subject, alert.custom_body, query_link, alert_link
)
expected_payload_room = {
"markdown": alert.custom_subject + "\n" + alert.custom_body,
"attachments": expected_attachments,
"roomId": "1234",
}
expected_payload_email = {
"markdown": alert.custom_subject + "\n" + alert.custom_body,
"attachments": expected_attachments,
"toPersonEmail": "example1@test.com",
}
# Check that requests.post was called for both roomId and toPersonEmail destinations
mock_post.assert_any_call(
destination.api_base_url,
json=expected_payload_room,
headers={"Authorization": "Bearer abcd"},
timeout=5.0,
)
mock_post.assert_any_call(
destination.api_base_url,
json=expected_payload_email,
headers={"Authorization": "Bearer abcd"},
timeout=5.0,
)
assert mock_response.status_code == 200
def test_webex_notify_handles_blank_entries():
alert = mock.Mock(spec_set=["id", "name", "custom_subject", "custom_body", "render_template"])
alert.id = 1
alert.name = "Test Alert"
alert.custom_subject = "Test custom subject"
alert.custom_body = "Test custom body"
alert.render_template = mock.Mock(return_value={"Rendered": "template"})
query = mock.Mock()
query.id = 1
user = mock.Mock()
app = mock.Mock()
host = "https://localhost:5000"
options = {
"webex_bot_token": "abcd",
"to_room_ids": "",
"to_person_emails": "",
}
metadata = {"Scheduled": False}
new_state = Alert.TRIGGERED_STATE
destination = Webex(options)
with mock.patch("redash.destinations.webex.requests.post") as mock_post:
destination.notify(alert, query, user, new_state, app, host, metadata, options)
# Ensure no API calls are made when destinations are blank
mock_post.assert_not_called()
def test_webex_notify_handles_2d_array():
alert = mock.Mock(spec_set=["id", "name", "custom_subject", "custom_body", "render_template"])
alert.id = 1
alert.name = "Test Alert"
alert.custom_subject = "Test custom subject"
alert.custom_body = "Test custom body with table [['Col1', 'Col2'], ['Val1', 'Val2']]"
alert.render_template = mock.Mock(return_value={"Rendered": "template"})
query = mock.Mock()
query.id = 1
user = mock.Mock()
app = mock.Mock()
host = "https://localhost:5000"
options = {
"webex_bot_token": "abcd",
"to_room_ids": "1234",
}
metadata = {"Scheduled": False}
new_state = Alert.TRIGGERED_STATE
destination = Webex(options)
with mock.patch("redash.destinations.webex.requests.post") as mock_post:
mock_response = mock.Mock()
mock_response.status_code = 200
mock_post.return_value = mock_response
destination.notify(alert, query, user, new_state, app, host, metadata, options)
query_link = f"{host}/queries/{query.id}"
alert_link = f"{host}/alerts/{alert.id}"
expected_attachments = Webex.formatted_attachments_template(
alert.custom_subject, alert.custom_body, query_link, alert_link
)
expected_payload = {
"markdown": alert.custom_subject + "\n" + alert.custom_body,
"attachments": formatted_attachments,
"attachments": expected_attachments,
"roomId": "1234",
}
@@ -302,7 +404,60 @@ def test_webex_notify_calls_requests_post():
timeout=5.0,
)
assert mock_response.status_code == 204
assert mock_response.status_code == 200
def test_webex_notify_handles_1d_array():
alert = mock.Mock(spec_set=["id", "name", "custom_subject", "custom_body", "render_template"])
alert.id = 1
alert.name = "Test Alert"
alert.custom_subject = "Test custom subject"
alert.custom_body = "Test custom body with 1D array, however unlikely ['Col1', 'Col2']"
alert.render_template = mock.Mock(return_value={"Rendered": "template"})
query = mock.Mock()
query.id = 1
user = mock.Mock()
app = mock.Mock()
host = "https://localhost:5000"
options = {
"webex_bot_token": "abcd",
"to_room_ids": "1234",
}
metadata = {"Scheduled": False}
new_state = Alert.TRIGGERED_STATE
destination = Webex(options)
with mock.patch("redash.destinations.webex.requests.post") as mock_post:
mock_response = mock.Mock()
mock_response.status_code = 200
mock_post.return_value = mock_response
destination.notify(alert, query, user, new_state, app, host, metadata, options)
query_link = f"{host}/queries/{query.id}"
alert_link = f"{host}/alerts/{alert.id}"
expected_attachments = Webex.formatted_attachments_template(
alert.custom_subject, alert.custom_body, query_link, alert_link
)
expected_payload = {
"markdown": alert.custom_subject + "\n" + alert.custom_body,
"attachments": expected_attachments,
"roomId": "1234",
}
mock_post.assert_called_once_with(
destination.api_base_url,
json=expected_payload,
headers={"Authorization": "Bearer abcd"},
timeout=5.0,
)
assert mock_response.status_code == 200
def test_datadog_notify_calls_requests_post():


@@ -118,6 +118,10 @@ class TestAlertEvaluate(BaseTestCase):
)
self.assertEqual(alert.evaluate(), Alert.UNKNOWN_STATE)
def test_evaluate_return_unknown_when_value_is_none(self):
alert = self.create_alert(get_results(None))
self.assertEqual(alert.evaluate(), Alert.UNKNOWN_STATE)
class TestNextState(TestCase):
def test_numeric_value(self):


@@ -0,0 +1,31 @@
from redash.utils import json_dumps, json_loads
from tests import BaseTestCase
class TestJsonDumps(BaseTestCase):
"""
NaN, Inf, and -Inf are sanitized to None.
"""
def test_data_with_nan_is_sanitized(self):
input_data = {
"columns": [
{"name": "_col0", "friendly_name": "_col0", "type": "float"},
{"name": "_col1", "friendly_name": "_col1", "type": "float"},
{"name": "_col2", "friendly_name": "_col1", "type": "float"},
{"name": "_col3", "friendly_name": "_col1", "type": "float"},
],
"rows": [{"_col0": 1.0, "_col1": float("nan"), "_col2": float("inf"), "_col3": float("-inf")}],
}
expected_output_data = {
"columns": [
{"name": "_col0", "friendly_name": "_col0", "type": "float"},
{"name": "_col1", "friendly_name": "_col1", "type": "float"},
{"name": "_col2", "friendly_name": "_col1", "type": "float"},
{"name": "_col3", "friendly_name": "_col1", "type": "float"},
],
"rows": [{"_col0": 1.0, "_col1": None, "_col2": None, "_col3": None}],
}
json_data = json_dumps(input_data)
actual_output_data = json_loads(json_data)
self.assertEqual(actual_output_data, expected_output_data)


@@ -46,7 +46,7 @@
"@types/jest": "^26.0.18",
"@types/leaflet": "^1.5.19",
"@types/numeral": "0.0.28",
"@types/plotly.js": "^1.54.22",
"@types/plotly.js": "^2.35.2",
"@types/react": "^17.0.0",
"@types/react-dom": "^17.0.0",
"@types/tinycolor2": "^1.4.2",
@@ -91,7 +91,7 @@
"leaflet.markercluster": "^1.1.0",
"lodash": "^4.17.10",
"numeral": "^2.0.6",
"plotly.js": "1.58.5",
"plotly.js": "2.35.3",
"react-pivottable": "^0.9.0",
"react-sortable-hoc": "^1.10.1",
"tinycolor2": "^1.4.1",


@@ -27,11 +27,13 @@
"automargin": true,
"showticklabels": true,
"title": null,
"tickformat": null,
"type": "-"
},
"yaxis": {
"automargin": true,
"title": null,
"tickformat": null,
"type": "linear",
"autorange": true,
"range": null


@@ -30,11 +30,13 @@
"automargin": true,
"showticklabels": true,
"title": null,
"tickformat": null,
"type": "-"
},
"yaxis": {
"automargin": true,
"title": null,
"tickformat": null,
"type": "linear",
"autorange": true,
"range": null
@@ -42,6 +44,7 @@
"yaxis2": {
"automargin": true,
"title": null,
"tickformat": null,
"type": "linear",
"autorange": true,
"range": null,


@@ -25,18 +25,21 @@
"automargin": true,
"showticklabels": true,
"title": null,
"tickformat": null,
"type": "-"
},
"yaxis": {
"automargin": true,
"title": null,
"tickformat": null,
"type": "linear",
"autorange": true,
"range": null
},
"hoverlabel": {
"namelength": -1
}
},
"hovermode": "x"
}
}
}


@@ -28,11 +28,13 @@
"automargin": true,
"showticklabels": true,
"title": null,
"tickformat": null,
"type": "-"
},
"yaxis": {
"automargin": true,
"title": null,
"tickformat": null,
"type": "linear",
"autorange": true,
"range": null
@@ -40,6 +42,7 @@
"yaxis2": {
"automargin": true,
"title": null,
"tickformat": null,
"type": "linear",
"autorange": true,
"range": null,
@@ -48,7 +51,8 @@
},
"hoverlabel": {
"namelength": -1
}
},
"hovermode": "x"
}
}
}


@@ -24,18 +24,21 @@
"automargin": true,
"showticklabels": true,
"title": null,
"tickformat": null,
"type": "-"
},
"yaxis": {
"automargin": true,
"title": null,
"tickformat": null,
"type": "linear",
"autorange": true,
"range": null
},
"hoverlabel": {
"namelength": -1
}
},
"hovermode": "x"
}
}
}


@@ -23,18 +23,21 @@
"automargin": true,
"showticklabels": true,
"title": null,
"tickformat": null,
"type": "-"
},
"yaxis": {
"automargin": true,
"title": null,
"tickformat": null,
"type": "linear",
"autorange": true,
"range": null
},
"hoverlabel": {
"namelength": -1
}
},
"hovermode": "x"
}
}
}


@@ -10,6 +10,7 @@ import { prepareCustomChartData, createCustomChartRenderer } from "./customChart
// @ts-expect-error ts-migrate(2339) FIXME: Property 'setPlotConfig' does not exist on type 't... Remove this comment to see the full error message
Plotly.setPlotConfig({
modeBarButtonsToRemove: ["sendDataToCloud"],
modeBarButtonsToAdd: ["togglespikelines", "v1hovermode"],
});
export {


@@ -99,8 +99,8 @@ function prepareSeries(series: any, options: any, numSeries: any, additionalOpti
};
const sourceData = new Map();
const labelsValuesMap = new Map();
const xValues: any[] = [];
const yValues: any[] = [];
const yErrorValues: any = [];
each(data, row => {
@@ -108,27 +108,20 @@ function prepareSeries(series: any, options: any, numSeries: any, additionalOpti
const y = cleanYValue(row.y, seriesYAxis === "y2" ? options.yAxis[1].type : options.yAxis[0].type); // depends on series type!
const yError = cleanNumber(row.yError); // always number
const size = cleanNumber(row.size); // always number
if (labelsValuesMap.has(x)) {
labelsValuesMap.set(x, labelsValuesMap.get(x) + y);
} else {
labelsValuesMap.set(x, y);
}
const aggregatedY = labelsValuesMap.get(x);
sourceData.set(x, {
x,
y: aggregatedY,
y,
yError,
size,
yPercent: null, // will be updated later
row,
});
xValues.push(x);
yValues.push(y);
yErrorValues.push(yError);
});
const xValues = Array.from(labelsValuesMap.keys());
const yValues = Array.from(labelsValuesMap.values());
const plotlySeries = {
visible: true,
hoverinfo: hoverInfoPattern,

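The removed `labelsValuesMap` keyed every point by its x-value and summed on collision, which is correct for pie-style aggregation but wrong for scatter, line, and bubble traces, where repeated x-values are distinct points. A Python sketch of the difference, with illustrative sample points:

```python
points = [(1, 10), (1, 20), (2, 5)]  # two distinct points share x == 1

# Old behavior: aggregate y by x, so (1, 10) and (1, 20) collapse to (1, 30).
aggregated = {}
for x, y in points:
    aggregated[x] = aggregated.get(x, 0) + y
print(list(aggregated.items()))  # [(1, 30), (2, 5)] -- wrong for scatter

# New behavior: keep every point as-is, in row order.
xs = [x for x, _ in points]
ys = [y for _, y in points]
print(list(zip(xs, ys)))         # [(1, 10), (1, 20), (2, 5)]
```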

@@ -21,7 +21,7 @@ function prepareXAxis(axisOptions: any, additionalOptions: any) {
title: getAxisTitle(axisOptions),
type: getAxisScaleType(axisOptions),
automargin: true,
tickformat: axisOptions.tickFormat,
tickformat: axisOptions.tickFormat ?? null,
};
if (additionalOptions.sortX && axis.type === "category") {
@@ -49,7 +49,7 @@ function prepareYAxis(axisOptions: any) {
automargin: true,
autorange: true,
range: null,
tickformat: axisOptions.tickFormat,
tickformat: axisOptions.tickFormat ?? null,
};
}
@@ -109,7 +109,7 @@ function prepareBoxLayout(layout: any, options: any, data: any) {
}
export default function prepareLayout(element: any, options: any, data: any) {
const layout = {
const layout: any = {
margin: { l: 10, r: 10, b: 5, t: 20, pad: 4 },
// plot size should be at least 5x5px
width: Math.max(5, Math.floor(element.offsetWidth)),
@@ -124,6 +124,10 @@ export default function prepareLayout(element: any, options: any, data: any) {
},
};
if (["line", "area", "column"].includes(options.globalSeriesType)) {
layout.hovermode = options.swappedAxes ? 'y' : 'x';
}
switch (options.globalSeriesType) {
case "pie":
return preparePieLayout(layout, options, data);


@@ -5,7 +5,7 @@ Object {
"columns": Array [
Object {
"alignContent": "right",
"allowHTML": true,
"allowHTML": false,
"allowSearch": false,
"booleanValues": Array [
"false",
@@ -38,7 +38,7 @@ Object {
"columns": Array [
Object {
"alignContent": "left",
"allowHTML": true,
"allowHTML": false,
"allowSearch": false,
"booleanValues": Array [
"false",
@@ -71,7 +71,7 @@ Object {
"columns": Array [
Object {
"alignContent": "left",
"allowHTML": true,
"allowHTML": false,
"allowSearch": false,
"booleanValues": Array [
"false",
@@ -104,7 +104,7 @@ Object {
"columns": Array [
Object {
"alignContent": "left",
"allowHTML": true,
"allowHTML": false,
"allowSearch": true,
"booleanValues": Array [
"false",
@@ -137,7 +137,7 @@ Object {
"columns": Array [
Object {
"alignContent": "left",
"allowHTML": true,
"allowHTML": false,
"allowSearch": false,
"booleanValues": Array [
"false",


@@ -54,7 +54,7 @@ function getDefaultColumnsOptions(columns: any) {
allowSearch: false,
alignContent: getColumnContentAlignment(col.type),
// `string` cell options
allowHTML: true,
allowHTML: false,
highlightLinks: false,
}));
}

(file diff suppressed because it is too large)

yarn.lock (1,899 lines changed; diff suppressed because it is too large)