Compare commits

...

13 Commits

Author SHA1 Message Date
snickerjp
008d466d34 Merge branch 'master' into dependabot/pip/snowflake-connector-python-3.13.1 2025-02-15 01:41:43 +09:00
Lee2532
71902e5933 FIX : redash docker image TAG (#7280)
Co-authored-by: snickerjp <snickerjp@gmail.com>
2025-02-15 01:38:23 +09:00
Tsuneo Yoshioka
53eab14cef Make autocomplete always available (#7326) 2025-02-13 15:25:39 -05:00
Eric Radman
925bb91d8e Use absolute path for image resources (#7322)
When MULTI_ORG is enabled, 'static/' resolves to '<org>/static/'
2025-02-12 08:37:40 -05:00
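A quick way to see the bug this commit fixes: a relative URL resolves against the page's base path, so with MULTI_ORG the org slug leaks into the asset URL. A minimal sketch using Python's urljoin (host and org name are invented):

```python
from urllib.parse import urljoin

# Hypothetical multi-org deployment: pages are served under an org prefix.
page_url = "https://redash.example.com/acme/"

# Relative path: resolves under the org prefix, where no assets live.
print(urljoin(page_url, "static/images/db-logos/pg.png"))
# https://redash.example.com/acme/static/images/db-logos/pg.png

# Absolute path (the fix): always resolves from the host root.
print(urljoin(page_url, "/static/images/db-logos/pg.png"))
# https://redash.example.com/static/images/db-logos/pg.png
```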
Tsuneo Yoshioka
ec2ca6f986 BigQuery: show column type on Schema Browser (#7257) 2025-02-05 18:25:39 +00:00
Matt Nelson
96ea0194e8 Fix errors in webex alert destination. Add formatting support for QUERY_RESULT_TABLE. (#7296)
* prevent text values in payload being detected as 'set' on send.
Webex send ERROR:: Object of type set is not JSON serializable

Signed-off-by: Matt Nelson <metheos@gmail.com>

* add support for formatted QUERY_RESULT_TABLE in webex card

Signed-off-by: Matt Nelson <metheos@gmail.com>

* don't try to send to blank destinations

Signed-off-by: Matt Nelson <metheos@gmail.com>

* fix handling of the encoded QUERY_RESULTS_TABLE text

Signed-off-by: Matt Nelson <metheos@gmail.com>

* re-sort imports for ruff

Signed-off-by: Matt Nelson <metheos@gmail.com>

* change formatter to black

Signed-off-by: Matt Nelson <metheos@gmail.com>

* Add additional tests for Webex notification handling

ensure blank entries are handled for room IDs and person emails.
ensure that the API is not called when no valid destinations are provided.
ensure proper attachment formatting for alerts containing 2D arrays.

Signed-off-by: Matt Nelson <metheos@gmail.com>

* Add test for Webex notification with 1D array handling

This commit introduces a new test case to verify that the Webex
notification function correctly handles a 1D array input in the alert body.
The test ensures that the expected payload is constructed properly and that
the requests.post method is called with the correct parameters.

Signed-off-by: Matt Nelson <metheos@gmail.com>

---------

Signed-off-by: Matt Nelson <metheos@gmail.com>
2025-02-04 11:05:13 +00:00
dependabot[bot]
2566229b0e Bump snowflake-connector-python from 3.12.3 to 3.13.1
Bumps [snowflake-connector-python](https://github.com/snowflakedb/snowflake-connector-python) from 3.12.3 to 3.13.1.
- [Release notes](https://github.com/snowflakedb/snowflake-connector-python/releases)
- [Commits](https://github.com/snowflakedb/snowflake-connector-python/compare/v3.12.3...v3.13.1)

---
updated-dependencies:
- dependency-name: snowflake-connector-python
  dependency-type: direct:development
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-02-01 00:36:43 +00:00
github-actions[bot]
2776992101 Snapshot: 25.02.0-dev 2025-02-01 00:33:52 +00:00
Arik Fraimovich
85f001982e GitHub Actions Workflow updates (#7298)
* Split out secrets requiring workflows

* Update target

* Update Cypress run command
2025-01-31 10:20:04 +02:00
Motoi Washida
d03a2c4096 Fix error in rehash DB migration with Elasticsearch queries (#7292)
Fixes #7272
2025-01-22 21:19:59 -05:00
SeongTae Jeong
8c5890482a Use ARM64 runners instead of virtualization for ARM64 image builds (#7291) 2025-01-19 16:00:19 +10:00
Ezra Odio
10ce280a96 Default to not allow HTML content in tables (#7064)
Co-authored-by: Ezra Odio <eodio@starfishstorage.com>
2025-01-15 10:09:24 -05:00
dependabot[bot]
0dd7ac3d2e Bump virtualenv from 20.25.0 to 20.26.6 (#7276) 2025-01-14 01:45:58 +00:00
20 changed files with 455 additions and 148 deletions

View File

@@ -3,7 +3,7 @@ on:
push:
branches:
- master
-pull_request_target:
+pull_request:
branches:
- master
env:
@@ -60,10 +60,10 @@ jobs:
mkdir -p /tmp/test-results/unit-tests
docker cp tests:/app/coverage.xml ./coverage.xml
docker cp tests:/app/junit.xml /tmp/test-results/unit-tests/results.xml
-- name: Upload coverage reports to Codecov
-uses: codecov/codecov-action@v3
-with:
-token: ${{ secrets.CODECOV_TOKEN }}
+# - name: Upload coverage reports to Codecov
+# uses: codecov/codecov-action@v3
+# with:
+# token: ${{ secrets.CODECOV_TOKEN }}
- name: Store Test Results
uses: actions/upload-artifact@v4
with:
@@ -134,9 +134,9 @@ jobs:
COMPOSE_PROJECT_NAME: cypress
CYPRESS_INSTALL_BINARY: 0
PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: 1
-PERCY_TOKEN: ${{ secrets.PERCY_TOKEN }}
-CYPRESS_PROJECT_ID: ${{ secrets.CYPRESS_PROJECT_ID }}
-CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
+# PERCY_TOKEN: ${{ secrets.PERCY_TOKEN }}
+# CYPRESS_PROJECT_ID: ${{ secrets.CYPRESS_PROJECT_ID }}
+# CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
steps:
- if: github.event.pull_request.mergeable == 'false'
name: Exit if PR is not mergeable

View File

@@ -39,7 +39,20 @@ jobs:
fi
build-docker-image:
-runs-on: ubuntu-22.04
+runs-on: ${{ matrix.os }}
+strategy:
+fail-fast: false
+matrix:
+arch:
+- amd64
+- arm64
+include:
+- arch: amd64
+os: ubuntu-22.04
+- arch: arm64
+os: ubuntu-22.04-arm
+outputs:
+VERSION_TAG: ${{ steps.version.outputs.VERSION_TAG }}
needs:
- build-skip-check
if: needs.build-skip-check.outputs.skip == 'false'
@@ -54,11 +67,6 @@ jobs:
node-version: ${{ env.NODE_VERSION }}
cache: 'yarn'
-- name: Set up QEMU
-uses: docker/setup-qemu-action@v3
-with:
-platforms: arm64
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
@@ -69,6 +77,8 @@ jobs:
password: ${{ secrets.DOCKER_PASS }}
- name: Install Dependencies
+env:
+PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: true
run: |
npm install --global --force yarn@1.22.22
yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
@@ -81,40 +91,92 @@ jobs:
VERSION_TAG=$(jq -r .version package.json)
echo "VERSION_TAG=$VERSION_TAG" >> "$GITHUB_OUTPUT"
-# TODO: We can use GitHub Actions's matrix option to reduce the build time.
- name: Build and push preview image to Docker Hub
+id: build-preview
uses: docker/build-push-action@v4
if: ${{ github.event.inputs.dockerRepository == 'preview' || !github.event.workflow_run }}
with:
push: true
tags: |
-redash/redash:preview
-redash/preview:${{ steps.version.outputs.VERSION_TAG }}
+${{ vars.DOCKER_USER }}/redash
+${{ vars.DOCKER_USER }}/preview
context: .
build-args: |
test_all_deps=true
-cache-from: type=gha,scope=multi-platform
-cache-to: type=gha,mode=max,scope=multi-platform
-platforms: linux/amd64,linux/arm64
+outputs: type=image,push-by-digest=true,push=true
+cache-from: type=gha,scope=${{ matrix.arch }}
+cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
env:
DOCKER_CONTENT_TRUST: true
- name: Build and push release image to Docker Hub
+id: build-release
uses: docker/build-push-action@v4
if: ${{ github.event.inputs.dockerRepository == 'redash' }}
with:
push: true
tags: |
-redash/redash:${{ steps.version.outputs.VERSION_TAG }}
+${{ vars.DOCKER_USER }}/redash:${{ steps.version.outputs.VERSION_TAG }}
context: .
build-args: |
test_all_deps=true
-cache-from: type=gha,scope=multi-platform
-cache-to: type=gha,mode=max,scope=multi-platform
-platforms: linux/amd64,linux/arm64
+outputs: type=image,push-by-digest=true,push=true
+cache-from: type=gha,scope=${{ matrix.arch }}
+cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
env:
DOCKER_CONTENT_TRUST: true
- name: "Failure: output container logs to console"
if: failure()
run: docker compose logs
+- name: Export digest
+run: |
+mkdir -p ${{ runner.temp }}/digests
+if [[ "${{ github.event.inputs.dockerRepository }}" == 'preview' || !github.event.workflow_run ]]; then
+digest="${{ steps.build-preview.outputs.digest}}"
+else
+digest="${{ steps.build-release.outputs.digest}}"
+fi
+touch "${{ runner.temp }}/digests/${digest#sha256:}"
+- name: Upload digest
+uses: actions/upload-artifact@v4
+with:
+name: digests-${{ matrix.arch }}
+path: ${{ runner.temp }}/digests/*
+if-no-files-found: error
+merge-docker-image:
+runs-on: ubuntu-22.04
+needs: build-docker-image
+steps:
+- name: Set up Docker Buildx
+uses: docker/setup-buildx-action@v3
+- name: Login to DockerHub
+uses: docker/login-action@v3
+with:
+username: ${{ vars.DOCKER_USER }}
+password: ${{ secrets.DOCKER_PASS }}
+- name: Download digests
+uses: actions/download-artifact@v4
+with:
+path: ${{ runner.temp }}/digests
+pattern: digests-*
+merge-multiple: true
+- name: Create and push manifest for the preview image
+if: ${{ github.event.inputs.dockerRepository == 'preview' || !github.event.workflow_run }}
+working-directory: ${{ runner.temp }}/digests
+run: |
+docker buildx imagetools create -t ${{ vars.DOCKER_USER }}/redash:preview \
+$(printf '${{ vars.DOCKER_USER }}/redash:preview@sha256:%s ' *)
+docker buildx imagetools create -t ${{ vars.DOCKER_USER }}/preview:${{ needs.build-docker-image.outputs.VERSION_TAG }} \
+$(printf '${{ vars.DOCKER_USER }}/preview:${{ needs.build-docker-image.outputs.VERSION_TAG }}@sha256:%s ' *)
+- name: Create and push manifest for the release image
+if: ${{ github.event.inputs.dockerRepository == 'redash' }}
+working-directory: ${{ runner.temp }}/digests
+run: |
+docker buildx imagetools create -t ${{ vars.DOCKER_USER }}/redash:${{ needs.build-docker-image.outputs.VERSION_TAG }} \
+$(printf '${{ vars.DOCKER_USER }}/redash:${{ needs.build-docker-image.outputs.VERSION_TAG }}@sha256:%s ' *)
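For readers unfamiliar with the push-by-digest pattern used above: each matrix job pushes an untagged single-arch image and uploads its digest (as a file name under digests/) as an artifact, and the merge job stitches those digests into one multi-arch tag. A rough Python rendering of what the printf expansion hands to imagetools create (repository name and digests are placeholders):

```python
# Stand-ins for the digest files downloaded from the two matrix jobs;
# the export step stripped the "sha256:" prefix when naming them.
digests = ["0a1b2c", "3d4e5f"]
repo = "example-user/redash"  # placeholder for ${{ vars.DOCKER_USER }}/redash

refs = " ".join(f"{repo}:preview@sha256:{d}" for d in digests)
print(f"docker buildx imagetools create -t {repo}:preview {refs}")
# One manifest-list tag ends up pointing at both per-arch images.
```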

View File

@@ -34,7 +34,7 @@ clean:
clean-all: clean
docker image rm --force \
-redash/redash:10.1.0.b50633 redis:7-alpine maildev/maildev:latest \
+redash/redash:latest redis:7-alpine maildev/maildev:latest \
pgautoupgrade/pgautoupgrade:15-alpine3.8 pgautoupgrade/pgautoupgrade:latest
down:

View File

@@ -69,7 +69,7 @@ UserPreviewCard.defaultProps = {
// DataSourcePreviewCard
export function DataSourcePreviewCard({ dataSource, withLink, children, ...props }) {
-const imageUrl = `static/images/db-logos/${dataSource.type}.png`;
+const imageUrl = `/static/images/db-logos/${dataSource.type}.png`;
const title = withLink ? <Link href={"data_sources/" + dataSource.id}>{dataSource.name}</Link> : dataSource.name;
return (
<PreviewCard {...props} imageUrl={imageUrl} title={title}>

View File

@@ -96,7 +96,7 @@ function EmptyState({
}, []);
// Show if `onboardingMode=false` or any requested step not completed
-const shouldShow = !onboardingMode || some(keys(isAvailable), step => isAvailable[step] && !isCompleted[step]);
+const shouldShow = !onboardingMode || some(keys(isAvailable), (step) => isAvailable[step] && !isCompleted[step]);
if (!shouldShow) {
return null;
@@ -181,7 +181,7 @@ function EmptyState({
];
const stepsItems = getStepsItems ? getStepsItems(defaultStepsItems) : defaultStepsItems;
-const imageSource = illustrationPath ? illustrationPath : "static/images/illustrations/" + illustration + ".svg";
+const imageSource = illustrationPath ? illustrationPath : "/static/images/illustrations/" + illustration + ".svg";
return (
<div className="empty-state-wrapper">
@@ -196,7 +196,7 @@ function EmptyState({
</div>
<div className="empty-state__steps">
<h4>Let&apos;s get started</h4>
-<ol>{stepsItems.map(item => item.node)}</ol>
+<ol>{stepsItems.map((item) => item.node)}</ol>
{helpMessage}
</div>
</div>

View File

@@ -2,7 +2,7 @@ import PropTypes from "prop-types";
import React from "react";
export function QuerySourceTypeIcon(props) {
-return <img src={`static/images/db-logos/${props.type}.png`} width="20" alt={props.alt} />;
+return <img src={`/static/images/db-logos/${props.type}.png`} width="20" alt={props.alt} />;
}
QuerySourceTypeIcon.propTypes = {

View File

@@ -18,7 +18,7 @@ function EmptyState({ title, message, refreshButton }) {
<div className="query-results-empty-state">
<div className="empty-state-content">
<div>
<img src="static/images/illustrations/no-query-results.svg" alt="No Query Results Illustration" />
<img src="/static/images/illustrations/no-query-results.svg" alt="No Query Results Illustration" />
</div>
<h3>{title}</h3>
<div className="m-b-20">{message}</div>
@@ -40,7 +40,7 @@ EmptyState.defaultProps = {
function TabWithDeleteButton({ visualizationName, canDelete, onDelete, ...props }) {
const handleDelete = useCallback(
-e => {
+(e) => {
e.stopPropagation();
Modal.confirm({
title: "Delete Visualization",
@@ -111,7 +111,8 @@ export default function QueryVisualizationTabs({
className="add-visualization-button"
data-test="NewVisualization"
type="link"
-onClick={() => onAddVisualization()}>
+onClick={() => onAddVisualization()}
+>
<i className="fa fa-plus" aria-hidden="true" />
<span className="m-l-5 hidden-xs">Add Visualization</span>
</Button>
@@ -119,7 +120,7 @@ export default function QueryVisualizationTabs({
}
const orderedVisualizations = useMemo(() => orderBy(visualizations, ["id"]), [visualizations]);
-const isFirstVisualization = useCallback(visId => visId === orderedVisualizations[0].id, [orderedVisualizations]);
+const isFirstVisualization = useCallback((visId) => visId === orderedVisualizations[0].id, [orderedVisualizations]);
const isMobile = useMedia({ maxWidth: 768 });
const [filters, setFilters] = useState([]);
@@ -132,9 +133,10 @@ export default function QueryVisualizationTabs({
data-test="QueryPageVisualizationTabs"
animated={false}
tabBarGutter={0}
-onChange={activeKey => onChangeTab(+activeKey)}
-destroyInactiveTabPane>
-{orderedVisualizations.map(visualization => (
+onChange={(activeKey) => onChangeTab(+activeKey)}
+destroyInactiveTabPane
+>
+{orderedVisualizations.map((visualization) => (
<TabPane
key={`${visualization.id}`}
tab={
@@ -144,7 +146,8 @@ export default function QueryVisualizationTabs({
visualizationName={visualization.name}
onDelete={() => onDeleteVisualization(visualization.id)}
/>
-}>
+}
+>
{queryResult ? (
<VisualizationRenderer
visualization={visualization}

View File

@@ -1,16 +1,11 @@
import { useCallback, useMemo, useState } from "react";
-import { reduce } from "lodash";
import localOptions from "@/lib/localOptions";
-function calculateTokensCount(schema) {
-return reduce(schema, (totalLength, table) => totalLength + table.columns.length, 0);
-}
export default function useAutocompleteFlags(schema) {
-const isAvailable = useMemo(() => calculateTokensCount(schema) <= 5000, [schema]);
+const isAvailable = true;
const [isEnabled, setIsEnabled] = useState(localOptions.get("liveAutocomplete", true));
-const toggleAutocomplete = useCallback(state => {
+const toggleAutocomplete = useCallback((state) => {
setIsEnabled(state);
localOptions.set("liveAutocomplete", state);
}, []);
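The deleted gate counted one token per schema column and turned live autocomplete off above 5,000; this commit removes the cap entirely. The removed lodash reduce, transliterated to Python for illustration (the sample schema is made up):

```python
from functools import reduce

schema = [
    {"name": "orders", "columns": ["id", "user_id", "total"]},
    {"name": "users", "columns": ["id", "email"]},
]

tokens = reduce(lambda total, table: total + len(table["columns"]), schema, 0)
print(tokens)  # 5 -- the old code required this sum to be <= 5000
```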

View File

@@ -4,19 +4,19 @@ import { fetchDataFromJob } from "@/services/query-result";
export const SCHEMA_NOT_SUPPORTED = 1;
export const SCHEMA_LOAD_ERROR = 2;
-export const IMG_ROOT = "static/images/db-logos";
+export const IMG_ROOT = "/static/images/db-logos";
function mapSchemaColumnsToObject(columns) {
-return map(columns, column => (isObject(column) ? column : { name: column }));
+return map(columns, (column) => (isObject(column) ? column : { name: column }));
}
const DataSource = {
query: () => axios.get("api/data_sources"),
get: ({ id }) => axios.get(`api/data_sources/${id}`),
types: () => axios.get("api/data_sources/types"),
-create: data => axios.post(`api/data_sources`, data),
-save: data => axios.post(`api/data_sources/${data.id}`, data),
-test: data => axios.post(`api/data_sources/${data.id}/test`),
+create: (data) => axios.post(`api/data_sources`, data),
+save: (data) => axios.post(`api/data_sources/${data.id}`, data),
+test: (data) => axios.post(`api/data_sources/${data.id}/test`),
delete: ({ id }) => axios.delete(`api/data_sources/${id}`),
fetchSchema: (data, refresh = false) => {
const params = {};
@@ -27,15 +27,15 @@ const DataSource = {
return axios
.get(`api/data_sources/${data.id}/schema`, { params })
-.then(data => {
+.then((data) => {
if (has(data, "job")) {
-return fetchDataFromJob(data.job.id).catch(error =>
+return fetchDataFromJob(data.job.id).catch((error) =>
error.code === SCHEMA_NOT_SUPPORTED ? [] : Promise.reject(new Error(data.job.error))
);
}
return has(data, "schema") ? data.schema : Promise.reject();
})
-.then(tables => map(tables, table => ({ ...table, columns: mapSchemaColumnsToObject(table.columns) })));
+.then((tables) => map(tables, (table) => ({ ...table, columns: mapSchemaColumnsToObject(table.columns) })));
},
};

View File

@@ -63,7 +63,7 @@ function runCypressCI() {
CYPRESS_OPTIONS, // eslint-disable-line no-unused-vars
} = process.env;
if (GITHUB_REPOSITORY === "getredash/redash") {
if (GITHUB_REPOSITORY === "getredash/redash" && process.env.CYPRESS_RECORD_KEY) {
process.env.CYPRESS_OPTIONS = "--record";
}

View File

@@ -1,6 +1,6 @@
{
"name": "redash-client",
"version": "25.01.0-dev",
"version": "25.02.0-dev",
"description": "The frontend part of Redash.",
"main": "index.js",
"scripts": {

poetry.lock (generated; 70 changed lines)
View File

@@ -4644,56 +4644,56 @@ files = [
[[package]]
name = "snowflake-connector-python"
version = "3.12.3"
version = "3.13.1"
description = "Snowflake Connector for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "snowflake_connector_python-3.12.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:497a096fc379ef0846b2f1cf11a8d7620f0d090f08a77d9e93473845014d57d1"},
{file = "snowflake_connector_python-3.12.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:055c5808d524497213e4cc9ae91ec3e46cb8342b314e78bc3e139d733dc16741"},
{file = "snowflake_connector_python-3.12.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a5dc512d62ef693041ed2ad82931231caddc16e14ffc2842da3e3dd4240b83d"},
{file = "snowflake_connector_python-3.12.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a46448f7279d444084eb84a9cddea67662e80ccfaddf41713b9e9aab2b1242e9"},
{file = "snowflake_connector_python-3.12.3-cp310-cp310-win_amd64.whl", hash = "sha256:821b774b77129ce9f03729456ac1f21d69fedb50e5ce957178131c7bb3d8279f"},
{file = "snowflake_connector_python-3.12.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:82290134978d11628026b447052219ce8d880e36937204f1f0332dfc3f2e92e9"},
{file = "snowflake_connector_python-3.12.3-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:20b5c8000ee9cee11b0f9a6ae26640f0d498ce77f7e2ec649a2f0d306523792d"},
{file = "snowflake_connector_python-3.12.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca6500d16bdbd37da88e589cc3e82b90272471d3aabfe4a79ec1cf4696675acf"},
{file = "snowflake_connector_python-3.12.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b455ba117a68da436e253899674fae1a93669eaefdde8a903c03eb65b7e87c86"},
{file = "snowflake_connector_python-3.12.3-cp311-cp311-win_amd64.whl", hash = "sha256:205219fcaeee2d33db5d0d023d60518e3bd8272ce1679be2199d7f362d255054"},
{file = "snowflake_connector_python-3.12.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3d830ca32c864b730cba5d92900d850752199635c4fb0ae0a70ee677f62aee70"},
{file = "snowflake_connector_python-3.12.3-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:597b0c74ec57ba693191ae2de8db9536e349ee32cab152df657473e498b6fd87"},
{file = "snowflake_connector_python-3.12.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2215d8a4c5e25ea0d2183fe693c3fdf058cd6035e5c84710d532dc04ab4ffd31"},
{file = "snowflake_connector_python-3.12.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8ba9c261904c1ba7cae6035c7881224cf979da39c8b7c7cb10236fdfc57e505"},
{file = "snowflake_connector_python-3.12.3-cp312-cp312-win_amd64.whl", hash = "sha256:f0d0fcb948ef0812ab162ec9767622f345554043a07439c0c1a9474c86772320"},
{file = "snowflake_connector_python-3.12.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fe742a0b2fb1c79a21e95b97c49a05783bc00314d1184d227c5fe5b57688af12"},
{file = "snowflake_connector_python-3.12.3-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:a8584a44a6bb41d2056cf1b833e629c76e28c5303d2c875c1a23bda46a1cd43a"},
{file = "snowflake_connector_python-3.12.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd990db8e4886c32ba5c63758e8dc4814e2e75f5fd3fe79d43f7e5ee0fc46793"},
{file = "snowflake_connector_python-3.12.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4fe7f91f6e44bda877e77403a586d7487ca2c52dc1a32a705b2fea33f9c763a"},
{file = "snowflake_connector_python-3.12.3-cp38-cp38-win_amd64.whl", hash = "sha256:4994e95eff593dc44c28243ef0ae8d27b8b1aeb96dd64cbcea5bcf0e4dfb77fb"},
{file = "snowflake_connector_python-3.12.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ac33a7dd54b35f94c4b91369971dbd6467a914dff4b01c46e77e7e6901d7eca4"},
{file = "snowflake_connector_python-3.12.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:a26876322811fe2b93f6d814dcfe016f1df680a12624026ecf57a6bcdf20f969"},
{file = "snowflake_connector_python-3.12.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c0bb390be2e15b6b7cccab7fbe1ef94e1e9ab13790c974aa44761298cdc2641"},
{file = "snowflake_connector_python-3.12.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7340f73af4ae72e6af8fe28a1b8e196a0c99943071afc96ce419efb4da80035"},
{file = "snowflake_connector_python-3.12.3-cp39-cp39-win_amd64.whl", hash = "sha256:c314749bd0151218b654a7d4646a39067ab650bdc86dfebb1884b056b0bdb4b4"},
{file = "snowflake_connector_python-3.12.3.tar.gz", hash = "sha256:02873c7f7a3b10322e28dddc2be6907f8ab8ecad93d6d6af14c77c2f53091b88"},
{file = "snowflake_connector_python-3.13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b636641df38b7b951f62a7f53a6444576bbbadddd2d73615f7ceade4e79b32d7"},
{file = "snowflake_connector_python-3.13.1-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:bb7b617de91a74ba69057f4b78ef685dfd14c18fc5208861c0bf4d733fb80b7a"},
{file = "snowflake_connector_python-3.13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ab4707f2d7bf2e4202c50b5c6250bff1fcbab471b1d82c2608c0adafd3970dc"},
{file = "snowflake_connector_python-3.13.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c15e00c0972233884d6b881efcdb5467a415fea1aa094a55985dc5aad66a3711"},
{file = "snowflake_connector_python-3.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:05be6d96a4b771c1c81be947f6ca5c8b22925f4e415cc9d0c05dd0bdfaaee5cd"},
{file = "snowflake_connector_python-3.13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e51c3b0ca8b964c96e8592112422cb038d22bc4f94c9443a17871225df9f0de4"},
{file = "snowflake_connector_python-3.13.1-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:4f34b805cae831ab8791599c513b0bd65186981ae5b7d0e22001922dcb3a29c5"},
{file = "snowflake_connector_python-3.13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bdbf58f625bb126a15112d3e6e35c68c2b5a7309a8faa173b3e80a284c4499e"},
{file = "snowflake_connector_python-3.13.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51938948af9fda9d53acfce2e440963479c8b9cd0d05f8cbed06bd42bdf9c7ac"},
{file = "snowflake_connector_python-3.13.1-cp311-cp311-win_amd64.whl", hash = "sha256:8e4bb3f3a9dd5b5c516e6414c5991787715da5067a3d6d5ccdd2d124c56cdfef"},
{file = "snowflake_connector_python-3.13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f856fc29e7bacdec6ffa449de1d91da50637ba8d9fa675cec640f5853b2a79ba"},
{file = "snowflake_connector_python-3.13.1-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:ba9bf43425e8938d7bdc0f0d9488783846e810af8173a1a35e642b6795180ca0"},
{file = "snowflake_connector_python-3.13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1540099913bafbccf7f2ca4b0e49893ee44f0645c94ec59f7c7c147085e64d5"},
{file = "snowflake_connector_python-3.13.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8ee93ed3e89452b411cfcbe22efcf4796910ca014add2aa9bae0e8648e3b207"},
{file = "snowflake_connector_python-3.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:0a55795f2d737adf89dbbc7829c971d44742fd3c0bf0401314a26178cdcde700"},
{file = "snowflake_connector_python-3.13.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2435e630d9cec64574045c38d3f1c795ee91b157dae12187c585318dcb42572d"},
{file = "snowflake_connector_python-3.13.1-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:ad2646fb1704f43867718fa1b2b5d5b5ca8352b4fef2ef7f19bb18bd5b9ec5bd"},
{file = "snowflake_connector_python-3.13.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d56d43e5ddacc63b3b5e87bd98ee206da52388aac4a6e0d460e225430c5f1897"},
{file = "snowflake_connector_python-3.13.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f32e0dcce63ff9ebdcad0a3c24465232a9667d38071c048b116b0bef85812a5"},
{file = "snowflake_connector_python-3.13.1-cp38-cp38-win_amd64.whl", hash = "sha256:8dfa6398a9424fad23fe32b0db5e27553453f8062e2ea498b8d81ff825791e6c"},
{file = "snowflake_connector_python-3.13.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:622484c3eedb9f4deb2a4f5124e25580bc43c39220e3c7ec01e18898e9a8020e"},
{file = "snowflake_connector_python-3.13.1-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:8fdf757ca07ef4241bcc9d82123d104ab1cdb8525421ff56dab6d86802a59b81"},
{file = "snowflake_connector_python-3.13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcce282b3ac906063df7e16678b0c829b44384070b63c01a643fe67cd82343a"},
{file = "snowflake_connector_python-3.13.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60401ab607be786081a0ba02128b230ab5c6afa7babaa215a327943dc0e293ac"},
{file = "snowflake_connector_python-3.13.1-cp39-cp39-win_amd64.whl", hash = "sha256:afcae3b8a38ba24ec0ca65208f282c234add7624cecbf968a564ebd2324a3287"},
{file = "snowflake_connector_python-3.13.1.tar.gz", hash = "sha256:6d0f515f24efb58c7dae26ac681dac032cb4d0c94a1cb676a50b5d41c812b5b2"},
]
[package.dependencies]
asn1crypto = ">0.24.0,<2.0.0"
certifi = ">=2017.4.17"
cffi = ">=1.9,<2.0.0"
charset-normalizer = ">=2,<4"
charset_normalizer = ">=2,<4"
cryptography = ">=3.1.0"
filelock = ">=3.5,<4"
idna = ">=2.5,<4"
packaging = "*"
platformdirs = ">=2.6.0,<5.0.0"
pyjwt = "<3.0.0"
pyOpenSSL = ">=16.2.0,<25.0.0"
pyOpenSSL = ">=22.0.0,<25.0.0"
pytz = "*"
requests = "<3.0.0"
sortedcontainers = ">=2.4.0"
tomlkit = "*"
typing-extensions = ">=4.3,<5"
typing_extensions = ">=4.3,<5"
urllib3 = {version = ">=1.21.1,<2.0.0", markers = "python_version < \"3.10\""}
[package.extras]
@@ -5154,13 +5154,13 @@ six = ">=1.10.0"
[[package]]
name = "virtualenv"
version = "20.25.0"
version = "20.26.6"
description = "Virtual Python Environment builder"
optional = false
python-versions = ">=3.7"
files = [
{file = "virtualenv-20.25.0-py3-none-any.whl", hash = "sha256:4238949c5ffe6876362d9c0180fc6c3a824a7b12b80604eeb8085f2ed7460de3"},
{file = "virtualenv-20.25.0.tar.gz", hash = "sha256:bf51c0d9c7dd63ea8e44086fa1e4fb1093a31e963b86959257378aef020e1f1b"},
{file = "virtualenv-20.26.6-py3-none-any.whl", hash = "sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2"},
{file = "virtualenv-20.26.6.tar.gz", hash = "sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48"},
]
[package.dependencies]
@@ -5169,7 +5169,7 @@ filelock = ">=3.12.2,<4"
platformdirs = ">=3.9.1,<5"
[package.extras]
docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"]
[[package]]
@@ -5493,4 +5493,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.8,<3.11"
content-hash = "971596e47325293cbc984bb5a8aabd88a211f4ff4bbd72323f5eb6a168643feb"
content-hash = "3ca1687df8b492af64ad73038d700cdabbd9ff4e3adda001ed7ca5976492de5e"

View File

@@ -12,7 +12,7 @@ force-exclude = '''
[tool.poetry]
name = "redash"
version = "25.01.0-dev"
version = "25.02.0-dev"
description = "Make Your Company Data Driven. Connect to any data source, easily visualize, dashboard and share your data."
authors = ["Arik Fraimovich <arik@redash.io>"]
# to be added to/removed from the mailing list, please reach out to Arik via the above email or Discord
@@ -132,7 +132,7 @@ python-rapidjson = "1.20"
requests-aws-sign = "0.1.5"
sasl = ">=0.1.3"
simple-salesforce = "0.74.3"
snowflake-connector-python = "3.12.3"
snowflake-connector-python = "3.13.1"
td-client = "1.0.0"
thrift = ">=0.8.0"
thrift-sasl = ">=0.1.0"

View File

@@ -14,7 +14,7 @@ from redash.app import create_app # noqa
from redash.destinations import import_destinations
from redash.query_runner import import_query_runners
__version__ = "25.01.0-dev"
__version__ = "25.02.0-dev"
if os.environ.get("REMOTE_DEBUG"):

View File

@@ -1,3 +1,5 @@
+import html
+import json
import logging
from copy import deepcopy
@@ -37,6 +39,129 @@ class Webex(BaseDestination):
@staticmethod
def formatted_attachments_template(subject, description, query_link, alert_link):
# Attempt to parse the description to find a 2D array
try:
# Extract the part of the description that looks like a JSON array
start_index = description.find("[")
end_index = description.rfind("]") + 1
json_array_str = description[start_index:end_index]
# Decode HTML entities
json_array_str = html.unescape(json_array_str)
# Replace single quotes with double quotes for valid JSON
json_array_str = json_array_str.replace("'", '"')
# Load the JSON array
data_array = json.loads(json_array_str)
# Check if it's a 2D array
if isinstance(data_array, list) and all(isinstance(i, list) for i in data_array):
# Create a table for the Adaptive Card
table_rows = []
for row in data_array:
table_rows.append(
{
"type": "ColumnSet",
"columns": [
{"type": "Column", "items": [{"type": "TextBlock", "text": str(item), "wrap": True}]}
for item in row
],
}
)
# Create the body of the card with the table
body = (
[
{
"type": "TextBlock",
"text": f"{subject}",
"weight": "bolder",
"size": "medium",
"wrap": True,
},
{
"type": "TextBlock",
"text": f"{description[:start_index]}",
"isSubtle": True,
"wrap": True,
},
]
+ table_rows
+ [
{
"type": "TextBlock",
"text": f"Click [here]({query_link}) to check your query!",
"wrap": True,
"isSubtle": True,
},
{
"type": "TextBlock",
"text": f"Click [here]({alert_link}) to check your alert!",
"wrap": True,
"isSubtle": True,
},
]
)
else:
# Fallback to the original description if no valid 2D array is found
body = [
{
"type": "TextBlock",
"text": f"{subject}",
"weight": "bolder",
"size": "medium",
"wrap": True,
},
{
"type": "TextBlock",
"text": f"{description}",
"isSubtle": True,
"wrap": True,
},
{
"type": "TextBlock",
"text": f"Click [here]({query_link}) to check your query!",
"wrap": True,
"isSubtle": True,
},
{
"type": "TextBlock",
"text": f"Click [here]({alert_link}) to check your alert!",
"wrap": True,
"isSubtle": True,
},
]
except json.JSONDecodeError:
# If parsing fails, fallback to the original description
body = [
{
"type": "TextBlock",
"text": f"{subject}",
"weight": "bolder",
"size": "medium",
"wrap": True,
},
{
"type": "TextBlock",
"text": f"{description}",
"isSubtle": True,
"wrap": True,
},
{
"type": "TextBlock",
"text": f"Click [here]({query_link}) to check your query!",
"wrap": True,
"isSubtle": True,
},
{
"type": "TextBlock",
"text": f"Click [here]({alert_link}) to check your alert!",
"wrap": True,
"isSubtle": True,
},
]
return [
{
"contentType": "application/vnd.microsoft.card.adaptive",
@@ -44,44 +169,7 @@ class Webex(BaseDestination):
"$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
"type": "AdaptiveCard",
"version": "1.0",
"body": [
{
"type": "ColumnSet",
"columns": [
{
"type": "Column",
"width": 4,
"items": [
{
"type": "TextBlock",
"text": {subject},
"weight": "bolder",
"size": "medium",
"wrap": True,
},
{
"type": "TextBlock",
"text": {description},
"isSubtle": True,
"wrap": True,
},
{
"type": "TextBlock",
"text": f"Click [here]({query_link}) to check your query!",
"wrap": True,
"isSubtle": True,
},
{
"type": "TextBlock",
"text": f"Click [here]({alert_link}) to check your alert!",
"wrap": True,
"isSubtle": True,
},
],
},
],
}
],
"body": body,
},
}
]
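The removed card body above is also the root cause of the "Object of type set is not JSON serializable" error quoted in the commit message: outside an f-string, `{subject}` is a Python set literal, not interpolation. A minimal reproduction with invented values:

```python
import json

subject = "Test alert"

buggy = {"text": {subject}}     # {subject} builds a one-element set
fixed = {"text": f"{subject}"}  # the f-string interpolates to a str

print(json.dumps(fixed))  # {"text": "Test alert"}
try:
    json.dumps(buggy)
except TypeError as err:
    print(err)  # Object of type set is not JSON serializable
```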
@@ -116,6 +204,10 @@ class Webex(BaseDestination):
# destinations is guaranteed to be a comma-separated string
for destination_id in destinations.split(","):
destination_id = destination_id.strip() # Remove any leading or trailing whitespace
+if not destination_id: # Check if the destination_id is empty or blank
+continue # Skip to the next iteration if it's empty or blank
payload = deepcopy(template_payload)
payload[payload_tag] = destination_id
self.post_message(payload, headers)
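To make the new parsing path concrete, here is what it does to a description carrying an HTML-encoded 2D array (the input string is invented for illustration):

```python
import html
import json

description = "Latest rows: [[&#39;Col1&#39;, &#39;Col2&#39;], [&#39;Val1&#39;, &#39;Val2&#39;]]"

start = description.find("[")
end = description.rfind("]") + 1
candidate = html.unescape(description[start:end])  # &#39; -> '
candidate = candidate.replace("'", '"')            # single -> double quotes for JSON

data = json.loads(candidate)
print(data)                                        # [['Col1', 'Col2'], ['Val1', 'Val2']]
print(all(isinstance(row, list) for row in data))  # True -> rendered as a card table
```

Note that the quote swap is a heuristic: a cell value containing an apostrophe would make json.loads fail, which the except branch turns into the plain-text fallback card.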

View File

@@ -304,7 +304,7 @@ class BigQuery(BaseQueryRunner):
datasets = self._get_project_datasets(project_id)
query_base = """
-SELECT table_schema, table_name, field_path
+SELECT table_schema, table_name, field_path, data_type
FROM `{dataset_id}`.INFORMATION_SCHEMA.COLUMN_FIELD_PATHS
WHERE table_schema NOT IN ('information_schema')
"""
@@ -325,7 +325,7 @@ class BigQuery(BaseQueryRunner):
table_name = "{0}.{1}".format(row["table_schema"], row["table_name"])
if table_name not in schema:
schema[table_name] = {"name": table_name, "columns": []}
schema[table_name]["columns"].append(row["field_path"])
schema[table_name]["columns"].append({"name": row["field_path"], "type": row["data_type"]})
return list(schema.values())
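For context, a sketch of the schema structure this change produces — columns become objects carrying a type instead of bare strings (the sample rows are made up):

```python
rows = [
    {"table_schema": "analytics", "table_name": "events",
     "field_path": "user_id", "data_type": "INT64"},
    {"table_schema": "analytics", "table_name": "events",
     "field_path": "payload.country", "data_type": "STRING"},
]

schema = {}
for row in rows:
    table_name = "{0}.{1}".format(row["table_schema"], row["table_name"])
    if table_name not in schema:
        schema[table_name] = {"name": table_name, "columns": []}
    schema[table_name]["columns"].append({"name": row["field_path"], "type": row["data_type"]})

print(list(schema.values()))
# [{'name': 'analytics.events', 'columns': [{'name': 'user_id', 'type': 'INT64'},
#                                           {'name': 'payload.country', 'type': 'STRING'}]}]
```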

View File

@@ -92,7 +92,7 @@ class BaseElasticSearch(BaseQueryRunner):
logger.setLevel(logging.DEBUG)
self.server_url = self.configuration.get("server", "")
-if self.server_url[-1] == "/":
+if self.server_url and self.server_url[-1] == "/":
self.server_url = self.server_url[:-1]
basic_auth_user = self.configuration.get("basic_auth_user", None)

View File

@@ -261,15 +261,19 @@ def test_webex_notify_calls_requests_post():
alert.name = "Test Alert"
alert.custom_subject = "Test custom subject"
alert.custom_body = "Test custom body"
+alert.render_template = mock.Mock(return_value={"Rendered": "template"})
query = mock.Mock()
query.id = 1
user = mock.Mock()
app = mock.Mock()
host = "https://localhost:5000"
options = {"webex_bot_token": "abcd", "to_room_ids": "1234"}
options = {
"webex_bot_token": "abcd",
"to_room_ids": "1234,5678",
"to_person_emails": "example1@test.com,example2@test.com",
}
metadata = {"Scheduled": False}
new_state = Alert.TRIGGERED_STATE
@@ -277,7 +281,7 @@ def test_webex_notify_calls_requests_post():
with mock.patch("redash.destinations.webex.requests.post") as mock_post:
mock_response = mock.Mock()
-mock_response.status_code = 204
+mock_response.status_code = 200
mock_post.return_value = mock_response
destination.notify(alert, query, user, new_state, app, host, metadata, options)
@@ -285,13 +289,111 @@ def test_webex_notify_calls_requests_post():
query_link = f"{host}/queries/{query.id}"
alert_link = f"{host}/alerts/{alert.id}"
-formatted_attachments = Webex.formatted_attachments_template(
+expected_attachments = Webex.formatted_attachments_template(
alert.custom_subject, alert.custom_body, query_link, alert_link
)
+expected_payload_room = {
+"markdown": alert.custom_subject + "\n" + alert.custom_body,
+"attachments": expected_attachments,
+"roomId": "1234",
+}
+expected_payload_email = {
+"markdown": alert.custom_subject + "\n" + alert.custom_body,
+"attachments": expected_attachments,
+"toPersonEmail": "example1@test.com",
+}
+# Check that requests.post was called for both roomId and toPersonEmail destinations
+mock_post.assert_any_call(
+destination.api_base_url,
+json=expected_payload_room,
+headers={"Authorization": "Bearer abcd"},
+timeout=5.0,
+)
+mock_post.assert_any_call(
+destination.api_base_url,
+json=expected_payload_email,
+headers={"Authorization": "Bearer abcd"},
+timeout=5.0,
+)
+assert mock_response.status_code == 200
+def test_webex_notify_handles_blank_entries():
+alert = mock.Mock(spec_set=["id", "name", "custom_subject", "custom_body", "render_template"])
+alert.id = 1
+alert.name = "Test Alert"
+alert.custom_subject = "Test custom subject"
+alert.custom_body = "Test custom body"
+alert.render_template = mock.Mock(return_value={"Rendered": "template"})
+query = mock.Mock()
+query.id = 1
+user = mock.Mock()
+app = mock.Mock()
+host = "https://localhost:5000"
+options = {
+"webex_bot_token": "abcd",
+"to_room_ids": "",
+"to_person_emails": "",
+}
+metadata = {"Scheduled": False}
+new_state = Alert.TRIGGERED_STATE
+destination = Webex(options)
+with mock.patch("redash.destinations.webex.requests.post") as mock_post:
+destination.notify(alert, query, user, new_state, app, host, metadata, options)
+# Ensure no API calls are made when destinations are blank
+mock_post.assert_not_called()
+def test_webex_notify_handles_2d_array():
+alert = mock.Mock(spec_set=["id", "name", "custom_subject", "custom_body", "render_template"])
+alert.id = 1
+alert.name = "Test Alert"
+alert.custom_subject = "Test custom subject"
+alert.custom_body = "Test custom body with table [['Col1', 'Col2'], ['Val1', 'Val2']]"
+alert.render_template = mock.Mock(return_value={"Rendered": "template"})
+query = mock.Mock()
+query.id = 1
+user = mock.Mock()
+app = mock.Mock()
+host = "https://localhost:5000"
+options = {
+"webex_bot_token": "abcd",
+"to_room_ids": "1234",
+}
+metadata = {"Scheduled": False}
+new_state = Alert.TRIGGERED_STATE
+destination = Webex(options)
+with mock.patch("redash.destinations.webex.requests.post") as mock_post:
+mock_response = mock.Mock()
+mock_response.status_code = 200
+mock_post.return_value = mock_response
+destination.notify(alert, query, user, new_state, app, host, metadata, options)
+query_link = f"{host}/queries/{query.id}"
+alert_link = f"{host}/alerts/{alert.id}"
+expected_attachments = Webex.formatted_attachments_template(
+alert.custom_subject, alert.custom_body, query_link, alert_link
+)
expected_payload = {
"markdown": alert.custom_subject + "\n" + alert.custom_body,
"attachments": formatted_attachments,
"attachments": expected_attachments,
"roomId": "1234",
}
@@ -302,7 +404,60 @@ def test_webex_notify_calls_requests_post():
timeout=5.0,
)
-assert mock_response.status_code == 204
+assert mock_response.status_code == 200
+def test_webex_notify_handles_1d_array():
+alert = mock.Mock(spec_set=["id", "name", "custom_subject", "custom_body", "render_template"])
+alert.id = 1
+alert.name = "Test Alert"
+alert.custom_subject = "Test custom subject"
+alert.custom_body = "Test custom body with 1D array, however unlikely ['Col1', 'Col2']"
+alert.render_template = mock.Mock(return_value={"Rendered": "template"})
+query = mock.Mock()
+query.id = 1
+user = mock.Mock()
+app = mock.Mock()
+host = "https://localhost:5000"
+options = {
+"webex_bot_token": "abcd",
+"to_room_ids": "1234",
+}
+metadata = {"Scheduled": False}
+new_state = Alert.TRIGGERED_STATE
+destination = Webex(options)
+with mock.patch("redash.destinations.webex.requests.post") as mock_post:
+mock_response = mock.Mock()
+mock_response.status_code = 200
+mock_post.return_value = mock_response
+destination.notify(alert, query, user, new_state, app, host, metadata, options)
+query_link = f"{host}/queries/{query.id}"
+alert_link = f"{host}/alerts/{alert.id}"
+expected_attachments = Webex.formatted_attachments_template(
+alert.custom_subject, alert.custom_body, query_link, alert_link
+)
+expected_payload = {
+"markdown": alert.custom_subject + "\n" + alert.custom_body,
+"attachments": expected_attachments,
+"roomId": "1234",
+}
+mock_post.assert_called_once_with(
+destination.api_base_url,
+json=expected_payload,
+headers={"Authorization": "Bearer abcd"},
+timeout=5.0,
+)
+assert mock_response.status_code == 200
def test_datadog_notify_calls_requests_post():

View File

@@ -5,7 +5,7 @@ Object {
"columns": Array [
Object {
"alignContent": "right",
"allowHTML": true,
"allowHTML": false,
"allowSearch": false,
"booleanValues": Array [
"false",
@@ -38,7 +38,7 @@ Object {
"columns": Array [
Object {
"alignContent": "left",
"allowHTML": true,
"allowHTML": false,
"allowSearch": false,
"booleanValues": Array [
"false",
@@ -71,7 +71,7 @@ Object {
"columns": Array [
Object {
"alignContent": "left",
"allowHTML": true,
"allowHTML": false,
"allowSearch": false,
"booleanValues": Array [
"false",
@@ -104,7 +104,7 @@ Object {
"columns": Array [
Object {
"alignContent": "left",
"allowHTML": true,
"allowHTML": false,
"allowSearch": true,
"booleanValues": Array [
"false",
@@ -137,7 +137,7 @@ Object {
"columns": Array [
Object {
"alignContent": "left",
"allowHTML": true,
"allowHTML": false,
"allowSearch": false,
"booleanValues": Array [
"false",

View File

@@ -54,7 +54,7 @@ function getDefaultColumnsOptions(columns: any) {
allowSearch: false,
alignContent: getColumnContentAlignment(col.type),
// `string` cell options
-allowHTML: true,
+allowHTML: false,
highlightLinks: false,
}));
}