Compare commits

...

32 Commits

Author SHA1 Message Date
github-actions[bot]
6b1e910126 Snapshot: 25.06.0-dev 2025-06-01 00:45:45 +00:00
Tsuneo Yoshioka
14550a9a6c Fix: saving empty query with auto limit crashes (#7430)
Co-authored-by: Eric Radman <eradman@starfishstorage.com>
2025-05-20 14:26:17 +00:00
Emmanuel Ferdman
b80c5f6a7c Update assertion method in JSON dumps test (#7424)
Signed-off-by: Emmanuel Ferdman <emmanuelferdman@gmail.com>
Co-authored-by: snickerjp <snickerjp@gmail.com>
2025-05-18 12:03:41 -07:00
Tsuneo Yoshioka
e46d44f208 include Plotly.js localization (#7323) 2025-05-16 19:17:32 -04:00
Tsuneo Yoshioka
a1a4bc9d3e TypeScript sourcemap for viz-lib (#7336) 2025-05-12 18:08:33 -04:00
Tsuneo Yoshioka
0900178d24 Change query processing wait time to make the response quicker (#7320)
* Change query processing wait time to make the response quicker

* Fix styling errors reported by Restyled
2025-05-07 01:22:35 +00:00
Gleb Lesnikov
5d31429ca8 Update Azure Data Explorer query runner to latest version (#7411)
* Update Azure Data Explorer query runner to latest version

* Fix black issue

* downgrade azure-kusto-data to 4.6.3

* Freeze numpy to 1.24.4 because of 2.0.0 incompatibility

* Fix failing test

* Reformat test
2025-05-05 06:53:07 +00:00
Eric Radman
2f35ceb803 Push image using DOCKER_REPOSITORY (#7428)
Preview images work for personal repositories, but we missed another variable
when publishing official images:

  #34 [auth] arikfr/redash:pull,push token for registry-1.docker.io
  #34 DONE 0.0s
  #33 exporting to image
  #33 pushing layers 15.5s done
  #33 pushing manifest for docker.io/arikfr/redash
  #33 pushing manifest for docker.io/arikfr/redash 1.6s done
  #33 ...
  #35 [auth] arikfr/preview:pull,push token for registry-1.docker.io
  #35 DONE 0.0s
2025-05-04 23:18:53 -07:00
Lucas Fernando Cardoso Nunes
8e6c02ecde ci: snapshot only on default branch (#7355) 2025-05-01 13:15:57 +00:00
github-actions[bot]
231fd36d46 Snapshot: 25.05.0-dev 2025-05-01 00:39:58 +00:00
Tsuneo Yoshioka
0b6a53a079 Add translate="no" to the html tag to prevent browser auto-translation from crashing Redash (#7421) 2025-04-29 12:36:26 -04:00
Tsuneo Yoshioka
6167edf97c Change BigQuery super class from BaseQueryRunner to BaseSQLQueryRunner (#7378) 2025-04-16 16:28:17 +09:00
Tsuneo Yoshioka
4ed0ad3c9c BigQuery: Avoid too-long (10 second) interval when polling the BigQuery API for results (#7342) 2025-04-14 11:40:24 +00:00
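One way to avoid a pause that long is to cap how much the sleep between result checks can grow. A minimal sketch of that idea only; the function and parameter names are illustrative and this is not Redash's actual BigQuery runner:

```
import time

def poll_until_done(fetch_results, initial_wait=1.0, max_wait=2.0):
    # fetch_results is a placeholder callable that returns None while the
    # job is still running; the key point is the min() cap on the wait time,
    # which keeps the back-off from reaching a multi-second pause.
    wait = initial_wait
    while True:
        results = fetch_results()
        if results is not None:
            return results
        time.sleep(wait)
        wait = min(wait * 2, max_wait)
```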
Eric Radman
2375f0b05f Partially Revert "Remove workaround from check_csrf() (#6919)" (#7327)
This workaround was missing `if view is not None`, as found in
https://github.com/pallets-eco/flask-wtf/pull/419/files

Tested with MULTI_ORG enabled.
2025-04-10 22:25:49 +00:00
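For context, the guard referenced above comes from flask-wtf's CSRF protection, where the view function looked up for the current endpoint can be None. A minimal sketch of the pattern, with names following flask-wtf's CSRFProtect; treat it as an illustration rather than Redash's exact check_csrf():

```
from flask import current_app, request

def view_is_exempt(exempt_views):
    # current_app.view_functions may not contain the endpoint (for example
    # on 404s or under MULTI_ORG URL prefixes), in which case view is None
    # and building the dotted name below would fail without the guard.
    view = current_app.view_functions.get(request.endpoint)
    if view is not None:
        dest = f"{view.__module__}.{view.__name__}"
        return dest in exempt_views
    return False
```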
Eric Radman
eced377ae4 Require vars.DOCKER_REPOSITORY to publish image (#7400)
To allow user arikfr to publish images to redash/redash and redash/preview.
Only use vars.DOCKER_USER and secrets.DOCKER_PASSWORD for authorization.
2025-04-03 15:27:11 -04:00
Tsuneo Yoshioka
84262fe143 Fix table item list ordering (#7366)
Fix query list column sorting.

Repeatedly clicking a column header cycled through mismatched states:

- descending order, no triangle mark
- ascending order, up triangle mark (▲)
- descending order, down triangle mark (▼)
- ascending order, no triangle mark
- descending order, up triangle mark (▲)
- ascending order, down triangle mark (▼)
- descending order, no triangle mark

The sorting order has a 2-click cycle while the triangle mark has a 3-click cycle, so the indicator drifts out of sync with the actual order.
2025-04-03 16:51:20 +00:00
github-actions[bot]
612eb8c630 Snapshot: 25.04.0-dev 2025-04-01 00:39:21 +00:00
dependabot[bot]
866fb48afb Bump tar-fs from 2.1.1 to 2.1.2 (#7385) 2025-03-29 04:56:15 +00:00
Tsuneo Yoshioka
353776e8e1 Fix to make "show data labels" on bar chart works (#7363) 2025-03-17 11:43:02 -04:00
Tsuneo Yoshioka
594e2f24ef Upgrade plotly.js to version 2 to fix the UI crashing issue (#7359)
* Upgrade plotly.js to version 2

* Fix styling error reported by Restyled
2025-03-05 14:30:28 +00:00
github-actions[bot]
3275a9e459 Snapshot: 25.03.0-dev 2025-03-01 00:35:44 +00:00
Shunki
3bad8c8e8c TiDB: Exclude INFORMATION_SCHEMA (#7352)
Co-authored-by: snickerjp <snickerjp@gmail.com>
2025-02-28 11:09:46 +09:00
Tsuneo Yoshioka
d0af4499d6 Sanitize NaN, Infinity, -Infinity values that cause an error when saving as PostgreSQL JSON #7339 (2nd try) (#7348)
* Sanitize NaN, Infinity, -Infinity values that cause an error when saving as PostgreSQL JSON #7339 (2nd try)

* Move JSON sanitization to the top of json_dumps

* Fix comment
2025-02-27 01:40:43 -08:00
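Python's json.dumps happily emits NaN and Infinity tokens, which PostgreSQL's json type rejects, hence the sanitization step. A minimal sketch of the idea; replacing the values with None is an assumption here, not necessarily what Redash's helper does:

```
import json
import math

def sanitize(value):
    # Recursively replace NaN / Infinity / -Infinity before serializing,
    # so the output is strictly valid JSON.
    if isinstance(value, float) and (math.isnan(value) or math.isinf(value)):
        return None
    if isinstance(value, dict):
        return {k: sanitize(v) for k, v in value.items()}
    if isinstance(value, (list, tuple)):
        return [sanitize(v) for v in value]
    return value

print(json.dumps(sanitize({"rows": [float("nan"), float("inf"), 1.5]})))
# {"rows": [null, null, 1.5]}
```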
Ran Benita
4357ea56ae Fix UnboundLocalError when checking alerts for query (#7346)
This fixes the following exception:

```
UnboundLocalError: local variable 'value_is_number' referenced before assignment
  File "rq/worker.py", line 1431, in perform_job
    rv = job.perform()
  File "rq/job.py", line 1280, in perform
    self._result = self._execute()
  File "rq/job.py", line 1317, in _execute
    result = self.func(*self.args, **self.kwargs)
  File "redash/tasks/alerts.py", line 36, in check_alerts_for_query
    new_state = alert.evaluate()
  File "redash/models/__init__.py", line 1002, in evaluate
    new_state = next_state(op, value, threshold)
  File "redash/models/__init__.py", line 928, in next_state
    elif not value_is_number and op not in [OPERATORS.get("!="), OPERATORS.get("=="), OPERATORS.get("equals")]:
```
2025-02-25 09:15:20 -05:00
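A condensed repro of the bug in the traceback above, based on the one-line fix shown in the models diff further down this compare; the surrounding alert logic and names are simplified:

```
def classify(value):
    # Boolean threshold values take the first branch, which previously
    # never assigned value_is_number, so the later reference raised
    # UnboundLocalError. The fix is the explicit assignment below.
    if isinstance(value, bool):
        value = str(value).lower()
        value_is_number = False
    else:
        try:
            value = float(value)
            value_is_number = True
        except ValueError:
            value_is_number = False
    return value, value_is_number
```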
Tsuneo Yoshioka
5df5ca87a2 add NULLS LAST option for Query order (#7341) 2025-02-25 10:58:48 +08:00
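For illustration, this is what a NULLS LAST ordering looks like with SQLAlchemy, which Redash uses for its listings; the model below is a placeholder, not Redash's actual Query model:

```
from sqlalchemy import Column, DateTime, Integer, nullslast, select
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class QueryRow(Base):  # placeholder table, for illustration only
    __tablename__ = "queries"
    id = Column(Integer, primary_key=True)
    retrieved_at = Column(DateTime)

# Rows with a NULL retrieved_at sort after everything else instead of first.
stmt = select(QueryRow).order_by(nullslast(QueryRow.retrieved_at.desc()))
print(stmt)  # ... ORDER BY queries.retrieved_at DESC NULLS LAST
```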
Tsuneo Yoshioka
8387fe6fcb Fix the issue where charts (scatter, line, bubble, ...) with the same x-value show the wrong y-value (#7330) 2025-02-18 20:04:12 +00:00
snickerjp
e95de2ee4c Update oracledb package to version 2.5.1 and adjust Python version compatibility (#7316) 2025-02-18 23:00:09 +09:00
Lee2532
71902e5933 Fix: Redash Docker image tag (#7280)
Co-authored-by: snickerjp <snickerjp@gmail.com>
2025-02-15 01:38:23 +09:00
Tsuneo Yoshioka
53eab14cef Make autocomplete always available (#7326) 2025-02-13 15:25:39 -05:00
Eric Radman
925bb91d8e Use absolute path for image resources (#7322)
When MULTI_ORG is enabled, 'static/' resolves to '<org>/static/'
2025-02-12 08:37:40 -05:00
Tsuneo Yoshioka
ec2ca6f986 BigQuery: show column type on Schema Browser (#7257) 2025-02-05 18:25:39 +00:00
Matt Nelson
96ea0194e8 Fix errors in webex alert destination. Add formatting support for QUERY_RESULT_TABLE. (#7296)
* prevent text values in payload being detected as 'set' on send.
Webex send ERROR:: Object of type set is not JSON serializable

Signed-off-by: Matt Nelson <metheos@gmail.com>

* add support for formatted QUERY_RESULT_TABLE in webex card

Signed-off-by: Matt Nelson <metheos@gmail.com>

* don't try to send to blank destinations

Signed-off-by: Matt Nelson <metheos@gmail.com>

* fix handling of the encoded QUERY_RESULTS_TABLE text

Signed-off-by: Matt Nelson <metheos@gmail.com>

* re-sort imports for ruff

Signed-off-by: Matt Nelson <metheos@gmail.com>

* change formatter to black

Signed-off-by: Matt Nelson <metheos@gmail.com>

* Add additional tests for Webex notification handling

ensure blank entries are handled for room IDs and person emails.
ensure that the API is not called when no valid destinations are provided.
ensure proper attachment formatting for alerts containing 2D arrays.

Signed-off-by: Matt Nelson <metheos@gmail.com>

* Add test for Webex notification with 1D array handling

This commit introduces a new test case to verify that the Webex
notification function correctly handles a 1D array input in the alert body.
The test ensures that the expected payload is constructed properly and that
the requests.post method is called with the correct parameters.

Signed-off-by: Matt Nelson <metheos@gmail.com>

---------

Signed-off-by: Matt Nelson <metheos@gmail.com>
2025-02-04 11:05:13 +00:00
46 changed files with 3604 additions and 4310 deletions

View File

@@ -2,7 +2,7 @@ name: Periodic Snapshot
 on:
   schedule:
-    - cron: '10 0 1 * *' # 10 minutes after midnight on the first of every month
+    - cron: '10 0 1 * *' # 10 minutes after midnight on the first day of every month
   workflow_dispatch:
     inputs:
       bump:
@@ -24,6 +24,7 @@ permissions:
 jobs:
   bump-version-and-tag:
     runs-on: ubuntu-latest
+    if: github.ref_name == github.event.repository.default_branch
     steps:
       - uses: actions/checkout@v4
         with:

View File

@@ -32,6 +32,9 @@ jobs:
elif [[ "${{ secrets.DOCKER_PASS }}" == '' ]]; then elif [[ "${{ secrets.DOCKER_PASS }}" == '' ]]; then
echo 'Docker password is empty. Skipping build+push' echo 'Docker password is empty. Skipping build+push'
echo skip=true >> "$GITHUB_OUTPUT" echo skip=true >> "$GITHUB_OUTPUT"
elif [[ "${{ vars.DOCKER_REPOSITORY }}" == '' ]]; then
echo 'Docker repository is empty. Skipping build+push'
echo skip=true >> "$GITHUB_OUTPUT"
else else
echo 'Docker user and password are set and branch is `master`.' echo 'Docker user and password are set and branch is `master`.'
echo 'Building + pushing `preview` image.' echo 'Building + pushing `preview` image.'
@@ -97,8 +100,8 @@ jobs:
if: ${{ github.event.inputs.dockerRepository == 'preview' || !github.event.workflow_run }} if: ${{ github.event.inputs.dockerRepository == 'preview' || !github.event.workflow_run }}
with: with:
tags: | tags: |
${{ vars.DOCKER_USER }}/redash ${{ vars.DOCKER_REPOSITORY }}/redash
${{ vars.DOCKER_USER }}/preview ${{ vars.DOCKER_REPOSITORY }}/preview
context: . context: .
build-args: | build-args: |
test_all_deps=true test_all_deps=true
@@ -114,7 +117,7 @@ jobs:
if: ${{ github.event.inputs.dockerRepository == 'redash' }} if: ${{ github.event.inputs.dockerRepository == 'redash' }}
with: with:
tags: | tags: |
${{ vars.DOCKER_USER }}/redash:${{ steps.version.outputs.VERSION_TAG }} ${{ vars.DOCKER_REPOSITORY }}/redash:${{ steps.version.outputs.VERSION_TAG }}
context: . context: .
build-args: | build-args: |
test_all_deps=true test_all_deps=true
@@ -169,14 +172,14 @@ jobs:
if: ${{ github.event.inputs.dockerRepository == 'preview' || !github.event.workflow_run }} if: ${{ github.event.inputs.dockerRepository == 'preview' || !github.event.workflow_run }}
working-directory: ${{ runner.temp }}/digests working-directory: ${{ runner.temp }}/digests
run: | run: |
docker buildx imagetools create -t ${{ vars.DOCKER_USER }}/redash:preview \ docker buildx imagetools create -t ${{ vars.DOCKER_REPOSITORY }}/redash:preview \
$(printf '${{ vars.DOCKER_USER }}/redash:preview@sha256:%s ' *) $(printf '${{ vars.DOCKER_REPOSITORY }}/redash:preview@sha256:%s ' *)
docker buildx imagetools create -t ${{ vars.DOCKER_USER }}/preview:${{ needs.build-docker-image.outputs.VERSION_TAG }} \ docker buildx imagetools create -t ${{ vars.DOCKER_REPOSITORY }}/preview:${{ needs.build-docker-image.outputs.VERSION_TAG }} \
$(printf '${{ vars.DOCKER_USER }}/preview:${{ needs.build-docker-image.outputs.VERSION_TAG }}@sha256:%s ' *) $(printf '${{ vars.DOCKER_REPOSITORY }}/preview:${{ needs.build-docker-image.outputs.VERSION_TAG }}@sha256:%s ' *)
- name: Create and push manifest for the release image - name: Create and push manifest for the release image
if: ${{ github.event.inputs.dockerRepository == 'redash' }} if: ${{ github.event.inputs.dockerRepository == 'redash' }}
working-directory: ${{ runner.temp }}/digests working-directory: ${{ runner.temp }}/digests
run: | run: |
docker buildx imagetools create -t ${{ vars.DOCKER_USER }}/redash:${{ needs.build-docker-image.outputs.VERSION_TAG }} \ docker buildx imagetools create -t ${{ vars.DOCKER_REPOSITORY }}/redash:${{ needs.build-docker-image.outputs.VERSION_TAG }} \
$(printf '${{ vars.DOCKER_USER }}/redash:${{ needs.build-docker-image.outputs.VERSION_TAG }}@sha256:%s ' *) $(printf '${{ vars.DOCKER_REPOSITORY }}/redash:${{ needs.build-docker-image.outputs.VERSION_TAG }}@sha256:%s ' *)

View File

@@ -34,7 +34,7 @@ clean:
 clean-all: clean
 	docker image rm --force \
-		redash/redash:10.1.0.b50633 redis:7-alpine maildev/maildev:latest \
+		redash/redash:latest redis:7-alpine maildev/maildev:latest \
 		pgautoupgrade/pgautoupgrade:15-alpine3.8 pgautoupgrade/pgautoupgrade:latest

 down:

View File

@@ -69,7 +69,7 @@ UserPreviewCard.defaultProps = {
 // DataSourcePreviewCard
 export function DataSourcePreviewCard({ dataSource, withLink, children, ...props }) {
-  const imageUrl = `static/images/db-logos/${dataSource.type}.png`;
+  const imageUrl = `/static/images/db-logos/${dataSource.type}.png`;
   const title = withLink ? <Link href={"data_sources/" + dataSource.id}>{dataSource.name}</Link> : dataSource.name;

   return (
     <PreviewCard {...props} imageUrl={imageUrl} title={title}>

View File

@@ -96,7 +96,7 @@ function EmptyState({
   }, []);

   // Show if `onboardingMode=false` or any requested step not completed
-  const shouldShow = !onboardingMode || some(keys(isAvailable), step => isAvailable[step] && !isCompleted[step]);
+  const shouldShow = !onboardingMode || some(keys(isAvailable), (step) => isAvailable[step] && !isCompleted[step]);

   if (!shouldShow) {
     return null;
@@ -181,7 +181,7 @@ function EmptyState({
   ];
   const stepsItems = getStepsItems ? getStepsItems(defaultStepsItems) : defaultStepsItems;

-  const imageSource = illustrationPath ? illustrationPath : "static/images/illustrations/" + illustration + ".svg";
+  const imageSource = illustrationPath ? illustrationPath : "/static/images/illustrations/" + illustration + ".svg";

   return (
     <div className="empty-state-wrapper">
@@ -196,7 +196,7 @@ function EmptyState({
         </div>
         <div className="empty-state__steps">
           <h4>Let&apos;s get started</h4>
-          <ol>{stepsItems.map(item => item.node)}</ol>
+          <ol>{stepsItems.map((item) => item.node)}</ol>
           {helpMessage}
         </div>
       </div>

View File

@@ -28,6 +28,7 @@ export interface Controller<I, P = any> {
   orderByField?: string;
   orderByReverse: boolean;
   toggleSorting: (orderByField: string) => void;
+  setSorting: (orderByField: string, orderByReverse: boolean) => void;

   // pagination
   page: number;
@@ -139,10 +140,11 @@ export function wrap<I, P = any>(
         this.props.onError!(error);

       const initialState = this.getState({ ...itemsSource.getState(), isLoaded: false });
-      const { updatePagination, toggleSorting, updateSearch, updateSelectedTags, update, handleError } = itemsSource;
+      const { updatePagination, toggleSorting, setSorting, updateSearch, updateSelectedTags, update, handleError } = itemsSource;
       this.state = {
         ...initialState,
         toggleSorting, // eslint-disable-line react/no-unused-state
+        setSorting, // eslint-disable-line react/no-unused-state
         updateSearch: debounce(updateSearch, 200), // eslint-disable-line react/no-unused-state
         updateSelectedTags, // eslint-disable-line react/no-unused-state
         updatePagination, // eslint-disable-line react/no-unused-state

View File

@@ -39,14 +39,12 @@ export class ItemsSource {
const customParams = {}; const customParams = {};
const context = { const context = {
...this.getCallbackContext(), ...this.getCallbackContext(),
setCustomParams: params => { setCustomParams: (params) => {
extend(customParams, params); extend(customParams, params);
}, },
}; };
return this._beforeUpdate().then(() => { return this._beforeUpdate().then(() => {
const fetchToken = Math.random() const fetchToken = Math.random().toString(36).substr(2);
.toString(36)
.substr(2);
this._currentFetchToken = fetchToken; this._currentFetchToken = fetchToken;
return this._fetcher return this._fetcher
.fetch(changes, state, context) .fetch(changes, state, context)
@@ -59,7 +57,7 @@ export class ItemsSource {
return this._afterUpdate(); return this._afterUpdate();
} }
}) })
.catch(error => this.handleError(error)); .catch((error) => this.handleError(error));
}); });
} }
@@ -124,13 +122,20 @@ export class ItemsSource {
}); });
}; };
toggleSorting = orderByField => { toggleSorting = (orderByField) => {
this._sorter.toggleField(orderByField); this._sorter.toggleField(orderByField);
this._savedOrderByField = this._sorter.field; this._savedOrderByField = this._sorter.field;
this._changed({ sorting: true }); this._changed({ sorting: true });
}; };
updateSearch = searchTerm => { setSorting = (orderByField, orderByReverse) => {
this._sorter.setField(orderByField);
this._sorter.setReverse(orderByReverse);
this._savedOrderByField = this._sorter.field;
this._changed({ sorting: true });
};
updateSearch = (searchTerm) => {
// here we update state directly, but later `fetchData` will update it properly // here we update state directly, but later `fetchData` will update it properly
this._searchTerm = searchTerm; this._searchTerm = searchTerm;
// in search mode ignore the ordering and use the ranking order // in search mode ignore the ordering and use the ranking order
@@ -145,7 +150,7 @@ export class ItemsSource {
this._changed({ search: true, pagination: { page: true } }); this._changed({ search: true, pagination: { page: true } });
}; };
updateSelectedTags = selectedTags => { updateSelectedTags = (selectedTags) => {
this._selectedTags = selectedTags; this._selectedTags = selectedTags;
this._paginator.setPage(1); this._paginator.setPage(1);
this._changed({ tags: true, pagination: { page: true } }); this._changed({ tags: true, pagination: { page: true } });
@@ -153,7 +158,7 @@ export class ItemsSource {
update = () => this._changed(); update = () => this._changed();
handleError = error => { handleError = (error) => {
if (isFunction(this.onError)) { if (isFunction(this.onError)) {
this.onError(error); this.onError(error);
} }
@@ -172,7 +177,7 @@ export class ResourceItemsSource extends ItemsSource {
processResults: (results, context) => { processResults: (results, context) => {
let processItem = getItemProcessor(context); let processItem = getItemProcessor(context);
processItem = isFunction(processItem) ? processItem : identity; processItem = isFunction(processItem) ? processItem : identity;
return map(results, item => processItem(item, context)); return map(results, (item) => processItem(item, context));
}, },
}); });
} }

View File

@@ -44,7 +44,7 @@ export const Columns = {
date(overrides) { date(overrides) {
return extend( return extend(
{ {
render: text => formatDate(text), render: (text) => formatDate(text),
}, },
overrides overrides
); );
@@ -52,7 +52,7 @@ export const Columns = {
dateTime(overrides) { dateTime(overrides) {
return extend( return extend(
{ {
render: text => formatDateTime(text), render: (text) => formatDateTime(text),
}, },
overrides overrides
); );
@@ -62,7 +62,7 @@ export const Columns = {
{ {
width: "1%", width: "1%",
className: "text-nowrap", className: "text-nowrap",
render: text => durationHumanize(text), render: (text) => durationHumanize(text),
}, },
overrides overrides
); );
@@ -70,7 +70,7 @@ export const Columns = {
timeAgo(overrides, timeAgoCustomProps = undefined) { timeAgo(overrides, timeAgoCustomProps = undefined) {
return extend( return extend(
{ {
render: value => <TimeAgo date={value} {...timeAgoCustomProps} />, render: (value) => <TimeAgo date={value} {...timeAgoCustomProps} />,
}, },
overrides overrides
); );
@@ -110,6 +110,7 @@ export default class ItemsTable extends React.Component {
orderByField: PropTypes.string, orderByField: PropTypes.string,
orderByReverse: PropTypes.bool, orderByReverse: PropTypes.bool,
toggleSorting: PropTypes.func, toggleSorting: PropTypes.func,
setSorting: PropTypes.func,
"data-test": PropTypes.string, "data-test": PropTypes.string,
rowKey: PropTypes.oneOfType([PropTypes.string, PropTypes.func]), rowKey: PropTypes.oneOfType([PropTypes.string, PropTypes.func]),
}; };
@@ -127,18 +128,15 @@ export default class ItemsTable extends React.Component {
}; };
prepareColumns() { prepareColumns() {
const { orderByField, orderByReverse, toggleSorting } = this.props; const { orderByField, orderByReverse } = this.props;
const orderByDirection = orderByReverse ? "descend" : "ascend"; const orderByDirection = orderByReverse ? "descend" : "ascend";
return map( return map(
map( map(
filter(this.props.columns, column => (isFunction(column.isAvailable) ? column.isAvailable() : true)), filter(this.props.columns, (column) => (isFunction(column.isAvailable) ? column.isAvailable() : true)),
column => extend(column, { orderByField: column.orderByField || column.field }) (column) => extend(column, { orderByField: column.orderByField || column.field })
), ),
(column, index) => { (column, index) => {
// Bind click events only to sortable columns
const onHeaderCell = column.sorter ? () => ({ onClick: () => toggleSorting(column.orderByField) }) : null;
// Wrap render function to pass correct arguments // Wrap render function to pass correct arguments
const render = isFunction(column.render) ? (text, row) => column.render(text, row.item) : identity; const render = isFunction(column.render) ? (text, row) => column.render(text, row.item) : identity;
@@ -146,14 +144,13 @@ export default class ItemsTable extends React.Component {
key: "column" + index, key: "column" + index,
dataIndex: ["item", column.field], dataIndex: ["item", column.field],
defaultSortOrder: column.orderByField === orderByField ? orderByDirection : null, defaultSortOrder: column.orderByField === orderByField ? orderByDirection : null,
onHeaderCell,
render, render,
}); });
} }
); );
} }
getRowKey = record => { getRowKey = (record) => {
const { rowKey } = this.props; const { rowKey } = this.props;
if (rowKey) { if (rowKey) {
if (isFunction(rowKey)) { if (isFunction(rowKey)) {
@@ -172,22 +169,43 @@ export default class ItemsTable extends React.Component {
// Bind events only if `onRowClick` specified // Bind events only if `onRowClick` specified
const onTableRow = isFunction(this.props.onRowClick) const onTableRow = isFunction(this.props.onRowClick)
? row => ({ ? (row) => ({
onClick: event => { onClick: (event) => {
this.props.onRowClick(event, row.item); this.props.onRowClick(event, row.item);
}, },
}) })
: null; : null;
const onChange = (pagination, filters, sorter, extra) => {
const action = extra?.action;
if (action === "sort") {
const propsColumn = this.props.columns.find((column) => column.field === sorter.field[1]);
if (!propsColumn.sorter) {
return;
}
let orderByField = propsColumn.orderByField;
const orderByReverse = sorter.order === "descend";
if (orderByReverse === undefined) {
orderByField = null;
}
if (this.props.setSorting) {
this.props.setSorting(orderByField, orderByReverse);
} else {
this.props.toggleSorting(orderByField);
}
}
};
const { showHeader } = this.props; const { showHeader } = this.props;
if (this.props.loading) { if (this.props.loading) {
if (isEmpty(tableDataProps.dataSource)) { if (isEmpty(tableDataProps.dataSource)) {
tableDataProps.columns = tableDataProps.columns.map(column => ({ tableDataProps.columns = tableDataProps.columns.map((column) => ({
...column, ...column,
sorter: false, sorter: false,
render: () => <Skeleton active paragraph={false} />, render: () => <Skeleton active paragraph={false} />,
})); }));
tableDataProps.dataSource = range(10).map(key => ({ key: `${key}` })); tableDataProps.dataSource = range(10).map((key) => ({ key: `${key}` }));
} else { } else {
tableDataProps.loading = { indicator: null }; tableDataProps.loading = { indicator: null };
} }
@@ -200,6 +218,7 @@ export default class ItemsTable extends React.Component {
rowKey={this.getRowKey} rowKey={this.getRowKey}
pagination={false} pagination={false}
onRow={onTableRow} onRow={onTableRow}
onChange={onChange}
data-test={this.props["data-test"]} data-test={this.props["data-test"]}
{...tableDataProps} {...tableDataProps}
/> />

View File

@@ -1,5 +1,5 @@
 <!DOCTYPE html>
-<html lang="en">
+<html lang="en" translate="no">
   <head>
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
     <meta charset="UTF-8" />

View File

@@ -160,14 +160,15 @@ function QueriesList({ controller }) {
orderByField={controller.orderByField} orderByField={controller.orderByField}
orderByReverse={controller.orderByReverse} orderByReverse={controller.orderByReverse}
toggleSorting={controller.toggleSorting} toggleSorting={controller.toggleSorting}
setSorting={controller.setSorting}
/> />
<Paginator <Paginator
showPageSizeSelect showPageSizeSelect
totalCount={controller.totalItemsCount} totalCount={controller.totalItemsCount}
pageSize={controller.itemsPerPage} pageSize={controller.itemsPerPage}
onPageSizeChange={itemsPerPage => controller.updatePagination({ itemsPerPage })} onPageSizeChange={(itemsPerPage) => controller.updatePagination({ itemsPerPage })}
page={controller.page} page={controller.page}
onChange={page => controller.updatePagination({ page })} onChange={(page) => controller.updatePagination({ page })}
/> />
</div> </div>
</React.Fragment> </React.Fragment>
@@ -196,7 +197,7 @@ const QueriesListPage = itemsList(
}[currentPage]; }[currentPage];
}, },
getItemProcessor() { getItemProcessor() {
return item => new Query(item); return (item) => new Query(item);
}, },
}), }),
() => new UrlStateStorage({ orderByField: "created_at", orderByReverse: true }) () => new UrlStateStorage({ orderByField: "created_at", orderByReverse: true })
@@ -207,7 +208,7 @@ routes.register(
routeWithUserSession({ routeWithUserSession({
path: "/queries", path: "/queries",
title: "Queries", title: "Queries",
render: pageProps => <QueriesListPage {...pageProps} currentPage="all" />, render: (pageProps) => <QueriesListPage {...pageProps} currentPage="all" />,
}) })
); );
routes.register( routes.register(
@@ -215,7 +216,7 @@ routes.register(
routeWithUserSession({ routeWithUserSession({
path: "/queries/favorites", path: "/queries/favorites",
title: "Favorite Queries", title: "Favorite Queries",
render: pageProps => <QueriesListPage {...pageProps} currentPage="favorites" />, render: (pageProps) => <QueriesListPage {...pageProps} currentPage="favorites" />,
}) })
); );
routes.register( routes.register(
@@ -223,7 +224,7 @@ routes.register(
routeWithUserSession({ routeWithUserSession({
path: "/queries/archive", path: "/queries/archive",
title: "Archived Queries", title: "Archived Queries",
render: pageProps => <QueriesListPage {...pageProps} currentPage="archive" />, render: (pageProps) => <QueriesListPage {...pageProps} currentPage="archive" />,
}) })
); );
routes.register( routes.register(
@@ -231,6 +232,6 @@ routes.register(
routeWithUserSession({ routeWithUserSession({
path: "/queries/my", path: "/queries/my",
title: "My Queries", title: "My Queries",
render: pageProps => <QueriesListPage {...pageProps} currentPage="my" />, render: (pageProps) => <QueriesListPage {...pageProps} currentPage="my" />,
}) })
); );

View File

@@ -2,7 +2,7 @@ import PropTypes from "prop-types";
import React from "react"; import React from "react";
export function QuerySourceTypeIcon(props) { export function QuerySourceTypeIcon(props) {
return <img src={`static/images/db-logos/${props.type}.png`} width="20" alt={props.alt} />; return <img src={`/static/images/db-logos/${props.type}.png`} width="20" alt={props.alt} />;
} }
QuerySourceTypeIcon.propTypes = { QuerySourceTypeIcon.propTypes = {

View File

@@ -18,7 +18,7 @@ function EmptyState({ title, message, refreshButton }) {
<div className="query-results-empty-state"> <div className="query-results-empty-state">
<div className="empty-state-content"> <div className="empty-state-content">
<div> <div>
<img src="static/images/illustrations/no-query-results.svg" alt="No Query Results Illustration" /> <img src="/static/images/illustrations/no-query-results.svg" alt="No Query Results Illustration" />
</div> </div>
<h3>{title}</h3> <h3>{title}</h3>
<div className="m-b-20">{message}</div> <div className="m-b-20">{message}</div>
@@ -40,7 +40,7 @@ EmptyState.defaultProps = {
function TabWithDeleteButton({ visualizationName, canDelete, onDelete, ...props }) { function TabWithDeleteButton({ visualizationName, canDelete, onDelete, ...props }) {
const handleDelete = useCallback( const handleDelete = useCallback(
e => { (e) => {
e.stopPropagation(); e.stopPropagation();
Modal.confirm({ Modal.confirm({
title: "Delete Visualization", title: "Delete Visualization",
@@ -111,7 +111,8 @@ export default function QueryVisualizationTabs({
className="add-visualization-button" className="add-visualization-button"
data-test="NewVisualization" data-test="NewVisualization"
type="link" type="link"
onClick={() => onAddVisualization()}> onClick={() => onAddVisualization()}
>
<i className="fa fa-plus" aria-hidden="true" /> <i className="fa fa-plus" aria-hidden="true" />
<span className="m-l-5 hidden-xs">Add Visualization</span> <span className="m-l-5 hidden-xs">Add Visualization</span>
</Button> </Button>
@@ -119,7 +120,7 @@ export default function QueryVisualizationTabs({
} }
const orderedVisualizations = useMemo(() => orderBy(visualizations, ["id"]), [visualizations]); const orderedVisualizations = useMemo(() => orderBy(visualizations, ["id"]), [visualizations]);
const isFirstVisualization = useCallback(visId => visId === orderedVisualizations[0].id, [orderedVisualizations]); const isFirstVisualization = useCallback((visId) => visId === orderedVisualizations[0].id, [orderedVisualizations]);
const isMobile = useMedia({ maxWidth: 768 }); const isMobile = useMedia({ maxWidth: 768 });
const [filters, setFilters] = useState([]); const [filters, setFilters] = useState([]);
@@ -132,9 +133,10 @@ export default function QueryVisualizationTabs({
data-test="QueryPageVisualizationTabs" data-test="QueryPageVisualizationTabs"
animated={false} animated={false}
tabBarGutter={0} tabBarGutter={0}
onChange={activeKey => onChangeTab(+activeKey)} onChange={(activeKey) => onChangeTab(+activeKey)}
destroyInactiveTabPane> destroyInactiveTabPane
{orderedVisualizations.map(visualization => ( >
{orderedVisualizations.map((visualization) => (
<TabPane <TabPane
key={`${visualization.id}`} key={`${visualization.id}`}
tab={ tab={
@@ -144,7 +146,8 @@ export default function QueryVisualizationTabs({
visualizationName={visualization.name} visualizationName={visualization.name}
onDelete={() => onDeleteVisualization(visualization.id)} onDelete={() => onDeleteVisualization(visualization.id)}
/> />
}> }
>
{queryResult ? ( {queryResult ? (
<VisualizationRenderer <VisualizationRenderer
visualization={visualization} visualization={visualization}

View File

@@ -1,16 +1,11 @@
 import { useCallback, useMemo, useState } from "react";
-import { reduce } from "lodash";
 import localOptions from "@/lib/localOptions";

-function calculateTokensCount(schema) {
-  return reduce(schema, (totalLength, table) => totalLength + table.columns.length, 0);
-}
-
 export default function useAutocompleteFlags(schema) {
-  const isAvailable = useMemo(() => calculateTokensCount(schema) <= 5000, [schema]);
+  const isAvailable = true;
   const [isEnabled, setIsEnabled] = useState(localOptions.get("liveAutocomplete", true));

-  const toggleAutocomplete = useCallback(state => {
+  const toggleAutocomplete = useCallback((state) => {
     setIsEnabled(state);
     localOptions.set("liveAutocomplete", state);
   }, []);

View File

@@ -4,19 +4,19 @@ import { fetchDataFromJob } from "@/services/query-result";
export const SCHEMA_NOT_SUPPORTED = 1; export const SCHEMA_NOT_SUPPORTED = 1;
export const SCHEMA_LOAD_ERROR = 2; export const SCHEMA_LOAD_ERROR = 2;
export const IMG_ROOT = "static/images/db-logos"; export const IMG_ROOT = "/static/images/db-logos";
function mapSchemaColumnsToObject(columns) { function mapSchemaColumnsToObject(columns) {
return map(columns, column => (isObject(column) ? column : { name: column })); return map(columns, (column) => (isObject(column) ? column : { name: column }));
} }
const DataSource = { const DataSource = {
query: () => axios.get("api/data_sources"), query: () => axios.get("api/data_sources"),
get: ({ id }) => axios.get(`api/data_sources/${id}`), get: ({ id }) => axios.get(`api/data_sources/${id}`),
types: () => axios.get("api/data_sources/types"), types: () => axios.get("api/data_sources/types"),
create: data => axios.post(`api/data_sources`, data), create: (data) => axios.post(`api/data_sources`, data),
save: data => axios.post(`api/data_sources/${data.id}`, data), save: (data) => axios.post(`api/data_sources/${data.id}`, data),
test: data => axios.post(`api/data_sources/${data.id}/test`), test: (data) => axios.post(`api/data_sources/${data.id}/test`),
delete: ({ id }) => axios.delete(`api/data_sources/${id}`), delete: ({ id }) => axios.delete(`api/data_sources/${id}`),
fetchSchema: (data, refresh = false) => { fetchSchema: (data, refresh = false) => {
const params = {}; const params = {};
@@ -27,15 +27,15 @@ const DataSource = {
return axios return axios
.get(`api/data_sources/${data.id}/schema`, { params }) .get(`api/data_sources/${data.id}/schema`, { params })
.then(data => { .then((data) => {
if (has(data, "job")) { if (has(data, "job")) {
return fetchDataFromJob(data.job.id).catch(error => return fetchDataFromJob(data.job.id).catch((error) =>
error.code === SCHEMA_NOT_SUPPORTED ? [] : Promise.reject(new Error(data.job.error)) error.code === SCHEMA_NOT_SUPPORTED ? [] : Promise.reject(new Error(data.job.error))
); );
} }
return has(data, "schema") ? data.schema : Promise.reject(); return has(data, "schema") ? data.schema : Promise.reject();
}) })
.then(tables => map(tables, table => ({ ...table, columns: mapSchemaColumnsToObject(table.columns) }))); .then((tables) => map(tables, (table) => ({ ...table, columns: mapSchemaColumnsToObject(table.columns) })));
}, },
}; };

View File

@@ -9,7 +9,7 @@ const logger = debug("redash:services:QueryResult");
const filterTypes = ["filter", "multi-filter", "multiFilter"]; const filterTypes = ["filter", "multi-filter", "multiFilter"];
function defer() { function defer() {
const result = { onStatusChange: status => {} }; const result = { onStatusChange: (status) => {} };
result.promise = new Promise((resolve, reject) => { result.promise = new Promise((resolve, reject) => {
result.resolve = resolve; result.resolve = resolve;
result.reject = reject; result.reject = reject;
@@ -40,13 +40,13 @@ function getColumnNameWithoutType(column) {
} }
function getColumnFriendlyName(column) { function getColumnFriendlyName(column) {
return getColumnNameWithoutType(column).replace(/(?:^|\s)\S/g, a => a.toUpperCase()); return getColumnNameWithoutType(column).replace(/(?:^|\s)\S/g, (a) => a.toUpperCase());
} }
const createOrSaveUrl = data => (data.id ? `api/query_results/${data.id}` : "api/query_results"); const createOrSaveUrl = (data) => (data.id ? `api/query_results/${data.id}` : "api/query_results");
const QueryResultResource = { const QueryResultResource = {
get: ({ id }) => axios.get(`api/query_results/${id}`), get: ({ id }) => axios.get(`api/query_results/${id}`),
post: data => axios.post(createOrSaveUrl(data), data), post: (data) => axios.post(createOrSaveUrl(data), data),
}; };
export const ExecutionStatus = { export const ExecutionStatus = {
@@ -97,11 +97,11 @@ function handleErrorResponse(queryResult, error) {
} }
function sleep(ms) { function sleep(ms) {
return new Promise(resolve => setTimeout(resolve, ms)); return new Promise((resolve) => setTimeout(resolve, ms));
} }
export function fetchDataFromJob(jobId, interval = 1000) { export function fetchDataFromJob(jobId, interval = 1000) {
return axios.get(`api/jobs/${jobId}`).then(data => { return axios.get(`api/jobs/${jobId}`).then((data) => {
const status = statuses[data.job.status]; const status = statuses[data.job.status];
if (status === ExecutionStatus.WAITING || status === ExecutionStatus.PROCESSING) { if (status === ExecutionStatus.WAITING || status === ExecutionStatus.PROCESSING) {
return sleep(interval).then(() => fetchDataFromJob(data.job.id)); return sleep(interval).then(() => fetchDataFromJob(data.job.id));
@@ -146,7 +146,7 @@ class QueryResult {
// TODO: we should stop manipulating incoming data, and switch to relaying // TODO: we should stop manipulating incoming data, and switch to relaying
// on the column type set by the backend. This logic is prone to errors, // on the column type set by the backend. This logic is prone to errors,
// and better be removed. Kept for now, for backward compatability. // and better be removed. Kept for now, for backward compatability.
each(this.query_result.data.rows, row => { each(this.query_result.data.rows, (row) => {
forOwn(row, (v, k) => { forOwn(row, (v, k) => {
let newType = null; let newType = null;
if (isNumber(v)) { if (isNumber(v)) {
@@ -173,7 +173,7 @@ class QueryResult {
}); });
}); });
each(this.query_result.data.columns, column => { each(this.query_result.data.columns, (column) => {
column.name = "" + column.name; column.name = "" + column.name;
if (columnTypes[column.name]) { if (columnTypes[column.name]) {
if (column.type == null || column.type === "string") { if (column.type == null || column.type === "string") {
@@ -265,14 +265,14 @@ class QueryResult {
getColumnNames() { getColumnNames() {
if (this.columnNames === undefined && this.query_result.data) { if (this.columnNames === undefined && this.query_result.data) {
this.columnNames = this.query_result.data.columns.map(v => v.name); this.columnNames = this.query_result.data.columns.map((v) => v.name);
} }
return this.columnNames; return this.columnNames;
} }
getColumnFriendlyNames() { getColumnFriendlyNames() {
return this.getColumnNames().map(col => getColumnFriendlyName(col)); return this.getColumnNames().map((col) => getColumnFriendlyName(col));
} }
getTruncated() { getTruncated() {
@@ -286,7 +286,7 @@ class QueryResult {
const filters = []; const filters = [];
this.getColumns().forEach(col => { this.getColumns().forEach((col) => {
const name = col.name; const name = col.name;
const type = name.split("::")[1] || name.split("__")[1]; const type = name.split("::")[1] || name.split("__")[1];
if (includes(filterTypes, type)) { if (includes(filterTypes, type)) {
@@ -302,8 +302,8 @@ class QueryResult {
} }
}, this); }, this);
this.getRawData().forEach(row => { this.getRawData().forEach((row) => {
filters.forEach(filter => { filters.forEach((filter) => {
filter.values.push(row[filter.name]); filter.values.push(row[filter.name]);
if (filter.values.length === 1) { if (filter.values.length === 1) {
if (filter.multiple) { if (filter.multiple) {
@@ -315,8 +315,8 @@ class QueryResult {
}); });
}); });
filters.forEach(filter => { filters.forEach((filter) => {
filter.values = uniqBy(filter.values, v => { filter.values = uniqBy(filter.values, (v) => {
if (moment.isMoment(v)) { if (moment.isMoment(v)) {
return v.unix(); return v.unix();
} }
@@ -345,12 +345,12 @@ class QueryResult {
axios axios
.get(`api/queries/${queryId}/results/${id}.json`) .get(`api/queries/${queryId}/results/${id}.json`)
.then(response => { .then((response) => {
// Success handler // Success handler
queryResult.isLoadingResult = false; queryResult.isLoadingResult = false;
queryResult.update(response); queryResult.update(response);
}) })
.catch(error => { .catch((error) => {
// Error handler // Error handler
queryResult.isLoadingResult = false; queryResult.isLoadingResult = false;
handleErrorResponse(queryResult, error); handleErrorResponse(queryResult, error);
@@ -362,10 +362,10 @@ class QueryResult {
loadLatestCachedResult(queryId, parameters) { loadLatestCachedResult(queryId, parameters) {
axios axios
.post(`api/queries/${queryId}/results`, { queryId, parameters }) .post(`api/queries/${queryId}/results`, { queryId, parameters })
.then(response => { .then((response) => {
this.update(response); this.update(response);
}) })
.catch(error => { .catch((error) => {
handleErrorResponse(this, error); handleErrorResponse(this, error);
}); });
} }
@@ -375,11 +375,11 @@ class QueryResult {
this.deferred.onStatusChange(ExecutionStatus.LOADING_RESULT); this.deferred.onStatusChange(ExecutionStatus.LOADING_RESULT);
QueryResultResource.get({ id: this.job.query_result_id }) QueryResultResource.get({ id: this.job.query_result_id })
.then(response => { .then((response) => {
this.update(response); this.update(response);
this.isLoadingResult = false; this.isLoadingResult = false;
}) })
.catch(error => { .catch((error) => {
if (tryCount === undefined) { if (tryCount === undefined) {
tryCount = 0; tryCount = 0;
} }
@@ -394,9 +394,12 @@ class QueryResult {
}); });
this.isLoadingResult = false; this.isLoadingResult = false;
} else { } else {
setTimeout(() => { setTimeout(
() => {
this.loadResult(tryCount + 1); this.loadResult(tryCount + 1);
}, 1000 * Math.pow(2, tryCount)); },
1000 * Math.pow(2, tryCount)
);
} }
}); });
} }
@@ -410,19 +413,26 @@ class QueryResult {
: axios.get(`api/queries/${query}/jobs/${this.job.id}`); : axios.get(`api/queries/${query}/jobs/${this.job.id}`);
request request
.then(jobResponse => { .then((jobResponse) => {
this.update(jobResponse); this.update(jobResponse);
if (this.getStatus() === "processing" && this.job.query_result_id && this.job.query_result_id !== "None") { if (this.getStatus() === "processing" && this.job.query_result_id && this.job.query_result_id !== "None") {
loadResult(); loadResult();
} else if (this.getStatus() !== "failed") { } else if (this.getStatus() !== "failed") {
const waitTime = tryNumber > 10 ? 3000 : 500; let waitTime;
if (tryNumber <= 10) {
waitTime = 500;
} else if (tryNumber <= 50) {
waitTime = 1000;
} else {
waitTime = 3000;
}
setTimeout(() => { setTimeout(() => {
this.refreshStatus(query, parameters, tryNumber + 1); this.refreshStatus(query, parameters, tryNumber + 1);
}, waitTime); }, waitTime);
} }
}) })
.catch(error => { .catch((error) => {
logger("Connection error", error); logger("Connection error", error);
// TODO: use QueryResultError, or better yet: exception/reject of promise. // TODO: use QueryResultError, or better yet: exception/reject of promise.
this.update({ this.update({
@@ -451,14 +461,14 @@ class QueryResult {
axios axios
.post(`api/queries/${id}/results`, { id, parameters, apply_auto_limit: applyAutoLimit, max_age: maxAge }) .post(`api/queries/${id}/results`, { id, parameters, apply_auto_limit: applyAutoLimit, max_age: maxAge })
.then(response => { .then((response) => {
queryResult.update(response); queryResult.update(response);
if ("job" in response) { if ("job" in response) {
queryResult.refreshStatus(id, parameters); queryResult.refreshStatus(id, parameters);
} }
}) })
.catch(error => { .catch((error) => {
handleErrorResponse(queryResult, error); handleErrorResponse(queryResult, error);
}); });
@@ -481,14 +491,14 @@ class QueryResult {
} }
QueryResultResource.post(params) QueryResultResource.post(params)
.then(response => { .then((response) => {
queryResult.update(response); queryResult.update(response);
if ("job" in response) { if ("job" in response) {
queryResult.refreshStatus(query, parameters); queryResult.refreshStatus(query, parameters);
} }
}) })
.catch(error => { .catch((error) => {
handleErrorResponse(queryResult, error); handleErrorResponse(queryResult, error);
}); });

View File

@@ -3,36 +3,26 @@
* @param should Passed to should expression after plot points are captured * @param should Passed to should expression after plot points are captured
*/ */
export function assertPlotPreview(should = "exist") { export function assertPlotPreview(should = "exist") {
cy.getByTestId("VisualizationPreview") cy.getByTestId("VisualizationPreview").find("g.overplot").should("exist").find("g.points").should(should);
.find("g.plot")
.should("exist")
.find("g.points")
.should(should);
} }
export function createChartThroughUI(chartName, chartSpecificAssertionFn = () => {}) { export function createChartThroughUI(chartName, chartSpecificAssertionFn = () => {}) {
cy.getByTestId("NewVisualization").click(); cy.getByTestId("NewVisualization").click();
cy.getByTestId("VisualizationType").selectAntdOption("VisualizationType.CHART"); cy.getByTestId("VisualizationType").selectAntdOption("VisualizationType.CHART");
cy.getByTestId("VisualizationName") cy.getByTestId("VisualizationName").clear().type(chartName);
.clear()
.type(chartName);
chartSpecificAssertionFn(); chartSpecificAssertionFn();
cy.server(); cy.server();
cy.route("POST", "**/api/visualizations").as("SaveVisualization"); cy.route("POST", "**/api/visualizations").as("SaveVisualization");
cy.getByTestId("EditVisualizationDialog") cy.getByTestId("EditVisualizationDialog").contains("button", "Save").click();
.contains("button", "Save")
.click();
cy.getByTestId("QueryPageVisualizationTabs") cy.getByTestId("QueryPageVisualizationTabs").contains("span", chartName).should("exist");
.contains("span", chartName)
.should("exist");
cy.wait("@SaveVisualization").should("have.property", "status", 200); cy.wait("@SaveVisualization").should("have.property", "status", 200);
return cy.get("@SaveVisualization").then(xhr => { return cy.get("@SaveVisualization").then((xhr) => {
const { id, name, options } = xhr.response.body; const { id, name, options } = xhr.response.body;
return cy.wrap({ id, name, options }); return cy.wrap({ id, name, options });
}); });
@@ -42,19 +32,13 @@ export function assertTabbedEditor(chartSpecificTabbedEditorAssertionFn = () =>
cy.getByTestId("Chart.GlobalSeriesType").should("exist"); cy.getByTestId("Chart.GlobalSeriesType").should("exist");
cy.getByTestId("VisualizationEditor.Tabs.Series").click(); cy.getByTestId("VisualizationEditor.Tabs.Series").click();
cy.getByTestId("VisualizationEditor") cy.getByTestId("VisualizationEditor").find("table").should("exist");
.find("table")
.should("exist");
cy.getByTestId("VisualizationEditor.Tabs.Colors").click(); cy.getByTestId("VisualizationEditor.Tabs.Colors").click();
cy.getByTestId("VisualizationEditor") cy.getByTestId("VisualizationEditor").find("table").should("exist");
.find("table")
.should("exist");
cy.getByTestId("VisualizationEditor.Tabs.DataLabels").click(); cy.getByTestId("VisualizationEditor.Tabs.DataLabels").click();
cy.getByTestId("VisualizationEditor") cy.getByTestId("VisualizationEditor").getByTestId("Chart.DataLabels.ShowDataLabels").should("exist");
.getByTestId("Chart.DataLabels.ShowDataLabels")
.should("exist");
chartSpecificTabbedEditorAssertionFn(); chartSpecificTabbedEditorAssertionFn();
@@ -63,39 +47,29 @@ export function assertTabbedEditor(chartSpecificTabbedEditorAssertionFn = () =>
export function assertAxesAndAddLabels(xaxisLabel, yaxisLabel) { export function assertAxesAndAddLabels(xaxisLabel, yaxisLabel) {
cy.getByTestId("VisualizationEditor.Tabs.XAxis").click(); cy.getByTestId("VisualizationEditor.Tabs.XAxis").click();
cy.getByTestId("Chart.XAxis.Type") cy.getByTestId("Chart.XAxis.Type").contains(".ant-select-selection-item", "Auto Detect").should("exist");
.contains(".ant-select-selection-item", "Auto Detect")
.should("exist");
cy.getByTestId("Chart.XAxis.Name") cy.getByTestId("Chart.XAxis.Name").clear().type(xaxisLabel);
.clear()
.type(xaxisLabel);
cy.getByTestId("VisualizationEditor.Tabs.YAxis").click(); cy.getByTestId("VisualizationEditor.Tabs.YAxis").click();
cy.getByTestId("Chart.LeftYAxis.Type") cy.getByTestId("Chart.LeftYAxis.Type").contains(".ant-select-selection-item", "Linear").should("exist");
.contains(".ant-select-selection-item", "Linear")
.should("exist");
cy.getByTestId("Chart.LeftYAxis.Name") cy.getByTestId("Chart.LeftYAxis.Name").clear().type(yaxisLabel);
.clear()
.type(yaxisLabel);
cy.getByTestId("Chart.LeftYAxis.TickFormat") cy.getByTestId("Chart.LeftYAxis.TickFormat").clear().type("+");
.clear()
.type("+");
cy.getByTestId("VisualizationEditor.Tabs.General").click(); cy.getByTestId("VisualizationEditor.Tabs.General").click();
} }
export function createDashboardWithCharts(title, chartGetters, widgetsAssertionFn = () => {}) { export function createDashboardWithCharts(title, chartGetters, widgetsAssertionFn = () => {}) {
cy.createDashboard(title).then(dashboard => { cy.createDashboard(title).then((dashboard) => {
const dashboardUrl = `/dashboards/${dashboard.id}`; const dashboardUrl = `/dashboards/${dashboard.id}`;
const widgetGetters = chartGetters.map(chartGetter => `${chartGetter}Widget`); const widgetGetters = chartGetters.map((chartGetter) => `${chartGetter}Widget`);
chartGetters.forEach((chartGetter, i) => { chartGetters.forEach((chartGetter, i) => {
const position = { autoHeight: false, sizeY: 8, sizeX: 3, col: (i % 2) * 3 }; const position = { autoHeight: false, sizeY: 8, sizeX: 3, col: (i % 2) * 3 };
cy.get(`@${chartGetter}`) cy.get(`@${chartGetter}`)
.then(chart => cy.addWidget(dashboard.id, chart.id, { position })) .then((chart) => cy.addWidget(dashboard.id, chart.id, { position }))
.as(widgetGetters[i]); .as(widgetGetters[i]);
}); });

View File

@@ -1,6 +1,6 @@
 {
   "name": "redash-client",
-  "version": "25.02.0-dev",
+  "version": "25.06.0-dev",
   "description": "The frontend part of Redash.",
   "main": "index.js",
   "scripts": {
@@ -143,6 +143,7 @@
     "react-refresh": "^0.14.0",
     "react-test-renderer": "^16.14.0",
     "request-cookies": "^1.1.0",
+    "source-map-loader": "^1.1.3",
     "style-loader": "^2.0.0",
     "typescript": "^4.1.2",
     "url-loader": "^4.1.1",

poetry.lock (generated): 2819 changed lines; file diff suppressed because it is too large.

View File

@@ -12,7 +12,7 @@ force-exclude = '''
 [tool.poetry]
 name = "redash"
-version = "25.02.0-dev"
+version = "25.06.0-dev"
 description = "Make Your Company Data Driven. Connect to any data source, easily visualize, dashboard and share your data."
 authors = ["Arik Fraimovich <arik@redash.io>"]
 # to be added to/removed from the mailing list, please reach out to Arik via the above email or Discord
@@ -88,13 +88,14 @@ tzlocal = "4.3.1"
 pyodbc = "5.1.0"
 debugpy = "^1.8.9"
 paramiko = "3.4.1"
+oracledb = "2.5.1"

 [tool.poetry.group.all_ds]
 optional = true

 [tool.poetry.group.all_ds.dependencies]
 atsd-client = "3.0.5"
-azure-kusto-data = "0.0.35"
+azure-kusto-data = "5.0.1"
 boto3 = "1.28.8"
 botocore = "1.31.8"
 cassandra-driver = "3.21.0"
@@ -109,11 +110,11 @@ influxdb = "5.2.3"
 influxdb-client = "1.38.0"
 memsql = "3.2.0"
 mysqlclient = "2.1.1"
+numpy = "1.24.4"
 nzalchemy = "^11.0.2"
 nzpy = ">=1.15"
 oauth2client = "4.1.3"
 openpyxl = "3.0.7"
-oracledb = "2.1.2"
 pandas = "1.3.4"
 phoenixdb = "0.7"
 pinotdb = ">=0.4.5"

View File

@@ -14,7 +14,7 @@ from redash.app import create_app # noqa
 from redash.destinations import import_destinations
 from redash.query_runner import import_query_runners

-__version__ = "25.02.0-dev"
+__version__ = "25.06.0-dev"


 if os.environ.get("REMOTE_DEBUG"):

View File

@@ -1,3 +1,5 @@
import html
import json
import logging import logging
from copy import deepcopy from copy import deepcopy
@@ -37,31 +39,83 @@ class Webex(BaseDestination):
@staticmethod @staticmethod
def formatted_attachments_template(subject, description, query_link, alert_link): def formatted_attachments_template(subject, description, query_link, alert_link):
return [ # Attempt to parse the description to find a 2D array
{ try:
"contentType": "application/vnd.microsoft.card.adaptive", # Extract the part of the description that looks like a JSON array
"content": { start_index = description.find("[")
"$schema": "http://adaptivecards.io/schemas/adaptive-card.json", end_index = description.rfind("]") + 1
"type": "AdaptiveCard", json_array_str = description[start_index:end_index]
"version": "1.0",
"body": [ # Decode HTML entities
json_array_str = html.unescape(json_array_str)
# Replace single quotes with double quotes for valid JSON
json_array_str = json_array_str.replace("'", '"')
# Load the JSON array
data_array = json.loads(json_array_str)
# Check if it's a 2D array
if isinstance(data_array, list) and all(isinstance(i, list) for i in data_array):
# Create a table for the Adaptive Card
table_rows = []
for row in data_array:
table_rows.append(
{ {
"type": "ColumnSet", "type": "ColumnSet",
"columns": [ "columns": [
{ {"type": "Column", "items": [{"type": "TextBlock", "text": str(item), "wrap": True}]}
"type": "Column", for item in row
"width": 4, ],
"items": [ }
)
# Create the body of the card with the table
body = (
[
{ {
"type": "TextBlock", "type": "TextBlock",
"text": {subject}, "text": f"{subject}",
"weight": "bolder", "weight": "bolder",
"size": "medium", "size": "medium",
"wrap": True, "wrap": True,
}, },
{ {
"type": "TextBlock", "type": "TextBlock",
"text": {description}, "text": f"{description[:start_index]}",
"isSubtle": True,
"wrap": True,
},
]
+ table_rows
+ [
{
"type": "TextBlock",
"text": f"Click [here]({query_link}) to check your query!",
"wrap": True,
"isSubtle": True,
},
{
"type": "TextBlock",
"text": f"Click [here]({alert_link}) to check your alert!",
"wrap": True,
"isSubtle": True,
},
]
)
else:
# Fallback to the original description if no valid 2D array is found
body = [
{
"type": "TextBlock",
"text": f"{subject}",
"weight": "bolder",
"size": "medium",
"wrap": True,
},
{
"type": "TextBlock",
"text": f"{description}",
"isSubtle": True, "isSubtle": True,
"wrap": True, "wrap": True,
}, },
@@ -77,11 +131,45 @@ class Webex(BaseDestination):
"wrap": True, "wrap": True,
"isSubtle": True, "isSubtle": True,
}, },
], ]
except json.JSONDecodeError:
# If parsing fails, fallback to the original description
body = [
{
"type": "TextBlock",
"text": f"{subject}",
"weight": "bolder",
"size": "medium",
"wrap": True,
}, },
], {
} "type": "TextBlock",
], "text": f"{description}",
"isSubtle": True,
"wrap": True,
},
{
"type": "TextBlock",
"text": f"Click [here]({query_link}) to check your query!",
"wrap": True,
"isSubtle": True,
},
{
"type": "TextBlock",
"text": f"Click [here]({alert_link}) to check your alert!",
"wrap": True,
"isSubtle": True,
},
]
return [
{
"contentType": "application/vnd.microsoft.card.adaptive",
"content": {
"$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
"type": "AdaptiveCard",
"version": "1.0",
"body": body,
}, },
} }
] ]
@@ -116,6 +204,10 @@ class Webex(BaseDestination):
# destinations is guaranteed to be a comma-separated string # destinations is guaranteed to be a comma-separated string
for destination_id in destinations.split(","): for destination_id in destinations.split(","):
destination_id = destination_id.strip() # Remove any leading or trailing whitespace
if not destination_id: # Check if the destination_id is empty or blank
continue # Skip to the next iteration if it's empty or blank
payload = deepcopy(template_payload) payload = deepcopy(template_payload)
payload[payload_tag] = destination_id payload[payload_tag] = destination_id
self.post_message(payload, headers) self.post_message(payload, headers)

View File

@@ -908,6 +908,7 @@ def next_state(op, value, threshold):
        # boolean value is Python specific and most likely will be confusing to
        # users.
        value = str(value).lower()
        value_is_number = False
    else:
        try:
            value = float(value)

View File

@@ -288,7 +288,10 @@ class BaseSQLQueryRunner(BaseQueryRunner):
        return True

    def query_is_select_no_limit(self, query):
        parsed_query_list = sqlparse.parse(query)
        if len(parsed_query_list) == 0:
            return False
        parsed_query = parsed_query_list[0]
        last_keyword_idx = find_last_keyword_idx(parsed_query)
        # Either invalid query or query that is not select
        if last_keyword_idx == -1 or parsed_query.tokens[0].value.upper() != "SELECT":
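For reference, a quick illustration of the case this guard handles (not part of the diff; assumes current sqlparse behavior): parsing an empty query string yields no statements, so indexing the parse result directly would raise IndexError.

import sqlparse

# An empty query string produces no parsed statements.
assert len(sqlparse.parse("")) == 0
# A real SELECT parses into at least one statement whose first token is the DML keyword.
assert sqlparse.parse("SELECT 1")[0].tokens[0].value.upper() == "SELECT"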

View File

@@ -11,12 +11,12 @@ from redash.query_runner import (
from redash.utils import json_loads

try:
    from azure.kusto.data import (
        ClientRequestProperties,
        KustoClient,
        KustoConnectionStringBuilder,
    )
    from azure.kusto.data.exceptions import KustoServiceError

    enabled = True
except ImportError:
@@ -37,6 +37,34 @@ TYPES_MAP = {
}


def _get_data_scanned(kusto_response):
    try:
        metadata_table = next(
            (table for table in kusto_response.tables if table.table_name == "QueryCompletionInformation"),
            None,
        )
        if metadata_table:
            resource_usage_json = next(
                (row["Payload"] for row in metadata_table.rows if row["EventTypeName"] == "QueryResourceConsumption"),
                "{}",
            )
            resource_usage = json_loads(resource_usage_json).get("resource_usage", {})
            data_scanned = (
                resource_usage["cache"]["shards"]["cold"]["hitbytes"]
                + resource_usage["cache"]["shards"]["cold"]["missbytes"]
                + resource_usage["cache"]["shards"]["hot"]["hitbytes"]
                + resource_usage["cache"]["shards"]["hot"]["missbytes"]
                + resource_usage["cache"]["shards"]["bypassbytes"]
            )
    except Exception:
        data_scanned = 0

    return int(data_scanned)


class AzureKusto(BaseQueryRunner):
    should_annotate_query = False
    noop_query = "let noop = datatable (Noop:string)[1]; noop"
@@ -44,8 +72,6 @@ class AzureKusto(BaseQueryRunner):
    def __init__(self, configuration):
        super(AzureKusto, self).__init__(configuration)
        self.syntax = "custom"

    @classmethod
    def configuration_schema(cls):
@@ -60,12 +86,14 @@ class AzureKusto(BaseQueryRunner):
                },
                "azure_ad_tenant_id": {"type": "string", "title": "Azure AD Tenant Id"},
                "database": {"type": "string"},
                "msi": {"type": "boolean", "title": "Use Managed Service Identity"},
                "user_msi": {
                    "type": "string",
                    "title": "User-assigned managed identity client ID",
                },
            },
            "required": [
                "cluster",
                "database",
            ],
            "order": [
@@ -91,18 +119,48 @@ class AzureKusto(BaseQueryRunner):
return "Azure Data Explorer (Kusto)" return "Azure Data Explorer (Kusto)"
def run_query(self, query, user): def run_query(self, query, user):
cluster = self.configuration["cluster"]
msi = self.configuration.get("msi", False)
# Managed Service Identity(MSI)
if msi:
# If user-assigned managed identity is used, the client ID must be provided
if self.configuration.get("user_msi"):
kcsb = KustoConnectionStringBuilder.with_aad_managed_service_identity_authentication(
cluster,
client_id=self.configuration["user_msi"],
)
else:
kcsb = KustoConnectionStringBuilder.with_aad_managed_service_identity_authentication(cluster)
# Service Principal auth
else:
aad_app_id = self.configuration.get("azure_ad_client_id")
app_key = self.configuration.get("azure_ad_client_secret")
authority_id = self.configuration.get("azure_ad_tenant_id")
if not (aad_app_id and app_key and authority_id):
raise ValueError(
"Azure AD Client ID, Client Secret, and Tenant ID are required for Service Principal authentication."
)
kcsb = KustoConnectionStringBuilder.with_aad_application_key_authentication( kcsb = KustoConnectionStringBuilder.with_aad_application_key_authentication(
connection_string=self.configuration["cluster"], connection_string=cluster,
aad_app_id=self.configuration["azure_ad_client_id"], aad_app_id=aad_app_id,
app_key=self.configuration["azure_ad_client_secret"], app_key=app_key,
authority_id=self.configuration["azure_ad_tenant_id"], authority_id=authority_id,
) )
client = KustoClient(kcsb) client = KustoClient(kcsb)
request_properties = ClientRequestProperties()
request_properties.application = "redash"
if user:
request_properties.user = user.email
request_properties.set_option("request_description", user.email)
db = self.configuration["database"] db = self.configuration["database"]
try: try:
response = client.execute(db, query, self.client_request_properties) response = client.execute(db, query, request_properties)
result_cols = response.primary_results[0].columns result_cols = response.primary_results[0].columns
result_rows = response.primary_results[0].rows result_rows = response.primary_results[0].rows
@@ -123,14 +181,15 @@ class AzureKusto(BaseQueryRunner):
                rows.append(row.to_dict())

            error = None
            data = {
                "columns": columns,
                "rows": rows,
                "metadata": {"data_scanned": _get_data_scanned(response)},
            }
        except KustoServiceError as err:
            data = None
            error = str(err)

        return data, error
@@ -143,7 +202,10 @@ class AzureKusto(BaseQueryRunner):
            self._handle_run_query_error(error)

        schema_as_json = json_loads(results["rows"][0]["DatabaseSchema"])
        tables_list = [
            *(schema_as_json["Databases"][self.configuration["database"]]["Tables"].values()),
            *(schema_as_json["Databases"][self.configuration["database"]]["MaterializedViews"].values()),
        ]

        schema = {}
@@ -154,7 +216,9 @@ class AzureKusto(BaseQueryRunner):
            schema[table_name] = {"name": table_name, "columns": []}

            for column in table["OrderedColumns"]:
                schema[table_name]["columns"].append(
                    {"name": column["Name"], "type": TYPES_MAP.get(column["CslType"], None)}
                )

        return list(schema.values())

View File

@@ -12,7 +12,7 @@ from redash.query_runner import (
    TYPE_FLOAT,
    TYPE_INTEGER,
    TYPE_STRING,
    BaseSQLQueryRunner,
    InterruptException,
    JobTimeoutException,
    register,
@@ -86,7 +86,7 @@ def _get_query_results(jobs, project_id, location, job_id, start_index):
    ).execute()
    logging.debug("query_reply %s", query_reply)

    if not query_reply["jobComplete"]:
        time.sleep(1)
        return _get_query_results(jobs, project_id, location, job_id, start_index)

    return query_reply
@@ -98,7 +98,7 @@ def _get_total_bytes_processed_for_resp(bq_response):
    return int(bq_response.get("totalBytesProcessed", "0"))


class BigQuery(BaseSQLQueryRunner):
    noop_query = "SELECT 1"

    def __init__(self, configuration):
@@ -304,7 +304,7 @@ class BigQuery(BaseQueryRunner):
        datasets = self._get_project_datasets(project_id)

        query_base = """
        SELECT table_schema, table_name, field_path, data_type
        FROM `{dataset_id}`.INFORMATION_SCHEMA.COLUMN_FIELD_PATHS
        WHERE table_schema NOT IN ('information_schema')
        """
@@ -325,7 +325,7 @@ class BigQuery(BaseQueryRunner):
table_name = "{0}.{1}".format(row["table_schema"], row["table_name"]) table_name = "{0}.{1}".format(row["table_schema"], row["table_name"])
if table_name not in schema: if table_name not in schema:
schema[table_name] = {"name": table_name, "columns": []} schema[table_name] = {"name": table_name, "columns": []}
schema[table_name]["columns"].append(row["field_path"]) schema[table_name]["columns"].append({"name": row["field_path"], "type": row["data_type"]})
return list(schema.values()) return list(schema.values())

View File

@@ -152,7 +152,7 @@ class Mysql(BaseSQLQueryRunner):
            col.table_name as table_name,
            col.column_name as column_name
        FROM `information_schema`.`columns` col
        WHERE LOWER(col.table_schema) NOT IN ('information_schema', 'performance_schema', 'mysql', 'sys');
        """

        results, error = self.run_query(query, None)

View File

@@ -1,6 +1,6 @@
import functools

from flask import request, session
from flask_login import current_user
from flask_talisman import talisman
from flask_wtf.csrf import CSRFProtect, generate_csrf
@@ -35,6 +35,15 @@ def init_app(app):
    @app.before_request
    def check_csrf():
        # BEGIN workaround until https://github.com/lepture/flask-wtf/pull/419 is merged
        if request.blueprint in csrf._exempt_blueprints:
            return

        view = app.view_functions.get(request.endpoint)
        if view is not None and f"{view.__module__}.{view.__name__}" in csrf._exempt_views:
            return
        # END workaround

        if not current_user.is_authenticated or "user_id" in session:
            csrf.protect()

View File

@@ -6,6 +6,7 @@ import decimal
import hashlib
import io
import json
import math
import os
import random
import re
@@ -120,6 +121,17 @@ def json_loads(data, *args, **kwargs):
    return json.loads(data, *args, **kwargs)


# Convert NaN, Inf, and -Inf to None, as they are not valid JSON values.
def _sanitize_data(data):
    if isinstance(data, dict):
        return {k: _sanitize_data(v) for k, v in data.items()}
    if isinstance(data, list):
        return [_sanitize_data(v) for v in data]
    if isinstance(data, float) and (math.isnan(data) or math.isinf(data)):
        return None
    return data


def json_dumps(data, *args, **kwargs):
    """A custom JSON dumping function which passes all parameters to the
    json.dumps function."""
@@ -128,7 +140,7 @@ def json_dumps(data, *args, **kwargs):
    # Float value nan or inf in Python should be render to None or null in json.
    # Using allow_nan = True will make Python render nan as NaN, leading to parse error in front-end
    kwargs.setdefault("allow_nan", False)
    return json.dumps(_sanitize_data(data), *args, **kwargs)


def mustache_render(template, context=None, **kwargs):
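As a rough usage sketch (not part of the diff), the sanitization step means non-finite floats now serialize as null instead of failing under allow_nan=False:

import math

from redash.utils import json_dumps

# NaN and +/-Inf are replaced with None before serialization.
print(json_dumps({"value": float("nan"), "ratio": math.inf}))
# -> {"value": null, "ratio": null}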

View File

@@ -33,7 +33,7 @@ from sqlalchemy.orm import mapperlib
from sqlalchemy.orm.properties import ColumnProperty
from sqlalchemy.orm.query import _ColumnEntity
from sqlalchemy.orm.util import AliasedInsp
from sqlalchemy.sql.expression import asc, desc, nullslast


def get_query_descriptor(query, entity, attr):
@@ -225,7 +225,7 @@ class QuerySorter:
    def assign_order_by(self, entity, attr, func):
        expr = get_query_descriptor(self.query, entity, attr)
        if expr is not None:
            return self.query.order_by(nullslast(func(expr)))
        if not self.silent:
            raise QuerySorterException("Could not sort query with expression '%s'" % attr)
        return self.query
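For context, a minimal sketch of the effect (the session and Query model below are hypothetical): on PostgreSQL, NULLs sort first under DESC by default, and nullslast() keeps rows with NULL sort keys at the end for either sort direction.

from sqlalchemy.sql.expression import desc, nullslast

# Roughly what assign_order_by now produces for a descending sort.
session.query(Query).order_by(nullslast(desc(Query.created_at)))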

View File

@@ -261,15 +261,19 @@ def test_webex_notify_calls_requests_post():
alert.name = "Test Alert" alert.name = "Test Alert"
alert.custom_subject = "Test custom subject" alert.custom_subject = "Test custom subject"
alert.custom_body = "Test custom body" alert.custom_body = "Test custom body"
alert.render_template = mock.Mock(return_value={"Rendered": "template"}) alert.render_template = mock.Mock(return_value={"Rendered": "template"})
query = mock.Mock() query = mock.Mock()
query.id = 1 query.id = 1
user = mock.Mock() user = mock.Mock()
app = mock.Mock() app = mock.Mock()
host = "https://localhost:5000" host = "https://localhost:5000"
options = {"webex_bot_token": "abcd", "to_room_ids": "1234"} options = {
"webex_bot_token": "abcd",
"to_room_ids": "1234,5678",
"to_person_emails": "example1@test.com,example2@test.com",
}
metadata = {"Scheduled": False} metadata = {"Scheduled": False}
new_state = Alert.TRIGGERED_STATE new_state = Alert.TRIGGERED_STATE
@@ -277,7 +281,7 @@ def test_webex_notify_calls_requests_post():
with mock.patch("redash.destinations.webex.requests.post") as mock_post: with mock.patch("redash.destinations.webex.requests.post") as mock_post:
mock_response = mock.Mock() mock_response = mock.Mock()
mock_response.status_code = 204 mock_response.status_code = 200
mock_post.return_value = mock_response mock_post.return_value = mock_response
destination.notify(alert, query, user, new_state, app, host, metadata, options) destination.notify(alert, query, user, new_state, app, host, metadata, options)
@@ -285,13 +289,111 @@ def test_webex_notify_calls_requests_post():
query_link = f"{host}/queries/{query.id}" query_link = f"{host}/queries/{query.id}"
alert_link = f"{host}/alerts/{alert.id}" alert_link = f"{host}/alerts/{alert.id}"
formatted_attachments = Webex.formatted_attachments_template( expected_attachments = Webex.formatted_attachments_template(
alert.custom_subject, alert.custom_body, query_link, alert_link
)
expected_payload_room = {
"markdown": alert.custom_subject + "\n" + alert.custom_body,
"attachments": expected_attachments,
"roomId": "1234",
}
expected_payload_email = {
"markdown": alert.custom_subject + "\n" + alert.custom_body,
"attachments": expected_attachments,
"toPersonEmail": "example1@test.com",
}
# Check that requests.post was called for both roomId and toPersonEmail destinations
mock_post.assert_any_call(
destination.api_base_url,
json=expected_payload_room,
headers={"Authorization": "Bearer abcd"},
timeout=5.0,
)
mock_post.assert_any_call(
destination.api_base_url,
json=expected_payload_email,
headers={"Authorization": "Bearer abcd"},
timeout=5.0,
)
assert mock_response.status_code == 200
def test_webex_notify_handles_blank_entries():
alert = mock.Mock(spec_set=["id", "name", "custom_subject", "custom_body", "render_template"])
alert.id = 1
alert.name = "Test Alert"
alert.custom_subject = "Test custom subject"
alert.custom_body = "Test custom body"
alert.render_template = mock.Mock(return_value={"Rendered": "template"})
query = mock.Mock()
query.id = 1
user = mock.Mock()
app = mock.Mock()
host = "https://localhost:5000"
options = {
"webex_bot_token": "abcd",
"to_room_ids": "",
"to_person_emails": "",
}
metadata = {"Scheduled": False}
new_state = Alert.TRIGGERED_STATE
destination = Webex(options)
with mock.patch("redash.destinations.webex.requests.post") as mock_post:
destination.notify(alert, query, user, new_state, app, host, metadata, options)
# Ensure no API calls are made when destinations are blank
mock_post.assert_not_called()
def test_webex_notify_handles_2d_array():
alert = mock.Mock(spec_set=["id", "name", "custom_subject", "custom_body", "render_template"])
alert.id = 1
alert.name = "Test Alert"
alert.custom_subject = "Test custom subject"
alert.custom_body = "Test custom body with table [['Col1', 'Col2'], ['Val1', 'Val2']]"
alert.render_template = mock.Mock(return_value={"Rendered": "template"})
query = mock.Mock()
query.id = 1
user = mock.Mock()
app = mock.Mock()
host = "https://localhost:5000"
options = {
"webex_bot_token": "abcd",
"to_room_ids": "1234",
}
metadata = {"Scheduled": False}
new_state = Alert.TRIGGERED_STATE
destination = Webex(options)
with mock.patch("redash.destinations.webex.requests.post") as mock_post:
mock_response = mock.Mock()
mock_response.status_code = 200
mock_post.return_value = mock_response
destination.notify(alert, query, user, new_state, app, host, metadata, options)
query_link = f"{host}/queries/{query.id}"
alert_link = f"{host}/alerts/{alert.id}"
expected_attachments = Webex.formatted_attachments_template(
alert.custom_subject, alert.custom_body, query_link, alert_link alert.custom_subject, alert.custom_body, query_link, alert_link
) )
expected_payload = { expected_payload = {
"markdown": alert.custom_subject + "\n" + alert.custom_body, "markdown": alert.custom_subject + "\n" + alert.custom_body,
"attachments": formatted_attachments, "attachments": expected_attachments,
"roomId": "1234", "roomId": "1234",
} }
@@ -302,7 +404,60 @@ def test_webex_notify_calls_requests_post():
            timeout=5.0,
        )

        assert mock_response.status_code == 200
def test_webex_notify_handles_1d_array():
    alert = mock.Mock(spec_set=["id", "name", "custom_subject", "custom_body", "render_template"])
    alert.id = 1
    alert.name = "Test Alert"
    alert.custom_subject = "Test custom subject"
    alert.custom_body = "Test custom body with 1D array, however unlikely ['Col1', 'Col2']"
    alert.render_template = mock.Mock(return_value={"Rendered": "template"})

    query = mock.Mock()
    query.id = 1

    user = mock.Mock()
    app = mock.Mock()
    host = "https://localhost:5000"
    options = {
        "webex_bot_token": "abcd",
        "to_room_ids": "1234",
    }
    metadata = {"Scheduled": False}

    new_state = Alert.TRIGGERED_STATE

    destination = Webex(options)

    with mock.patch("redash.destinations.webex.requests.post") as mock_post:
        mock_response = mock.Mock()
        mock_response.status_code = 200
        mock_post.return_value = mock_response

        destination.notify(alert, query, user, new_state, app, host, metadata, options)

        query_link = f"{host}/queries/{query.id}"
        alert_link = f"{host}/alerts/{alert.id}"

        expected_attachments = Webex.formatted_attachments_template(
            alert.custom_subject, alert.custom_body, query_link, alert_link
        )
        expected_payload = {
            "markdown": alert.custom_subject + "\n" + alert.custom_body,
            "attachments": expected_attachments,
            "roomId": "1234",
        }

        mock_post.assert_called_once_with(
            destination.api_base_url,
            json=expected_payload,
            headers={"Authorization": "Bearer abcd"},
            timeout=5.0,
        )

        assert mock_response.status_code == 200


def test_datadog_notify_calls_requests_post():

View File

@@ -0,0 +1,42 @@
from unittest import TestCase
from unittest.mock import patch

from redash.query_runner.azure_kusto import AzureKusto


class TestAzureKusto(TestCase):
    def setUp(self):
        self.configuration = {
            "cluster": "https://example.kusto.windows.net",
            "database": "sample_db",
            "azure_ad_client_id": "client_id",
            "azure_ad_client_secret": "client_secret",
            "azure_ad_tenant_id": "tenant_id",
        }
        self.kusto = AzureKusto(self.configuration)

    @patch.object(AzureKusto, "run_query")
    def test_get_schema(self, mock_run_query):
        mock_response = {
            "rows": [
                {
                    "DatabaseSchema": '{"Databases":{"sample_db":{"Tables":{"Table1":{"Name":"Table1","OrderedColumns":[{"Name":"Column1","Type":"System.String","CslType":"string"},{"Name":"Column2","Type":"System.DateTime","CslType":"datetime"}]}},"MaterializedViews":{"View1":{"Name":"View1","OrderedColumns":[{"Name":"Column1","Type":"System.String","CslType":"string"},{"Name":"Column2","Type":"System.DateTime","CslType":"datetime"}]}}}}}'
                }
            ]
        }
        mock_run_query.return_value = (mock_response, None)

        expected_schema = [
            {
                "name": "Table1",
                "columns": [{"name": "Column1", "type": "string"}, {"name": "Column2", "type": "datetime"}],
            },
            {
                "name": "View1",
                "columns": [{"name": "Column1", "type": "string"}, {"name": "Column2", "type": "datetime"}],
            },
        ]

        schema = self.kusto.get_schema()
        print(schema)

        self.assertEqual(schema, expected_schema)

View File

@@ -0,0 +1,31 @@
from redash.utils import json_dumps, json_loads
from tests import BaseTestCase


class TestJsonDumps(BaseTestCase):
    """
    NaN, Inf, and -Inf are sanitized to None.
    """

    def test_data_with_nan_is_sanitized(self):
        input_data = {
            "columns": [
                {"name": "_col0", "friendly_name": "_col0", "type": "float"},
                {"name": "_col1", "friendly_name": "_col1", "type": "float"},
                {"name": "_col2", "friendly_name": "_col1", "type": "float"},
                {"name": "_col3", "friendly_name": "_col1", "type": "float"},
            ],
            "rows": [{"_col0": 1.0, "_col1": float("nan"), "_col2": float("inf"), "_col3": float("-inf")}],
        }
        expected_output_data = {
            "columns": [
                {"name": "_col0", "friendly_name": "_col0", "type": "float"},
                {"name": "_col1", "friendly_name": "_col1", "type": "float"},
                {"name": "_col2", "friendly_name": "_col1", "type": "float"},
                {"name": "_col3", "friendly_name": "_col1", "type": "float"},
            ],
            "rows": [{"_col0": 1.0, "_col1": None, "_col2": None, "_col3": None}],
        }
        json_data = json_dumps(input_data)
        actual_output_data = json_loads(json_data)
        self.assertEqual(actual_output_data, expected_output_data)

View File

@@ -46,7 +46,7 @@
"@types/jest": "^26.0.18", "@types/jest": "^26.0.18",
"@types/leaflet": "^1.5.19", "@types/leaflet": "^1.5.19",
"@types/numeral": "0.0.28", "@types/numeral": "0.0.28",
"@types/plotly.js": "^1.54.22", "@types/plotly.js": "^2.35.2",
"@types/react": "^17.0.0", "@types/react": "^17.0.0",
"@types/react-dom": "^17.0.0", "@types/react-dom": "^17.0.0",
"@types/tinycolor2": "^1.4.2", "@types/tinycolor2": "^1.4.2",
@@ -91,7 +91,7 @@
"leaflet.markercluster": "^1.1.0", "leaflet.markercluster": "^1.1.0",
"lodash": "^4.17.10", "lodash": "^4.17.10",
"numeral": "^2.0.6", "numeral": "^2.0.6",
"plotly.js": "1.58.5", "plotly.js": "2.35.3",
"react-pivottable": "^0.9.0", "react-pivottable": "^0.9.0",
"react-sortable-hoc": "^1.10.1", "react-sortable-hoc": "^1.10.1",
"tinycolor2": "^1.4.1", "tinycolor2": "^1.4.1",

View File

@@ -27,11 +27,13 @@
"automargin": true, "automargin": true,
"showticklabels": true, "showticklabels": true,
"title": null, "title": null,
"tickformat": null,
"type": "-" "type": "-"
}, },
"yaxis": { "yaxis": {
"automargin": true, "automargin": true,
"title": null, "title": null,
"tickformat": null,
"type": "linear", "type": "linear",
"autorange": true, "autorange": true,
"range": null "range": null

View File

@@ -30,11 +30,13 @@
"automargin": true, "automargin": true,
"showticklabels": true, "showticklabels": true,
"title": null, "title": null,
"tickformat": null,
"type": "-" "type": "-"
}, },
"yaxis": { "yaxis": {
"automargin": true, "automargin": true,
"title": null, "title": null,
"tickformat": null,
"type": "linear", "type": "linear",
"autorange": true, "autorange": true,
"range": null "range": null
@@ -42,6 +44,7 @@
"yaxis2": { "yaxis2": {
"automargin": true, "automargin": true,
"title": null, "title": null,
"tickformat": null,
"type": "linear", "type": "linear",
"autorange": true, "autorange": true,
"range": null, "range": null,

View File

@@ -25,18 +25,21 @@
"automargin": true, "automargin": true,
"showticklabels": true, "showticklabels": true,
"title": null, "title": null,
"tickformat": null,
"type": "-" "type": "-"
}, },
"yaxis": { "yaxis": {
"automargin": true, "automargin": true,
"title": null, "title": null,
"tickformat": null,
"type": "linear", "type": "linear",
"autorange": true, "autorange": true,
"range": null "range": null
}, },
"hoverlabel": { "hoverlabel": {
"namelength": -1 "namelength": -1
} },
"hovermode": "x"
} }
} }
} }

View File

@@ -28,11 +28,13 @@
"automargin": true, "automargin": true,
"showticklabels": true, "showticklabels": true,
"title": null, "title": null,
"tickformat": null,
"type": "-" "type": "-"
}, },
"yaxis": { "yaxis": {
"automargin": true, "automargin": true,
"title": null, "title": null,
"tickformat": null,
"type": "linear", "type": "linear",
"autorange": true, "autorange": true,
"range": null "range": null
@@ -40,6 +42,7 @@
"yaxis2": { "yaxis2": {
"automargin": true, "automargin": true,
"title": null, "title": null,
"tickformat": null,
"type": "linear", "type": "linear",
"autorange": true, "autorange": true,
"range": null, "range": null,
@@ -48,7 +51,8 @@
      },
      "hoverlabel": {
        "namelength": -1
      },
      "hovermode": "x"
    }
  }
}

View File

@@ -24,18 +24,21 @@
"automargin": true, "automargin": true,
"showticklabels": true, "showticklabels": true,
"title": null, "title": null,
"tickformat": null,
"type": "-" "type": "-"
}, },
"yaxis": { "yaxis": {
"automargin": true, "automargin": true,
"title": null, "title": null,
"tickformat": null,
"type": "linear", "type": "linear",
"autorange": true, "autorange": true,
"range": null "range": null
}, },
"hoverlabel": { "hoverlabel": {
"namelength": -1 "namelength": -1
} },
"hovermode": "x"
} }
} }
} }

View File

@@ -23,18 +23,21 @@
"automargin": true, "automargin": true,
"showticklabels": true, "showticklabels": true,
"title": null, "title": null,
"tickformat": null,
"type": "-" "type": "-"
}, },
"yaxis": { "yaxis": {
"automargin": true, "automargin": true,
"title": null, "title": null,
"tickformat": null,
"type": "linear", "type": "linear",
"autorange": true, "autorange": true,
"range": null "range": null
}, },
"hoverlabel": { "hoverlabel": {
"namelength": -1 "namelength": -1
} },
"hovermode": "x"
} }
} }
} }

View File

@@ -1,5 +1,6 @@
import * as Plotly from "plotly.js";
import "./locales";
import prepareData from "./prepareData";
import prepareLayout from "./prepareLayout";
import updateData from "./updateData";
@@ -10,6 +11,8 @@ import { prepareCustomChartData, createCustomChartRenderer } from "./customChart
// @ts-expect-error ts-migrate(2339) FIXME: Property 'setPlotConfig' does not exist on type 't... Remove this comment to see the full error message
Plotly.setPlotConfig({
  modeBarButtonsToRemove: ["sendDataToCloud"],
  modeBarButtonsToAdd: ["togglespikelines", "v1hovermode"],
  locale: window.navigator.language,
});

export {

View File

@@ -0,0 +1,230 @@
import * as Plotly from "plotly.js";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeAf from "plotly.js/lib/locales/af";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeAm from "plotly.js/lib/locales/am";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeAr_dz from "plotly.js/lib/locales/ar-dz";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeAr_eg from "plotly.js/lib/locales/ar-eg";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeAr from "plotly.js/lib/locales/ar";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeAz from "plotly.js/lib/locales/az";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeBg from "plotly.js/lib/locales/bg";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeBs from "plotly.js/lib/locales/bs";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeCa from "plotly.js/lib/locales/ca";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeCs from "plotly.js/lib/locales/cs";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeCy from "plotly.js/lib/locales/cy";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeDa from "plotly.js/lib/locales/da";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeDe_ch from "plotly.js/lib/locales/de-ch";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeDe from "plotly.js/lib/locales/de";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeEl from "plotly.js/lib/locales/el";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeEo from "plotly.js/lib/locales/eo";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeEs_ar from "plotly.js/lib/locales/es-ar";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeEs_pe from "plotly.js/lib/locales/es-pe";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeEs from "plotly.js/lib/locales/es";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeEt from "plotly.js/lib/locales/et";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeEu from "plotly.js/lib/locales/eu";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeFa from "plotly.js/lib/locales/fa";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeFi from "plotly.js/lib/locales/fi";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeFo from "plotly.js/lib/locales/fo";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeFr_ch from "plotly.js/lib/locales/fr-ch";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeFr from "plotly.js/lib/locales/fr";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeGl from "plotly.js/lib/locales/gl";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeGu from "plotly.js/lib/locales/gu";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeHe from "plotly.js/lib/locales/he";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeHi_in from "plotly.js/lib/locales/hi-in";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeHr from "plotly.js/lib/locales/hr";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeHu from "plotly.js/lib/locales/hu";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeHy from "plotly.js/lib/locales/hy";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeId from "plotly.js/lib/locales/id";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeIs from "plotly.js/lib/locales/is";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeIt from "plotly.js/lib/locales/it";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeJa from "plotly.js/lib/locales/ja";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeKa from "plotly.js/lib/locales/ka";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeKm from "plotly.js/lib/locales/km";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeKo from "plotly.js/lib/locales/ko";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeLt from "plotly.js/lib/locales/lt";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeLv from "plotly.js/lib/locales/lv";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeMe_me from "plotly.js/lib/locales/me-me";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeMe from "plotly.js/lib/locales/me";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeMk from "plotly.js/lib/locales/mk";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeMl from "plotly.js/lib/locales/ml";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeMs from "plotly.js/lib/locales/ms";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeMt from "plotly.js/lib/locales/mt";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeNl_be from "plotly.js/lib/locales/nl-be";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeNl from "plotly.js/lib/locales/nl";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeNo from "plotly.js/lib/locales/no";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localePa from "plotly.js/lib/locales/pa";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localePl from "plotly.js/lib/locales/pl";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localePt_br from "plotly.js/lib/locales/pt-br";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localePt_pt from "plotly.js/lib/locales/pt-pt";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeRm from "plotly.js/lib/locales/rm";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeRo from "plotly.js/lib/locales/ro";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeRu from "plotly.js/lib/locales/ru";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeSk from "plotly.js/lib/locales/sk";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeSl from "plotly.js/lib/locales/sl";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeSq from "plotly.js/lib/locales/sq";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeSr_sr from "plotly.js/lib/locales/sr-sr";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeSr from "plotly.js/lib/locales/sr";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeSv from "plotly.js/lib/locales/sv";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeSw from "plotly.js/lib/locales/sw";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeTa from "plotly.js/lib/locales/ta";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeTh from "plotly.js/lib/locales/th";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeTr from "plotly.js/lib/locales/tr";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeTt from "plotly.js/lib/locales/tt";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeUk from "plotly.js/lib/locales/uk";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeUr from "plotly.js/lib/locales/ur";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeVi from "plotly.js/lib/locales/vi";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeZh_cn from "plotly.js/lib/locales/zh-cn";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeZh_hk from "plotly.js/lib/locales/zh-hk";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeZh_tw from "plotly.js/lib/locales/zh-tw";
(Plotly as any).register([
localeAf,
localeAm,
localeAr_dz,
localeAr_eg,
localeAr,
localeAz,
localeBg,
localeBs,
localeCa,
localeCs,
localeCy,
localeDa,
localeDe_ch,
localeDe,
localeEl,
localeEo,
localeEs_ar,
localeEs_pe,
localeEs,
localeEt,
localeEu,
localeFa,
localeFi,
localeFo,
localeFr_ch,
localeFr,
localeGl,
localeGu,
localeHe,
localeHi_in,
localeHr,
localeHu,
localeHy,
localeId,
localeIs,
localeIt,
localeJa,
localeKa,
localeKm,
localeKo,
localeLt,
localeLv,
localeMe_me,
localeMe,
localeMk,
localeMl,
localeMs,
localeMt,
localeNl_be,
localeNl,
localeNo,
localePa,
localePl,
localePt_br,
localePt_pt,
localeRm,
localeRo,
localeRu,
localeSk,
localeSl,
localeSq,
localeSr_sr,
localeSr,
localeSv,
localeSw,
localeTa,
localeTh,
localeTr,
localeTt,
localeUk,
localeUr,
localeVi,
localeZh_cn,
localeZh_hk,
localeZh_tw,
]);

View File

@@ -29,6 +29,8 @@ function prepareBarSeries(series: any, options: any, additionalOptions: any) {
  series.offsetgroup = toString(additionalOptions.index);
  if (options.showDataLabels) {
    series.textposition = "inside";
  } else {
    series.textposition = "none";
  }
  return series;
}
@@ -99,8 +101,8 @@ function prepareSeries(series: any, options: any, numSeries: any, additionalOpti
  };

  const sourceData = new Map();
  const xValues: any[] = [];
  const yValues: any[] = [];
  const yErrorValues: any = [];

  each(data, row => {
@@ -108,27 +110,20 @@ function prepareSeries(series: any, options: any, numSeries: any, additionalOpti
    const y = cleanYValue(row.y, seriesYAxis === "y2" ? options.yAxis[1].type : options.yAxis[0].type); // depends on series type!
    const yError = cleanNumber(row.yError); // always number
    const size = cleanNumber(row.size); // always number

    sourceData.set(x, {
      x,
      y,
      yError,
      size,
      yPercent: null, // will be updated later
      row,
    });
    xValues.push(x);
    yValues.push(y);
    yErrorValues.push(yError);
  });

  const plotlySeries = {
    visible: true,
    hoverinfo: hoverInfoPattern,

View File

@@ -21,7 +21,7 @@ function prepareXAxis(axisOptions: any, additionalOptions: any) {
    title: getAxisTitle(axisOptions),
    type: getAxisScaleType(axisOptions),
    automargin: true,
    tickformat: axisOptions.tickFormat ?? null,
  };

  if (additionalOptions.sortX && axis.type === "category") {
@@ -49,7 +49,7 @@ function prepareYAxis(axisOptions: any) {
    automargin: true,
    autorange: true,
    range: null,
    tickformat: axisOptions.tickFormat ?? null,
  };
}
@@ -109,7 +109,7 @@ function prepareBoxLayout(layout: any, options: any, data: any) {
}

export default function prepareLayout(element: any, options: any, data: any) {
  const layout: any = {
    margin: { l: 10, r: 10, b: 5, t: 20, pad: 4 },
    // plot size should be at least 5x5px
    width: Math.max(5, Math.floor(element.offsetWidth)),
@@ -124,6 +124,10 @@ export default function prepareLayout(element: any, options: any, data: any) {
    },
  };

  if (["line", "area", "column"].includes(options.globalSeriesType)) {
    layout.hovermode = options.swappedAxes ? 'y' : 'x';
  }

  switch (options.globalSeriesType) {
    case "pie":
      return preparePieLayout(layout, options, data);

File diff suppressed because it is too large

View File

@@ -133,6 +133,11 @@ const config = {
  },
  module: {
    rules: [
      {
        test: /\.js$/,
        enforce: "pre",
        use: ["source-map-loader"],
      },
      {
        test: /\.(t|j)sx?$/,
        exclude: /node_modules/,

yarn.lock (1924 lines changed)

File diff suppressed because it is too large