Compare commits


2 Commits

Author            SHA1         Message                                 Date
Arik Fraimovich   82b4a14d63   Update ci.yml to match latest master    2025-01-31 10:29:38 +02:00
Eric Radman       d1c255b7d8   25.1.0 release                          2025-01-02 10:17:02 -05:00
98 changed files with 5625 additions and 5178 deletions

View File

@@ -2,7 +2,7 @@ name: Periodic Snapshot
on:
schedule:
- cron: '10 0 1 * *' # 10 minutes after midnight on the first day of every month
- cron: '10 0 1 * *' # 10 minutes after midnight on the first of every month
workflow_dispatch:
inputs:
bump:
@@ -24,7 +24,6 @@ permissions:
jobs:
bump-version-and-tag:
runs-on: ubuntu-latest
if: github.ref_name == github.event.repository.default_branch
steps:
- uses: actions/checkout@v4
with:

View File

@@ -32,9 +32,6 @@ jobs:
elif [[ "${{ secrets.DOCKER_PASS }}" == '' ]]; then
echo 'Docker password is empty. Skipping build+push'
echo skip=true >> "$GITHUB_OUTPUT"
elif [[ "${{ vars.DOCKER_REPOSITORY }}" == '' ]]; then
echo 'Docker repository is empty. Skipping build+push'
echo skip=true >> "$GITHUB_OUTPUT"
else
echo 'Docker user and password are set and branch is `master`.'
echo 'Building + pushing `preview` image.'
@@ -42,20 +39,7 @@ jobs:
fi
build-docker-image:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
arch:
- amd64
- arm64
include:
- arch: amd64
os: ubuntu-22.04
- arch: arm64
os: ubuntu-22.04-arm
outputs:
VERSION_TAG: ${{ steps.version.outputs.VERSION_TAG }}
runs-on: ubuntu-22.04
needs:
- build-skip-check
if: needs.build-skip-check.outputs.skip == 'false'
@@ -70,6 +54,11 @@ jobs:
node-version: ${{ env.NODE_VERSION }}
cache: 'yarn'
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
with:
platforms: arm64
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
@@ -80,8 +69,6 @@ jobs:
password: ${{ secrets.DOCKER_PASS }}
- name: Install Dependencies
env:
PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: true
run: |
npm install --global --force yarn@1.22.22
yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
@@ -94,92 +81,40 @@ jobs:
VERSION_TAG=$(jq -r .version package.json)
echo "VERSION_TAG=$VERSION_TAG" >> "$GITHUB_OUTPUT"
# TODO: We can use GitHub Actions's matrix option to reduce the build time.
- name: Build and push preview image to Docker Hub
id: build-preview
uses: docker/build-push-action@v4
if: ${{ github.event.inputs.dockerRepository == 'preview' || !github.event.workflow_run }}
with:
push: true
tags: |
${{ vars.DOCKER_REPOSITORY }}/redash
${{ vars.DOCKER_REPOSITORY }}/preview
redash/redash:preview
redash/preview:${{ steps.version.outputs.VERSION_TAG }}
context: .
build-args: |
test_all_deps=true
outputs: type=image,push-by-digest=true,push=true
cache-from: type=gha,scope=${{ matrix.arch }}
cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
cache-from: type=gha,scope=multi-platform
cache-to: type=gha,mode=max,scope=multi-platform
platforms: linux/amd64,linux/arm64
env:
DOCKER_CONTENT_TRUST: true
- name: Build and push release image to Docker Hub
id: build-release
uses: docker/build-push-action@v4
if: ${{ github.event.inputs.dockerRepository == 'redash' }}
with:
push: true
tags: |
${{ vars.DOCKER_REPOSITORY }}/redash:${{ steps.version.outputs.VERSION_TAG }}
redash/redash:${{ steps.version.outputs.VERSION_TAG }}
context: .
build-args: |
test_all_deps=true
outputs: type=image,push-by-digest=false,push=true
cache-from: type=gha,scope=${{ matrix.arch }}
cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
cache-from: type=gha,scope=multi-platform
cache-to: type=gha,mode=max,scope=multi-platform
platforms: linux/amd64,linux/arm64
env:
DOCKER_CONTENT_TRUST: true
- name: "Failure: output container logs to console"
if: failure()
run: docker compose logs
- name: Export digest
run: |
mkdir -p ${{ runner.temp }}/digests
if [[ "${{ github.event.inputs.dockerRepository }}" == 'preview' || !github.event.workflow_run ]]; then
digest="${{ steps.build-preview.outputs.digest}}"
else
digest="${{ steps.build-release.outputs.digest}}"
fi
touch "${{ runner.temp }}/digests/${digest#sha256:}"
- name: Upload digest
uses: actions/upload-artifact@v4
with:
name: digests-${{ matrix.arch }}
path: ${{ runner.temp }}/digests/*
if-no-files-found: error
merge-docker-image:
runs-on: ubuntu-22.04
needs: build-docker-image
steps:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to DockerHub
uses: docker/login-action@v3
with:
username: ${{ vars.DOCKER_USER }}
password: ${{ secrets.DOCKER_PASS }}
- name: Download digests
uses: actions/download-artifact@v4
with:
path: ${{ runner.temp }}/digests
pattern: digests-*
merge-multiple: true
- name: Create and push manifest for the preview image
if: ${{ github.event.inputs.dockerRepository == 'preview' || !github.event.workflow_run }}
working-directory: ${{ runner.temp }}/digests
run: |
docker buildx imagetools create -t ${{ vars.DOCKER_REPOSITORY }}/redash:preview \
$(printf '${{ vars.DOCKER_REPOSITORY }}/redash:preview@sha256:%s ' *)
docker buildx imagetools create -t ${{ vars.DOCKER_REPOSITORY }}/preview:${{ needs.build-docker-image.outputs.VERSION_TAG }} \
$(printf '${{ vars.DOCKER_REPOSITORY }}/preview:${{ needs.build-docker-image.outputs.VERSION_TAG }}@sha256:%s ' *)
- name: Create and push manifest for the release image
if: ${{ github.event.inputs.dockerRepository == 'redash' }}
working-directory: ${{ runner.temp }}/digests
run: |
docker buildx imagetools create -t ${{ vars.DOCKER_REPOSITORY }}/redash:${{ needs.build-docker-image.outputs.VERSION_TAG }} \
$(printf '${{ vars.DOCKER_REPOSITORY }}/redash:${{ needs.build-docker-image.outputs.VERSION_TAG }}@sha256:%s ' *)

View File

@@ -95,7 +95,7 @@ EOF
WORKDIR /app
ENV POETRY_VERSION=2.1.4
ENV POETRY_VERSION=1.8.3
ENV POETRY_HOME=/etc/poetry
ENV POETRY_VIRTUALENVS_CREATE=false
RUN curl -sSL https://install.python-poetry.org | python3 -

View File

@@ -34,7 +34,7 @@ clean:
clean-all: clean
docker image rm --force \
redash/redash:latest redis:7-alpine maildev/maildev:latest \
redash/redash:10.1.0.b50633 redis:7-alpine maildev/maildev:latest \
pgautoupgrade/pgautoupgrade:15-alpine3.8 pgautoupgrade/pgautoupgrade:latest
down:

View File

@@ -46,7 +46,7 @@ server() {
MAX_REQUESTS=${MAX_REQUESTS:-1000}
MAX_REQUESTS_JITTER=${MAX_REQUESTS_JITTER:-100}
TIMEOUT=${REDASH_GUNICORN_TIMEOUT:-60}
exec /usr/local/bin/gunicorn -b 0.0.0.0:5000 --name redash -w${REDASH_WEB_WORKERS:-4} redash.wsgi:app --max-requests $MAX_REQUESTS --max-requests-jitter $MAX_REQUESTS_JITTER --timeout $TIMEOUT --limit-request-line ${REDASH_GUNICORN_LIMIT_REQUEST_LINE:-0}
exec /usr/local/bin/gunicorn -b 0.0.0.0:5000 --name redash -w${REDASH_WEB_WORKERS:-4} redash.wsgi:app --max-requests $MAX_REQUESTS --max-requests-jitter $MAX_REQUESTS_JITTER --timeout $TIMEOUT
}
create_db() {

View File

@@ -15,7 +15,7 @@ body {
display: table;
width: 100%;
padding: 10px;
height: calc(100% - 116px);
height: calc(100vh - 116px);
}
@media (min-width: 992px) {

View File

@@ -20,7 +20,7 @@ html {
html,
body {
height: 100%;
min-height: 100vh;
}
body {
@@ -35,7 +35,7 @@ body {
}
#application-root {
height: 100%;
min-height: 100vh;
}
#application-root,

View File

@@ -10,7 +10,7 @@
vertical-align: middle;
display: inline-block;
width: 1px;
height: 100%;
height: 100vh;
}
}
@@ -135,4 +135,4 @@
}
}

View File

@@ -8,7 +8,7 @@ body.fixed-layout {
padding-bottom: 0;
width: 100vw;
height: 100%;
height: 100vh;
.application-layout-content > div {
display: flex;
@@ -90,7 +90,7 @@ body.fixed-layout {
.embed__vis {
display: flex;
flex-flow: column;
height: calc(~'100% - 25px');
height: calc(~'100vh - 25px');
> .embed-heading {
flex: 0 0 auto;

View File

@@ -7,10 +7,10 @@ body #application-root {
flex-direction: row;
justify-content: stretch;
padding-bottom: 0 !important;
height: 100%;
height: 100vh;
.application-layout-side-menu {
height: 100%;
height: 100vh;
position: relative;
@media @mobileBreakpoint {
@@ -47,10 +47,6 @@ body #application-root {
}
}
body > section {
height: 100%;
}
body.fixed-layout #application-root {
.application-layout-content {
padding-bottom: 0;

View File

@@ -69,7 +69,7 @@ UserPreviewCard.defaultProps = {
// DataSourcePreviewCard
export function DataSourcePreviewCard({ dataSource, withLink, children, ...props }) {
const imageUrl = `/static/images/db-logos/${dataSource.type}.png`;
const imageUrl = `static/images/db-logos/${dataSource.type}.png`;
const title = withLink ? <Link href={"data_sources/" + dataSource.id}>{dataSource.name}</Link> : dataSource.name;
return (
<PreviewCard {...props} imageUrl={imageUrl} title={title}>

View File

@@ -51,7 +51,7 @@
right: 0;
background: linear-gradient(to bottom, transparent, transparent 2px, #f6f8f9 2px, #f6f8f9 5px),
linear-gradient(to left, #b3babf, #b3babf 1px, transparent 1px, transparent);
background-size: calc((100% + 15px) / 12) 5px;
background-size: calc((100% + 15px) / 6) 5px;
background-position: -7px 1px;
}
}

View File

@@ -96,7 +96,7 @@ function EmptyState({
}, []);
// Show if `onboardingMode=false` or any requested step not completed
const shouldShow = !onboardingMode || some(keys(isAvailable), (step) => isAvailable[step] && !isCompleted[step]);
const shouldShow = !onboardingMode || some(keys(isAvailable), step => isAvailable[step] && !isCompleted[step]);
if (!shouldShow) {
return null;
@@ -181,7 +181,7 @@ function EmptyState({
];
const stepsItems = getStepsItems ? getStepsItems(defaultStepsItems) : defaultStepsItems;
const imageSource = illustrationPath ? illustrationPath : "/static/images/illustrations/" + illustration + ".svg";
const imageSource = illustrationPath ? illustrationPath : "static/images/illustrations/" + illustration + ".svg";
return (
<div className="empty-state-wrapper">
@@ -196,7 +196,7 @@ function EmptyState({
</div>
<div className="empty-state__steps">
<h4>Let&apos;s get started</h4>
<ol>{stepsItems.map((item) => item.node)}</ol>
<ol>{stepsItems.map(item => item.node)}</ol>
{helpMessage}
</div>
</div>

View File

@@ -10,10 +10,6 @@ export interface PaginationOptions {
itemsPerPage?: number;
}
export interface SearchOptions {
isServerSideFTS?: boolean;
}
export interface Controller<I, P = any> {
params: P; // TODO: Find out what params is (except merging with props)
@@ -22,7 +18,7 @@ export interface Controller<I, P = any> {
// search
searchTerm?: string;
updateSearch: (searchTerm: string, searchOptions?: SearchOptions) => void;
updateSearch: (searchTerm: string) => void;
// tags
selectedTags: string[];
@@ -32,7 +28,6 @@ export interface Controller<I, P = any> {
orderByField?: string;
orderByReverse: boolean;
toggleSorting: (orderByField: string) => void;
setSorting: (orderByField: string, orderByReverse: boolean) => void;
// pagination
page: number;
@@ -98,7 +93,7 @@ export interface ItemsListWrappedComponentProps<I, P = any> {
export function wrap<I, P = any>(
WrappedComponent: React.ComponentType<ItemsListWrappedComponentProps<I>>,
createItemsSource: () => ItemsSource,
createStateStorage: ( { ...props }) => StateStorage
createStateStorage: () => StateStorage
) {
class ItemsListWrapper extends React.Component<ItemsListWrapperProps, ItemsListWrapperState<I, P>> {
private _itemsSource: ItemsSource;
@@ -121,7 +116,7 @@ export function wrap<I, P = any>(
constructor(props: ItemsListWrapperProps) {
super(props);
const stateStorage = createStateStorage({ ...props });
const stateStorage = createStateStorage();
const itemsSource = createItemsSource();
this._itemsSource = itemsSource;
@@ -144,11 +139,10 @@ export function wrap<I, P = any>(
this.props.onError!(error);
const initialState = this.getState({ ...itemsSource.getState(), isLoaded: false });
const { updatePagination, toggleSorting, setSorting, updateSearch, updateSelectedTags, update, handleError } = itemsSource;
const { updatePagination, toggleSorting, updateSearch, updateSelectedTags, update, handleError } = itemsSource;
this.state = {
...initialState,
toggleSorting, // eslint-disable-line react/no-unused-state
setSorting, // eslint-disable-line react/no-unused-state
updateSearch: debounce(updateSearch, 200), // eslint-disable-line react/no-unused-state
updateSelectedTags, // eslint-disable-line react/no-unused-state
updatePagination, // eslint-disable-line react/no-unused-state

View File

@@ -39,12 +39,14 @@ export class ItemsSource {
const customParams = {};
const context = {
...this.getCallbackContext(),
setCustomParams: (params) => {
setCustomParams: params => {
extend(customParams, params);
},
};
return this._beforeUpdate().then(() => {
const fetchToken = Math.random().toString(36).substr(2);
const fetchToken = Math.random()
.toString(36)
.substr(2);
this._currentFetchToken = fetchToken;
return this._fetcher
.fetch(changes, state, context)
@@ -57,7 +59,7 @@ export class ItemsSource {
return this._afterUpdate();
}
})
.catch((error) => this.handleError(error));
.catch(error => this.handleError(error));
});
}
@@ -122,26 +124,19 @@ export class ItemsSource {
});
};
toggleSorting = (orderByField) => {
toggleSorting = orderByField => {
this._sorter.toggleField(orderByField);
this._savedOrderByField = this._sorter.field;
this._changed({ sorting: true });
};
setSorting = (orderByField, orderByReverse) => {
this._sorter.setField(orderByField);
this._sorter.setReverse(orderByReverse);
this._savedOrderByField = this._sorter.field;
this._changed({ sorting: true });
};
updateSearch = (searchTerm, options) => {
updateSearch = searchTerm => {
// here we update state directly, but later `fetchData` will update it properly
this._searchTerm = searchTerm;
// in search mode ignore the ordering and use the ranking order
// provided by the server-side FTS backend instead, unless it was
// requested by the user by actively ordering in search mode
if (searchTerm === "" || !options?.isServerSideFTS) {
if (searchTerm === "") {
this._sorter.setField(this._savedOrderByField); // restore ordering
} else {
this._sorter.setField(null);
@@ -150,7 +145,7 @@ export class ItemsSource {
this._changed({ search: true, pagination: { page: true } });
};
updateSelectedTags = (selectedTags) => {
updateSelectedTags = selectedTags => {
this._selectedTags = selectedTags;
this._paginator.setPage(1);
this._changed({ tags: true, pagination: { page: true } });
@@ -158,7 +153,7 @@ export class ItemsSource {
update = () => this._changed();
handleError = (error) => {
handleError = error => {
if (isFunction(this.onError)) {
this.onError(error);
}
@@ -177,7 +172,7 @@ export class ResourceItemsSource extends ItemsSource {
processResults: (results, context) => {
let processItem = getItemProcessor(context);
processItem = isFunction(processItem) ? processItem : identity;
return map(results, (item) => processItem(item, context));
return map(results, item => processItem(item, context));
},
});
}

View File

@@ -44,7 +44,7 @@ export const Columns = {
date(overrides) {
return extend(
{
render: (text) => formatDate(text),
render: text => formatDate(text),
},
overrides
);
@@ -52,7 +52,7 @@ export const Columns = {
dateTime(overrides) {
return extend(
{
render: (text) => formatDateTime(text),
render: text => formatDateTime(text),
},
overrides
);
@@ -62,7 +62,7 @@ export const Columns = {
{
width: "1%",
className: "text-nowrap",
render: (text) => durationHumanize(text),
render: text => durationHumanize(text),
},
overrides
);
@@ -70,7 +70,7 @@ export const Columns = {
timeAgo(overrides, timeAgoCustomProps = undefined) {
return extend(
{
render: (value) => <TimeAgo date={value} {...timeAgoCustomProps} />,
render: value => <TimeAgo date={value} {...timeAgoCustomProps} />,
},
overrides
);
@@ -110,7 +110,6 @@ export default class ItemsTable extends React.Component {
orderByField: PropTypes.string,
orderByReverse: PropTypes.bool,
toggleSorting: PropTypes.func,
setSorting: PropTypes.func,
"data-test": PropTypes.string,
rowKey: PropTypes.oneOfType([PropTypes.string, PropTypes.func]),
};
@@ -128,15 +127,18 @@ export default class ItemsTable extends React.Component {
};
prepareColumns() {
const { orderByField, orderByReverse } = this.props;
const { orderByField, orderByReverse, toggleSorting } = this.props;
const orderByDirection = orderByReverse ? "descend" : "ascend";
return map(
map(
filter(this.props.columns, (column) => (isFunction(column.isAvailable) ? column.isAvailable() : true)),
(column) => extend(column, { orderByField: column.orderByField || column.field })
filter(this.props.columns, column => (isFunction(column.isAvailable) ? column.isAvailable() : true)),
column => extend(column, { orderByField: column.orderByField || column.field })
),
(column, index) => {
// Bind click events only to sortable columns
const onHeaderCell = column.sorter ? () => ({ onClick: () => toggleSorting(column.orderByField) }) : null;
// Wrap render function to pass correct arguments
const render = isFunction(column.render) ? (text, row) => column.render(text, row.item) : identity;
@@ -144,13 +146,14 @@ export default class ItemsTable extends React.Component {
key: "column" + index,
dataIndex: ["item", column.field],
defaultSortOrder: column.orderByField === orderByField ? orderByDirection : null,
onHeaderCell,
render,
});
}
);
}
getRowKey = (record) => {
getRowKey = record => {
const { rowKey } = this.props;
if (rowKey) {
if (isFunction(rowKey)) {
@@ -169,43 +172,22 @@ export default class ItemsTable extends React.Component {
// Bind events only if `onRowClick` specified
const onTableRow = isFunction(this.props.onRowClick)
? (row) => ({
onClick: (event) => {
? row => ({
onClick: event => {
this.props.onRowClick(event, row.item);
},
})
: null;
const onChange = (pagination, filters, sorter, extra) => {
const action = extra?.action;
if (action === "sort") {
const propsColumn = this.props.columns.find((column) => column.field === sorter.field[1]);
if (!propsColumn.sorter) {
return;
}
let orderByField = propsColumn.orderByField;
const orderByReverse = sorter.order === "descend";
if (orderByReverse === undefined) {
orderByField = null;
}
if (this.props.setSorting) {
this.props.setSorting(orderByField, orderByReverse);
} else {
this.props.toggleSorting(orderByField);
}
}
};
const { showHeader } = this.props;
if (this.props.loading) {
if (isEmpty(tableDataProps.dataSource)) {
tableDataProps.columns = tableDataProps.columns.map((column) => ({
tableDataProps.columns = tableDataProps.columns.map(column => ({
...column,
sorter: false,
render: () => <Skeleton active paragraph={false} />,
}));
tableDataProps.dataSource = range(10).map((key) => ({ key: `${key}` }));
tableDataProps.dataSource = range(10).map(key => ({ key: `${key}` }));
} else {
tableDataProps.loading = { indicator: null };
}
@@ -218,7 +200,6 @@ export default class ItemsTable extends React.Component {
rowKey={this.getRowKey}
pagination={false}
onRow={onTableRow}
onChange={onChange}
data-test={this.props["data-test"]}
{...tableDataProps}
/>

View File

@@ -47,30 +47,20 @@ function SchemaItem({ item, expanded, onToggle, onSelect, ...props }) {
return (
<div {...props}>
<div className="schema-list-item">
<Tooltip
title={item.description}
mouseEnterDelay={0}
mouseLeaveDelay={0}
placement="rightTop"
trigger={item.description ? "hover" : ""}
overlayStyle={{ whiteSpace: "pre-line" }}
>
<PlainButton className="table-name" onClick={onToggle}>
<i className="fa fa-table m-r-5" aria-hidden="true" />
<strong>
<span title={item.name}>{tableDisplayName}</span>
{!isNil(item.size) && <span> ({item.size})</span>}
</strong>
</PlainButton>
</Tooltip>
<PlainButton className="table-name" onClick={onToggle}>
<i className="fa fa-table m-r-5" aria-hidden="true" />
<strong>
<span title={item.name}>{tableDisplayName}</span>
{!isNil(item.size) && <span> ({item.size})</span>}
</strong>
</PlainButton>
<Tooltip
title="Insert table name into query text"
mouseEnterDelay={0}
mouseLeaveDelay={0}
placement="topRight"
arrowPointAtCenter
>
<PlainButton className="copy-to-editor" onClick={(e) => handleSelect(e, item.name)}>
arrowPointAtCenter>
<PlainButton className="copy-to-editor" onClick={e => handleSelect(e, item.name)}>
<i className="fa fa-angle-double-right" aria-hidden="true" />
</PlainButton>
</Tooltip>
@@ -80,22 +70,16 @@ function SchemaItem({ item, expanded, onToggle, onSelect, ...props }) {
{item.loading ? (
<div className="table-open">Loading...</div>
) : (
map(item.columns, (column) => {
map(item.columns, column => {
const columnName = get(column, "name");
const columnType = get(column, "type");
const columnDescription = get(column, "description");
return (
<Tooltip
title={"Insert column name into query text" + (columnDescription ? "\n" + columnDescription : "")}
title="Insert column name into query text"
mouseEnterDelay={0}
mouseLeaveDelay={0}
placement="rightTop"
>
<PlainButton
key={columnName}
className="table-open-item"
onClick={(e) => handleSelect(e, columnName)}
>
placement="rightTop">
<PlainButton key={columnName} className="table-open-item" onClick={e => handleSelect(e, columnName)}>
<div>
{columnName} {columnType && <span className="column-type">{columnType}</span>}
</div>
@@ -184,7 +168,7 @@ export function SchemaList({ loading, schema, expandedFlags, onTableExpand, onIt
}
export function applyFilterOnSchema(schema, filterString) {
const filters = filter(filterString.toLowerCase().split(/\s+/), (s) => s.length > 0);
const filters = filter(filterString.toLowerCase().split(/\s+/), s => s.length > 0);
// Empty string: return original schema
if (filters.length === 0) {
@@ -197,9 +181,9 @@ export function applyFilterOnSchema(schema, filterString) {
const columnFilter = filters[0];
return filter(
schema,
(item) =>
item =>
includes(item.name.toLowerCase(), nameFilter) ||
some(item.columns, (column) => includes(get(column, "name").toLowerCase(), columnFilter))
some(item.columns, column => includes(get(column, "name").toLowerCase(), columnFilter))
);
}
@@ -207,11 +191,11 @@ export function applyFilterOnSchema(schema, filterString) {
const nameFilter = filters[0];
const columnFilter = filters[1];
return filter(
map(schema, (item) => {
map(schema, item => {
if (includes(item.name.toLowerCase(), nameFilter)) {
item = {
...item,
columns: filter(item.columns, (column) => includes(get(column, "name").toLowerCase(), columnFilter)),
columns: filter(item.columns, column => includes(get(column, "name").toLowerCase(), columnFilter)),
};
return item.columns.length > 0 ? item : null;
}
@@ -259,7 +243,7 @@ export default function SchemaBrowser({
placeholder="Search schema..."
aria-label="Search schema"
disabled={schema.length === 0}
onChange={(event) => handleFilterChange(event.target.value)}
onChange={event => handleFilterChange(event.target.value)}
/>
<Tooltip title="Refresh Schema">

View File

@@ -59,7 +59,6 @@ function wrapComponentWithSettings(WrappedComponent) {
"dateTimeFormat",
"integerFormat",
"floatFormat",
"nullValue",
"booleanValues",
"tableCellMaxJSONSize",
"allowCustomJSVisualizations",

View File

@@ -1,13 +1,13 @@
export default {
columns: 12, // grid columns count
columns: 6, // grid columns count
rowHeight: 50, // grid row height (incl. bottom padding)
margins: 15, // widget margins
mobileBreakPoint: 800,
// defaults for widgets
defaultSizeX: 6,
defaultSizeX: 3,
defaultSizeY: 3,
minSizeX: 2,
maxSizeX: 12,
minSizeY: 2,
minSizeX: 1,
maxSizeX: 6,
minSizeY: 1,
maxSizeY: 1000,
};

View File

@@ -1,5 +1,5 @@
<!DOCTYPE html>
<html lang="en" translate="no">
<html lang="en">
<head>
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<meta charset="UTF-8" />

View File

@@ -81,19 +81,12 @@ function DashboardListExtraActions(props) {
}
function DashboardList({ controller }) {
let usedListColumns = listColumns;
if (controller.params.currentPage === "favorites") {
usedListColumns = [
...usedListColumns,
Columns.dateTime.sortable({ title: "Starred At", field: "starred_at", width: "1%" }),
];
}
const {
areExtraActionsAvailable,
listColumns: tableColumns,
Component: ExtraActionsComponent,
selectedItems,
} = useItemsListExtraActions(controller, usedListColumns, DashboardListExtraActions);
} = useItemsListExtraActions(controller, listColumns, DashboardListExtraActions);
return (
<div className="page-dashboard-list">
@@ -146,9 +139,9 @@ function DashboardList({ controller }) {
showPageSizeSelect
totalCount={controller.totalItemsCount}
pageSize={controller.itemsPerPage}
onPageSizeChange={(itemsPerPage) => controller.updatePagination({ itemsPerPage })}
onPageSizeChange={itemsPerPage => controller.updatePagination({ itemsPerPage })}
page={controller.page}
onChange={(page) => controller.updatePagination({ page })}
onChange={page => controller.updatePagination({ page })}
/>
</div>
</React.Fragment>
@@ -177,10 +170,10 @@ const DashboardListPage = itemsList(
}[currentPage];
},
getItemProcessor() {
return (item) => new Dashboard(item);
return item => new Dashboard(item);
},
}),
({ ...props }) => new UrlStateStorage({ orderByField: props.orderByField ?? "created_at", orderByReverse: true })
() => new UrlStateStorage({ orderByField: "created_at", orderByReverse: true })
);
routes.register(
@@ -188,7 +181,7 @@ routes.register(
routeWithUserSession({
path: "/dashboards",
title: "Dashboards",
render: (pageProps) => <DashboardListPage {...pageProps} currentPage="all" />,
render: pageProps => <DashboardListPage {...pageProps} currentPage="all" />,
})
);
routes.register(
@@ -196,7 +189,7 @@ routes.register(
routeWithUserSession({
path: "/dashboards/favorites",
title: "Favorite Dashboards",
render: (pageProps) => <DashboardListPage {...pageProps} currentPage="favorites" orderByField="starred_at" />,
render: pageProps => <DashboardListPage {...pageProps} currentPage="favorites" />,
})
);
routes.register(
@@ -204,6 +197,6 @@ routes.register(
routeWithUserSession({
path: "/dashboards/my",
title: "My Dashboards",
render: (pageProps) => <DashboardListPage {...pageProps} currentPage="my" />,
render: pageProps => <DashboardListPage {...pageProps} currentPage="my" />,
})
);

View File

@@ -8,7 +8,7 @@
}
> .container {
min-height: calc(100% - 95px);
min-height: calc(100vh - 95px);
}
.loading-message {

View File

@@ -15,7 +15,7 @@ export function FavoriteList({ title, resource, itemUrl, emptyState }) {
useEffect(() => {
setLoading(true);
resource
.favorites({ order: "-starred_at" })
.favorites()
.then(({ results }) => setItems(results))
.finally(() => setLoading(false));
}, [resource]);
@@ -28,7 +28,7 @@ export function FavoriteList({ title, resource, itemUrl, emptyState }) {
</div>
{!isEmpty(items) && (
<div role="list" className="list-group">
{items.map((item) => (
{items.map(item => (
<Link key={itemUrl(item)} role="listitem" className="list-group-item" href={itemUrl(item)}>
<span className="btn-favorite m-r-5">
<i className="fa fa-star" aria-hidden="true" />
@@ -61,7 +61,7 @@ export function DashboardAndQueryFavoritesList() {
<FavoriteList
title="Favorite Dashboards"
resource={Dashboard}
itemUrl={(dashboard) => dashboard.url}
itemUrl={dashboard => dashboard.url}
emptyState={
<p>
<span className="btn-favorite m-r-5">
@@ -76,7 +76,7 @@ export function DashboardAndQueryFavoritesList() {
<FavoriteList
title="Favorite Queries"
resource={Query}
itemUrl={(query) => `queries/${query.id}`}
itemUrl={query => `queries/${query.id}`}
emptyState={
<p>
<span className="btn-favorite m-r-5">

View File

@@ -1,4 +1,4 @@
import React, { useCallback, useEffect, useRef } from "react";
import React, { useEffect, useRef } from "react";
import cx from "classnames";
import routeWithUserSession from "@/components/ApplicationArea/routeWithUserSession";
@@ -20,7 +20,7 @@ import ItemsTable, { Columns } from "@/components/items-list/components/ItemsTab
import Layout from "@/components/layouts/ContentWithSidebar";
import { Query } from "@/services/query";
import { clientConfig, currentUser } from "@/services/auth";
import { currentUser } from "@/services/auth";
import location from "@/services/location";
import routes from "@/services/routes";
@@ -95,39 +95,25 @@ function QueriesList({ controller }) {
const controllerRef = useRef();
controllerRef.current = controller;
const updateSearch = useCallback(
(searchTemm) => {
controller.updateSearch(searchTemm, { isServerSideFTS: !clientConfig.multiByteSearchEnabled });
},
[controller]
);
useEffect(() => {
const unlistenLocationChanges = location.listen((unused, action) => {
const searchTerm = location.search.q || "";
if (action === "PUSH" && searchTerm !== controllerRef.current.searchTerm) {
updateSearch(searchTerm);
controllerRef.current.updateSearch(searchTerm);
}
});
return () => {
unlistenLocationChanges();
};
}, [updateSearch]);
}, []);
let usedListColumns = listColumns;
if (controller.params.currentPage === "favorites") {
usedListColumns = [
...usedListColumns,
Columns.dateTime.sortable({ title: "Starred At", field: "starred_at", width: "1%" }),
];
}
const {
areExtraActionsAvailable,
listColumns: tableColumns,
Component: ExtraActionsComponent,
selectedItems,
} = useItemsListExtraActions(controller, usedListColumns, QueriesListExtraActions);
} = useItemsListExtraActions(controller, listColumns, QueriesListExtraActions);
return (
<div className="page-queries-list">
@@ -149,7 +135,7 @@ function QueriesList({ controller }) {
placeholder="Search Queries..."
label="Search queries"
value={controller.searchTerm}
onChange={updateSearch}
onChange={controller.updateSearch}
/>
<Sidebar.Menu items={sidebarMenu} selected={controller.params.currentPage} />
<Sidebar.Tags url="api/queries/tags" onChange={controller.updateSelectedTags} showUnselectAll />
@@ -174,15 +160,14 @@ function QueriesList({ controller }) {
orderByField={controller.orderByField}
orderByReverse={controller.orderByReverse}
toggleSorting={controller.toggleSorting}
setSorting={controller.setSorting}
/>
<Paginator
showPageSizeSelect
totalCount={controller.totalItemsCount}
pageSize={controller.itemsPerPage}
onPageSizeChange={(itemsPerPage) => controller.updatePagination({ itemsPerPage })}
onPageSizeChange={itemsPerPage => controller.updatePagination({ itemsPerPage })}
page={controller.page}
onChange={(page) => controller.updatePagination({ page })}
onChange={page => controller.updatePagination({ page })}
/>
</div>
</React.Fragment>
@@ -211,10 +196,10 @@ const QueriesListPage = itemsList(
}[currentPage];
},
getItemProcessor() {
return (item) => new Query(item);
return item => new Query(item);
},
}),
({ ...props }) => new UrlStateStorage({ orderByField: props.orderByField ?? "created_at", orderByReverse: true })
() => new UrlStateStorage({ orderByField: "created_at", orderByReverse: true })
);
routes.register(
@@ -222,7 +207,7 @@ routes.register(
routeWithUserSession({
path: "/queries",
title: "Queries",
render: (pageProps) => <QueriesListPage {...pageProps} currentPage="all" />,
render: pageProps => <QueriesListPage {...pageProps} currentPage="all" />,
})
);
routes.register(
@@ -230,7 +215,7 @@ routes.register(
routeWithUserSession({
path: "/queries/favorites",
title: "Favorite Queries",
render: (pageProps) => <QueriesListPage {...pageProps} currentPage="favorites" orderByField="starred_at" />,
render: pageProps => <QueriesListPage {...pageProps} currentPage="favorites" />,
})
);
routes.register(
@@ -238,7 +223,7 @@ routes.register(
routeWithUserSession({
path: "/queries/archive",
title: "Archived Queries",
render: (pageProps) => <QueriesListPage {...pageProps} currentPage="archive" />,
render: pageProps => <QueriesListPage {...pageProps} currentPage="archive" />,
})
);
routes.register(
@@ -246,6 +231,6 @@ routes.register(
routeWithUserSession({
path: "/queries/my",
title: "My Queries",
render: (pageProps) => <QueriesListPage {...pageProps} currentPage="my" />,
render: pageProps => <QueriesListPage {...pageProps} currentPage="my" />,
})
);

View File

@@ -2,7 +2,7 @@ import PropTypes from "prop-types";
import React from "react";
export function QuerySourceTypeIcon(props) {
return <img src={`/static/images/db-logos/${props.type}.png`} width="20" alt={props.alt} />;
return <img src={`static/images/db-logos/${props.type}.png`} width="20" alt={props.alt} />;
}
QuerySourceTypeIcon.propTypes = {

View File

@@ -18,7 +18,7 @@ function EmptyState({ title, message, refreshButton }) {
<div className="query-results-empty-state">
<div className="empty-state-content">
<div>
<img src="/static/images/illustrations/no-query-results.svg" alt="No Query Results Illustration" />
<img src="static/images/illustrations/no-query-results.svg" alt="No Query Results Illustration" />
</div>
<h3>{title}</h3>
<div className="m-b-20">{message}</div>
@@ -40,7 +40,7 @@ EmptyState.defaultProps = {
function TabWithDeleteButton({ visualizationName, canDelete, onDelete, ...props }) {
const handleDelete = useCallback(
(e) => {
e => {
e.stopPropagation();
Modal.confirm({
title: "Delete Visualization",
@@ -111,8 +111,7 @@ export default function QueryVisualizationTabs({
className="add-visualization-button"
data-test="NewVisualization"
type="link"
onClick={() => onAddVisualization()}
>
onClick={() => onAddVisualization()}>
<i className="fa fa-plus" aria-hidden="true" />
<span className="m-l-5 hidden-xs">Add Visualization</span>
</Button>
@@ -120,7 +119,7 @@ export default function QueryVisualizationTabs({
}
const orderedVisualizations = useMemo(() => orderBy(visualizations, ["id"]), [visualizations]);
const isFirstVisualization = useCallback((visId) => visId === orderedVisualizations[0].id, [orderedVisualizations]);
const isFirstVisualization = useCallback(visId => visId === orderedVisualizations[0].id, [orderedVisualizations]);
const isMobile = useMedia({ maxWidth: 768 });
const [filters, setFilters] = useState([]);
@@ -133,10 +132,9 @@ export default function QueryVisualizationTabs({
data-test="QueryPageVisualizationTabs"
animated={false}
tabBarGutter={0}
onChange={(activeKey) => onChangeTab(+activeKey)}
destroyInactiveTabPane
>
{orderedVisualizations.map((visualization) => (
onChange={activeKey => onChangeTab(+activeKey)}
destroyInactiveTabPane>
{orderedVisualizations.map(visualization => (
<TabPane
key={`${visualization.id}`}
tab={
@@ -146,8 +144,7 @@ export default function QueryVisualizationTabs({
visualizationName={visualization.name}
onDelete={() => onDeleteVisualization(visualization.id)}
/>
}
>
}>
{queryResult ? (
<VisualizationRenderer
visualization={visualization}

View File

@@ -1,11 +1,16 @@
import { useCallback, useMemo, useState } from "react";
import { reduce } from "lodash";
import localOptions from "@/lib/localOptions";
function calculateTokensCount(schema) {
return reduce(schema, (totalLength, table) => totalLength + table.columns.length, 0);
}
export default function useAutocompleteFlags(schema) {
const isAvailable = true;
const isAvailable = useMemo(() => calculateTokensCount(schema) <= 5000, [schema]);
const [isEnabled, setIsEnabled] = useState(localOptions.get("liveAutocomplete", true));
const toggleAutocomplete = useCallback((state) => {
const toggleAutocomplete = useCallback(state => {
setIsEnabled(state);
localOptions.set("liveAutocomplete", state);
}, []);

View File

@@ -4,19 +4,19 @@ import { fetchDataFromJob } from "@/services/query-result";
export const SCHEMA_NOT_SUPPORTED = 1;
export const SCHEMA_LOAD_ERROR = 2;
export const IMG_ROOT = "/static/images/db-logos";
export const IMG_ROOT = "static/images/db-logos";
function mapSchemaColumnsToObject(columns) {
return map(columns, (column) => (isObject(column) ? column : { name: column }));
return map(columns, column => (isObject(column) ? column : { name: column }));
}
const DataSource = {
query: () => axios.get("api/data_sources"),
get: ({ id }) => axios.get(`api/data_sources/${id}`),
types: () => axios.get("api/data_sources/types"),
create: (data) => axios.post(`api/data_sources`, data),
save: (data) => axios.post(`api/data_sources/${data.id}`, data),
test: (data) => axios.post(`api/data_sources/${data.id}/test`),
create: data => axios.post(`api/data_sources`, data),
save: data => axios.post(`api/data_sources/${data.id}`, data),
test: data => axios.post(`api/data_sources/${data.id}/test`),
delete: ({ id }) => axios.delete(`api/data_sources/${id}`),
fetchSchema: (data, refresh = false) => {
const params = {};
@@ -27,15 +27,15 @@ const DataSource = {
return axios
.get(`api/data_sources/${data.id}/schema`, { params })
.then((data) => {
.then(data => {
if (has(data, "job")) {
return fetchDataFromJob(data.job.id).catch((error) =>
return fetchDataFromJob(data.job.id).catch(error =>
error.code === SCHEMA_NOT_SUPPORTED ? [] : Promise.reject(new Error(data.job.error))
);
}
return has(data, "schema") ? data.schema : Promise.reject();
})
.then((tables) => map(tables, (table) => ({ ...table, columns: mapSchemaColumnsToObject(table.columns) })));
.then(tables => map(tables, table => ({ ...table, columns: mapSchemaColumnsToObject(table.columns) })));
},
};

View File

@@ -9,7 +9,7 @@ function normalizeLocation(rawLocation) {
const result = {};
result.path = pathname;
result.search = mapValues(qs.parse(search), (value) => (isNil(value) ? true : value));
result.search = mapValues(qs.parse(search), value => (isNil(value) ? true : value));
result.hash = trimStart(hash, "#");
result.url = `${pathname}${search}${hash}`;
@@ -27,7 +27,7 @@ const location = {
confirmChange(handler) {
if (isFunction(handler)) {
return history.block((nextLocation) => {
return history.block(nextLocation => {
return handler(normalizeLocation(nextLocation), location);
});
} else {
@@ -60,18 +60,12 @@ const location = {
// serialize search and keep existing search parameters (!)
if (isObject(newLocation.search)) {
newLocation.search = omitBy(extend({}, location.search, newLocation.search), isNil);
newLocation.search = mapValues(newLocation.search, (value) => (value === true ? null : value));
newLocation.search = mapValues(newLocation.search, value => (value === true ? null : value));
newLocation.search = qs.stringify(newLocation.search);
}
}
if (replace) {
if (
newLocation.pathname !== location.path ||
newLocation.search !== qs.stringify(location.search) ||
newLocation.hash !== location.hash
) {
history.replace(newLocation);
}
history.replace(newLocation);
} else {
history.push(newLocation);
}

View File

@@ -9,7 +9,7 @@ const logger = debug("redash:services:QueryResult");
const filterTypes = ["filter", "multi-filter", "multiFilter"];
function defer() {
const result = { onStatusChange: (status) => {} };
const result = { onStatusChange: status => {} };
result.promise = new Promise((resolve, reject) => {
result.resolve = resolve;
result.reject = reject;
@@ -40,13 +40,13 @@ function getColumnNameWithoutType(column) {
}
function getColumnFriendlyName(column) {
return getColumnNameWithoutType(column).replace(/(?:^|\s)\S/g, (a) => a.toUpperCase());
return getColumnNameWithoutType(column).replace(/(?:^|\s)\S/g, a => a.toUpperCase());
}
const createOrSaveUrl = (data) => (data.id ? `api/query_results/${data.id}` : "api/query_results");
const createOrSaveUrl = data => (data.id ? `api/query_results/${data.id}` : "api/query_results");
const QueryResultResource = {
get: ({ id }) => axios.get(`api/query_results/${id}`),
post: (data) => axios.post(createOrSaveUrl(data), data),
post: data => axios.post(createOrSaveUrl(data), data),
};
export const ExecutionStatus = {
@@ -97,11 +97,11 @@ function handleErrorResponse(queryResult, error) {
}
function sleep(ms) {
return new Promise((resolve) => setTimeout(resolve, ms));
return new Promise(resolve => setTimeout(resolve, ms));
}
export function fetchDataFromJob(jobId, interval = 1000) {
return axios.get(`api/jobs/${jobId}`).then((data) => {
return axios.get(`api/jobs/${jobId}`).then(data => {
const status = statuses[data.job.status];
if (status === ExecutionStatus.WAITING || status === ExecutionStatus.PROCESSING) {
return sleep(interval).then(() => fetchDataFromJob(data.job.id));
@@ -146,7 +146,7 @@ class QueryResult {
// TODO: we should stop manipulating incoming data, and switch to relaying
// on the column type set by the backend. This logic is prone to errors,
// and better be removed. Kept for now, for backward compatability.
each(this.query_result.data.rows, (row) => {
each(this.query_result.data.rows, row => {
forOwn(row, (v, k) => {
let newType = null;
if (isNumber(v)) {
@@ -173,7 +173,7 @@ class QueryResult {
});
});
each(this.query_result.data.columns, (column) => {
each(this.query_result.data.columns, column => {
column.name = "" + column.name;
if (columnTypes[column.name]) {
if (column.type == null || column.type === "string") {
@@ -265,14 +265,14 @@ class QueryResult {
getColumnNames() {
if (this.columnNames === undefined && this.query_result.data) {
this.columnNames = this.query_result.data.columns.map((v) => v.name);
this.columnNames = this.query_result.data.columns.map(v => v.name);
}
return this.columnNames;
}
getColumnFriendlyNames() {
return this.getColumnNames().map((col) => getColumnFriendlyName(col));
return this.getColumnNames().map(col => getColumnFriendlyName(col));
}
getTruncated() {
@@ -286,7 +286,7 @@ class QueryResult {
const filters = [];
this.getColumns().forEach((col) => {
this.getColumns().forEach(col => {
const name = col.name;
const type = name.split("::")[1] || name.split("__")[1];
if (includes(filterTypes, type)) {
@@ -302,8 +302,8 @@ class QueryResult {
}
}, this);
this.getRawData().forEach((row) => {
filters.forEach((filter) => {
this.getRawData().forEach(row => {
filters.forEach(filter => {
filter.values.push(row[filter.name]);
if (filter.values.length === 1) {
if (filter.multiple) {
@@ -315,8 +315,8 @@ class QueryResult {
});
});
filters.forEach((filter) => {
filter.values = uniqBy(filter.values, (v) => {
filters.forEach(filter => {
filter.values = uniqBy(filter.values, v => {
if (moment.isMoment(v)) {
return v.unix();
}
@@ -345,12 +345,12 @@ class QueryResult {
axios
.get(`api/queries/${queryId}/results/${id}.json`)
.then((response) => {
.then(response => {
// Success handler
queryResult.isLoadingResult = false;
queryResult.update(response);
})
.catch((error) => {
.catch(error => {
// Error handler
queryResult.isLoadingResult = false;
handleErrorResponse(queryResult, error);
@@ -362,10 +362,10 @@ class QueryResult {
loadLatestCachedResult(queryId, parameters) {
axios
.post(`api/queries/${queryId}/results`, { queryId, parameters })
.then((response) => {
.then(response => {
this.update(response);
})
.catch((error) => {
.catch(error => {
handleErrorResponse(this, error);
});
}
@@ -375,11 +375,11 @@ class QueryResult {
this.deferred.onStatusChange(ExecutionStatus.LOADING_RESULT);
QueryResultResource.get({ id: this.job.query_result_id })
.then((response) => {
.then(response => {
this.update(response);
this.isLoadingResult = false;
})
.catch((error) => {
.catch(error => {
if (tryCount === undefined) {
tryCount = 0;
}
@@ -394,12 +394,9 @@ class QueryResult {
});
this.isLoadingResult = false;
} else {
setTimeout(
() => {
this.loadResult(tryCount + 1);
},
1000 * Math.pow(2, tryCount)
);
setTimeout(() => {
this.loadResult(tryCount + 1);
}, 1000 * Math.pow(2, tryCount));
}
});
}
@@ -413,26 +410,19 @@ class QueryResult {
: axios.get(`api/queries/${query}/jobs/${this.job.id}`);
request
.then((jobResponse) => {
.then(jobResponse => {
this.update(jobResponse);
if (this.getStatus() === "processing" && this.job.query_result_id && this.job.query_result_id !== "None") {
loadResult();
} else if (this.getStatus() !== "failed") {
let waitTime;
if (tryNumber <= 10) {
waitTime = 500;
} else if (tryNumber <= 50) {
waitTime = 1000;
} else {
waitTime = 3000;
}
const waitTime = tryNumber > 10 ? 3000 : 500;
setTimeout(() => {
this.refreshStatus(query, parameters, tryNumber + 1);
}, waitTime);
}
})
.catch((error) => {
.catch(error => {
logger("Connection error", error);
// TODO: use QueryResultError, or better yet: exception/reject of promise.
this.update({
@@ -461,14 +451,14 @@ class QueryResult {
axios
.post(`api/queries/${id}/results`, { id, parameters, apply_auto_limit: applyAutoLimit, max_age: maxAge })
.then((response) => {
.then(response => {
queryResult.update(response);
if ("job" in response) {
queryResult.refreshStatus(id, parameters);
}
})
.catch((error) => {
.catch(error => {
handleErrorResponse(queryResult, error);
});
@@ -491,14 +481,14 @@ class QueryResult {
}
QueryResultResource.post(params)
.then((response) => {
.then(response => {
queryResult.update(response);
if ("job" in response) {
queryResult.refreshStatus(query, parameters);
}
})
.catch((error) => {
.catch(error => {
handleErrorResponse(queryResult, error);
});

View File

@@ -63,7 +63,7 @@ function runCypressCI() {
CYPRESS_OPTIONS, // eslint-disable-line no-unused-vars
} = process.env;
if (GITHUB_REPOSITORY === "getredash/redash" && process.env.CYPRESS_RECORD_KEY) {
if (GITHUB_REPOSITORY === "getredash/redash") {
process.env.CYPRESS_OPTIONS = "--record";
}

View File

@@ -23,7 +23,7 @@ describe("Dashboard", () => {
cy.getByTestId("DashboardSaveButton").click();
});
cy.wait("@NewDashboard").then((xhr) => {
cy.wait("@NewDashboard").then(xhr => {
const id = Cypress._.get(xhr, "response.body.id");
assert.isDefined(id, "Dashboard api call returns id");
@@ -40,9 +40,13 @@ describe("Dashboard", () => {
cy.getByTestId("DashboardMoreButton").click();
cy.getByTestId("DashboardMoreButtonMenu").contains("Archive").click();
cy.getByTestId("DashboardMoreButtonMenu")
.contains("Archive")
.click();
cy.get(".ant-modal .ant-btn").contains("Archive").click({ force: true });
cy.get(".ant-modal .ant-btn")
.contains("Archive")
.click({ force: true });
cy.get(".label-tag-archived").should("exist");
cy.visit("/dashboards");
@@ -56,7 +60,7 @@ describe("Dashboard", () => {
cy.server();
cy.route("GET", "**/api/dashboards/*").as("LoadDashboard");
cy.createDashboard("Dashboard multiple urls").then(({ id, slug }) => {
[`/dashboards/${id}`, `/dashboards/${id}-anything-here`, `/dashboard/${slug}`].forEach((url) => {
[`/dashboards/${id}`, `/dashboards/${id}-anything-here`, `/dashboard/${slug}`].forEach(url => {
cy.visit(url);
cy.wait("@LoadDashboard");
cy.getByTestId(`DashboardId${id}Container`).should("exist");
@@ -68,7 +72,7 @@ describe("Dashboard", () => {
});
context("viewport width is at 800px", () => {
before(function () {
before(function() {
cy.login();
cy.createDashboard("Foo Bar")
.then(({ id }) => {
@@ -76,42 +80,49 @@ describe("Dashboard", () => {
this.dashboardEditUrl = `/dashboards/${id}?edit`;
return cy.addTextbox(id, "Hello World!").then(getWidgetTestId);
})
.then((elTestId) => {
.then(elTestId => {
cy.visit(this.dashboardUrl);
cy.getByTestId(elTestId).as("textboxEl");
});
});
beforeEach(function () {
beforeEach(function() {
cy.login();
cy.visit(this.dashboardUrl);
cy.viewport(800 + menuWidth, 800);
});
it("shows widgets with full width", () => {
cy.get("@textboxEl").should(($el) => {
cy.get("@textboxEl").should($el => {
expect($el.width()).to.eq(770);
});
cy.viewport(801 + menuWidth, 800);
cy.get("@textboxEl").should(($el) => {
expect($el.width()).to.eq(182);
cy.get("@textboxEl").should($el => {
expect($el.width()).to.eq(378);
});
});
it("hides edit option", () => {
cy.getByTestId("DashboardMoreButton").click().should("be.visible");
cy.getByTestId("DashboardMoreButton")
.click()
.should("be.visible");
cy.getByTestId("DashboardMoreButtonMenu").contains("Edit").as("editButton").should("not.be.visible");
cy.getByTestId("DashboardMoreButtonMenu")
.contains("Edit")
.as("editButton")
.should("not.be.visible");
cy.viewport(801 + menuWidth, 800);
cy.get("@editButton").should("be.visible");
});
it("disables edit mode", function () {
it("disables edit mode", function() {
cy.viewport(801 + menuWidth, 800);
cy.visit(this.dashboardEditUrl);
cy.contains("button", "Done Editing").as("saveButton").should("exist");
cy.contains("button", "Done Editing")
.as("saveButton")
.should("exist");
cy.viewport(800 + menuWidth, 800);
cy.contains("button", "Done Editing").should("not.exist");
@@ -119,14 +130,14 @@ describe("Dashboard", () => {
});
context("viewport width is at 767px", () => {
before(function () {
before(function() {
cy.login();
cy.createDashboard("Foo Bar").then(({ id }) => {
this.dashboardUrl = `/dashboards/${id}`;
});
});
beforeEach(function () {
beforeEach(function() {
cy.visit(this.dashboardUrl);
cy.viewport(767, 800);
});

View File

@@ -5,7 +5,7 @@ import { getWidgetTestId, editDashboard, resizeBy } from "../../support/dashboar
const menuWidth = 80;
describe("Grid compliant widgets", () => {
beforeEach(function () {
beforeEach(function() {
cy.login();
cy.viewport(1215 + menuWidth, 800);
cy.createDashboard("Foo Bar")
@@ -13,7 +13,7 @@ describe("Grid compliant widgets", () => {
this.dashboardUrl = `/dashboards/${id}`;
return cy.addTextbox(id, "Hello World!").then(getWidgetTestId);
})
.then((elTestId) => {
.then(elTestId => {
cy.visit(this.dashboardUrl);
cy.getByTestId(elTestId).as("textboxEl");
});
@@ -27,7 +27,7 @@ describe("Grid compliant widgets", () => {
it("stays put when dragged under snap threshold", () => {
cy.get("@textboxEl")
.dragBy(30)
.dragBy(90)
.invoke("offset")
.should("have.property", "left", 15 + menuWidth); // no change, 15 -> 15
});
@@ -36,14 +36,14 @@ describe("Grid compliant widgets", () => {
cy.get("@textboxEl")
.dragBy(110)
.invoke("offset")
.should("have.property", "left", 115 + menuWidth); // moved by 100, 15 -> 115
.should("have.property", "left", 215 + menuWidth); // moved by 200, 15 -> 215
});
it("moves two columns when dragged over snap threshold", () => {
cy.get("@textboxEl")
.dragBy(200)
.dragBy(330)
.invoke("offset")
.should("have.property", "left", 215 + menuWidth); // moved by 200, 15 -> 215
.should("have.property", "left", 415 + menuWidth); // moved by 400, 15 -> 415
});
});
@@ -52,7 +52,7 @@ describe("Grid compliant widgets", () => {
cy.route("POST", "**/api/widgets/*").as("WidgetSave");
editDashboard();
cy.get("@textboxEl").dragBy(100);
cy.get("@textboxEl").dragBy(330);
cy.wait("@WidgetSave");
});
});
@@ -64,24 +64,24 @@ describe("Grid compliant widgets", () => {
});
it("stays put when dragged under snap threshold", () => {
resizeBy(cy.get("@textboxEl"), 30)
resizeBy(cy.get("@textboxEl"), 90)
.then(() => cy.get("@textboxEl"))
.invoke("width")
.should("eq", 285); // no change, 285 -> 285
.should("eq", 585); // no change, 585 -> 585
});
it("moves one column when dragged over snap threshold", () => {
resizeBy(cy.get("@textboxEl"), 110)
.then(() => cy.get("@textboxEl"))
.invoke("width")
.should("eq", 385); // resized by 200, 185 -> 385
.should("eq", 785); // resized by 200, 585 -> 785
});
it("moves two columns when dragged over snap threshold", () => {
resizeBy(cy.get("@textboxEl"), 400)
.then(() => cy.get("@textboxEl"))
.invoke("width")
.should("eq", 685); // resized by 400, 285 -> 685
.should("eq", 985); // resized by 400, 585 -> 985
});
});
@@ -101,16 +101,16 @@ describe("Grid compliant widgets", () => {
resizeBy(cy.get("@textboxEl"), 0, 30)
.then(() => cy.get("@textboxEl"))
.invoke("height")
.should("eq", 185);
.should("eq", 185); // resized by 50, , 135 -> 185
});
it("shrinks to minimum", () => {
cy.get("@textboxEl")
.then(($el) => resizeBy(cy.get("@textboxEl"), -$el.width(), -$el.height())) // resize to 0,0
.then($el => resizeBy(cy.get("@textboxEl"), -$el.width(), -$el.height())) // resize to 0,0
.then(() => cy.get("@textboxEl"))
.should(($el) => {
.should($el => {
expect($el.width()).to.eq(185); // min textbox width
expect($el.height()).to.eq(85); // min textbox height
expect($el.height()).to.eq(35); // min textbox height
});
});
});

View File

@@ -3,7 +3,7 @@
import { getWidgetTestId, editDashboard } from "../../support/dashboard";
describe("Textbox", () => {
beforeEach(function () {
beforeEach(function() {
cy.login();
cy.createDashboard("Foo Bar").then(({ id }) => {
this.dashboardId = id;
@@ -12,10 +12,12 @@ describe("Textbox", () => {
});
const confirmDeletionInModal = () => {
cy.get(".ant-modal .ant-btn").contains("Delete").click({ force: true });
cy.get(".ant-modal .ant-btn")
.contains("Delete")
.click({ force: true });
};
it("adds textbox", function () {
it("adds textbox", function() {
cy.visit(this.dashboardUrl);
editDashboard();
cy.getByTestId("AddTextboxButton").click();
@@ -27,10 +29,10 @@ describe("Textbox", () => {
cy.get(".widget-text").should("exist");
});
it("removes textbox by X button", function () {
it("removes textbox by X button", function() {
cy.addTextbox(this.dashboardId, "Hello World!")
.then(getWidgetTestId)
.then((elTestId) => {
.then(elTestId => {
cy.visit(this.dashboardUrl);
editDashboard();
@@ -43,30 +45,32 @@ describe("Textbox", () => {
});
});
it("removes textbox by menu", function () {
it("removes textbox by menu", function() {
cy.addTextbox(this.dashboardId, "Hello World!")
.then(getWidgetTestId)
.then((elTestId) => {
.then(elTestId => {
cy.visit(this.dashboardUrl);
cy.getByTestId(elTestId).within(() => {
cy.getByTestId("WidgetDropdownButton").click();
});
cy.getByTestId("WidgetDropdownButtonMenu").contains("Remove from Dashboard").click();
cy.getByTestId("WidgetDropdownButtonMenu")
.contains("Remove from Dashboard")
.click();
confirmDeletionInModal();
cy.getByTestId(elTestId).should("not.exist");
});
});
it("allows opening menu after removal", function () {
it("allows opening menu after removal", function() {
let elTestId1;
cy.addTextbox(this.dashboardId, "txb 1")
.then(getWidgetTestId)
.then((elTestId) => {
.then(elTestId => {
elTestId1 = elTestId;
return cy.addTextbox(this.dashboardId, "txb 2").then(getWidgetTestId);
})
.then((elTestId2) => {
.then(elTestId2 => {
cy.visit(this.dashboardUrl);
editDashboard();
@@ -93,10 +97,10 @@ describe("Textbox", () => {
});
});
it("edits textbox", function () {
it("edits textbox", function() {
cy.addTextbox(this.dashboardId, "Hello World!")
.then(getWidgetTestId)
.then((elTestId) => {
.then(elTestId => {
cy.visit(this.dashboardUrl);
cy.getByTestId(elTestId)
.as("textboxEl")
@@ -104,13 +108,17 @@ describe("Textbox", () => {
cy.getByTestId("WidgetDropdownButton").click();
});
cy.getByTestId("WidgetDropdownButtonMenu").contains("Edit").click();
cy.getByTestId("WidgetDropdownButtonMenu")
.contains("Edit")
.click();
const newContent = "[edited]";
cy.getByTestId("TextboxDialog")
.should("exist")
.within(() => {
cy.get("textarea").clear().type(newContent);
cy.get("textarea")
.clear()
.type(newContent);
cy.contains("button", "Save").click();
});
@@ -118,7 +126,7 @@ describe("Textbox", () => {
});
});
it("renders textbox according to position configuration", function () {
it("renders textbox according to position configuration", function() {
const id = this.dashboardId;
const txb1Pos = { col: 0, row: 0, sizeX: 3, sizeY: 2 };
const txb2Pos = { col: 1, row: 1, sizeX: 3, sizeY: 4 };
@@ -127,15 +135,15 @@ describe("Textbox", () => {
cy.addTextbox(id, "x", { position: txb1Pos })
.then(() => cy.addTextbox(id, "x", { position: txb2Pos }))
.then(getWidgetTestId)
.then((elTestId) => {
.then(elTestId => {
cy.visit(this.dashboardUrl);
return cy.getByTestId(elTestId);
})
.should(($el) => {
.should($el => {
const { top, left } = $el.offset();
expect(top).to.be.oneOf([162, 162.015625]);
expect(left).to.eq(188);
expect($el.width()).to.eq(265);
expect(left).to.eq(282);
expect($el.width()).to.eq(545);
expect($el.height()).to.eq(185);
});
});

View File

@@ -3,26 +3,36 @@
* @param should Passed to should expression after plot points are captured
*/
export function assertPlotPreview(should = "exist") {
cy.getByTestId("VisualizationPreview").find("g.overplot").should("exist").find("g.points").should(should);
cy.getByTestId("VisualizationPreview")
.find("g.plot")
.should("exist")
.find("g.points")
.should(should);
}
export function createChartThroughUI(chartName, chartSpecificAssertionFn = () => {}) {
cy.getByTestId("NewVisualization").click();
cy.getByTestId("VisualizationType").selectAntdOption("VisualizationType.CHART");
cy.getByTestId("VisualizationName").clear().type(chartName);
cy.getByTestId("VisualizationName")
.clear()
.type(chartName);
chartSpecificAssertionFn();
cy.server();
cy.route("POST", "**/api/visualizations").as("SaveVisualization");
cy.getByTestId("EditVisualizationDialog").contains("button", "Save").click();
cy.getByTestId("EditVisualizationDialog")
.contains("button", "Save")
.click();
cy.getByTestId("QueryPageVisualizationTabs").contains("span", chartName).should("exist");
cy.getByTestId("QueryPageVisualizationTabs")
.contains("span", chartName)
.should("exist");
cy.wait("@SaveVisualization").should("have.property", "status", 200);
return cy.get("@SaveVisualization").then((xhr) => {
return cy.get("@SaveVisualization").then(xhr => {
const { id, name, options } = xhr.response.body;
return cy.wrap({ id, name, options });
});
@@ -32,13 +42,19 @@ export function assertTabbedEditor(chartSpecificTabbedEditorAssertionFn = () =>
cy.getByTestId("Chart.GlobalSeriesType").should("exist");
cy.getByTestId("VisualizationEditor.Tabs.Series").click();
cy.getByTestId("VisualizationEditor").find("table").should("exist");
cy.getByTestId("VisualizationEditor")
.find("table")
.should("exist");
cy.getByTestId("VisualizationEditor.Tabs.Colors").click();
cy.getByTestId("VisualizationEditor").find("table").should("exist");
cy.getByTestId("VisualizationEditor")
.find("table")
.should("exist");
cy.getByTestId("VisualizationEditor.Tabs.DataLabels").click();
cy.getByTestId("VisualizationEditor").getByTestId("Chart.DataLabels.ShowDataLabels").should("exist");
cy.getByTestId("VisualizationEditor")
.getByTestId("Chart.DataLabels.ShowDataLabels")
.should("exist");
chartSpecificTabbedEditorAssertionFn();
@@ -47,29 +63,39 @@ export function assertTabbedEditor(chartSpecificTabbedEditorAssertionFn = () =>
export function assertAxesAndAddLabels(xaxisLabel, yaxisLabel) {
cy.getByTestId("VisualizationEditor.Tabs.XAxis").click();
cy.getByTestId("Chart.XAxis.Type").contains(".ant-select-selection-item", "Auto Detect").should("exist");
cy.getByTestId("Chart.XAxis.Type")
.contains(".ant-select-selection-item", "Auto Detect")
.should("exist");
cy.getByTestId("Chart.XAxis.Name").clear().type(xaxisLabel);
cy.getByTestId("Chart.XAxis.Name")
.clear()
.type(xaxisLabel);
cy.getByTestId("VisualizationEditor.Tabs.YAxis").click();
cy.getByTestId("Chart.LeftYAxis.Type").contains(".ant-select-selection-item", "Linear").should("exist");
cy.getByTestId("Chart.LeftYAxis.Type")
.contains(".ant-select-selection-item", "Linear")
.should("exist");
cy.getByTestId("Chart.LeftYAxis.Name").clear().type(yaxisLabel);
cy.getByTestId("Chart.LeftYAxis.Name")
.clear()
.type(yaxisLabel);
cy.getByTestId("Chart.LeftYAxis.TickFormat").clear().type("+");
cy.getByTestId("Chart.LeftYAxis.TickFormat")
.clear()
.type("+");
cy.getByTestId("VisualizationEditor.Tabs.General").click();
}
export function createDashboardWithCharts(title, chartGetters, widgetsAssertionFn = () => {}) {
cy.createDashboard(title).then((dashboard) => {
cy.createDashboard(title).then(dashboard => {
const dashboardUrl = `/dashboards/${dashboard.id}`;
const widgetGetters = chartGetters.map((chartGetter) => `${chartGetter}Widget`);
const widgetGetters = chartGetters.map(chartGetter => `${chartGetter}Widget`);
chartGetters.forEach((chartGetter, i) => {
const position = { autoHeight: false, sizeY: 8, sizeX: 3, col: (i % 2) * 3 };
cy.get(`@${chartGetter}`)
.then((chart) => cy.addWidget(dashboard.id, chart.id, { position }))
.then(chart => cy.addWidget(dashboard.id, chart.id, { position }))
.as(widgetGetters[i]);
});

View File

@@ -1,26 +0,0 @@
"""set default alert selector
Revision ID: 1655999df5e3
Revises: 9e8c841d1a30
Create Date: 2025-07-09 14:44:00
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = '1655999df5e3'
down_revision = '9e8c841d1a30'
branch_labels = None
depends_on = None
def upgrade():
op.execute("""
UPDATE alerts
SET options = jsonb_set(options, '{selector}', '"first"')
WHERE options->>'selector' IS NULL;
""")
def downgrade():
pass
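
The UPDATE in this migration only touches alerts whose options JSON has no selector key, defaulting them to "first". A minimal Python sketch of the same idea applied to one options dict (illustrative values; the real change is a single bulk jsonb_set in Postgres):

options = {"op": "greater than", "value": 10}   # hypothetical alert options

# options->>'selector' IS NULL in the SQL corresponds to a missing/None key here
if options.get("selector") is None:
    options["selector"] = "first"

print(options)  # {'op': 'greater than', 'value': 10, 'selector': 'first'}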

View File

@@ -1,34 +0,0 @@
"""12-column dashboard layout
Revision ID: db0aca1ebd32
Revises: 1655999df5e3
Create Date: 2025-03-31 13:45:43.160893
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'db0aca1ebd32'
down_revision = '1655999df5e3'
branch_labels = None
depends_on = None
def upgrade():
op.execute("""
UPDATE widgets
SET options = jsonb_set(options, '{position,col}', to_json((options->'position'->>'col')::int * 2)::jsonb);
UPDATE widgets
SET options = jsonb_set(options, '{position,sizeX}', to_json((options->'position'->>'sizeX')::int * 2)::jsonb);
""")
def downgrade():
op.execute("""
UPDATE widgets
SET options = jsonb_set(options, '{position,col}', to_json((options->'position'->>'col')::int / 2)::jsonb);
UPDATE widgets
SET options = jsonb_set(options, '{position,sizeX}', to_json((options->'position'->>'sizeX')::int / 2)::jsonb);
""")

View File

@@ -1,6 +1,6 @@
{
"name": "redash-client",
"version": "25.10.0-dev",
"version": "25.1.0",
"description": "The frontend part of Redash.",
"main": "index.js",
"scripts": {
@@ -47,7 +47,7 @@
"@ant-design/icons": "^4.2.1",
"@redash/viz": "file:viz-lib",
"ace-builds": "^1.4.12",
"antd": "4.4.3",
"antd": "^4.4.3",
"axios": "0.27.2",
"axios-auth-refresh": "3.3.6",
"bootstrap": "^3.4.1",
@@ -100,7 +100,6 @@
"@types/sql-formatter": "^2.3.0",
"@typescript-eslint/eslint-plugin": "^2.10.0",
"@typescript-eslint/parser": "^2.10.0",
"assert": "^2.1.0",
"atob": "^2.1.2",
"babel-eslint": "^10.0.3",
"babel-jest": "^24.1.0",
@@ -139,24 +138,20 @@
"mini-css-extract-plugin": "^1.6.2",
"mockdate": "^2.0.2",
"npm-run-all": "^4.1.5",
"prettier": "3.3.2",
"process": "^0.11.10",
"prettier": "^1.19.1",
"raw-loader": "^0.5.1",
"react-refresh": "^0.14.0",
"react-test-renderer": "^16.14.0",
"request-cookies": "^1.1.0",
"source-map-loader": "^1.1.3",
"stream-browserify": "^3.0.0",
"style-loader": "^2.0.0",
"typescript": "4.1.2",
"url": "^0.11.4",
"typescript": "^4.1.2",
"url-loader": "^4.1.1",
"webpack": "^5.101.3",
"webpack-build-notifier": "^3.0.1",
"webpack": "^4.46.0",
"webpack-build-notifier": "^2.3.0",
"webpack-bundle-analyzer": "^4.9.0",
"webpack-cli": "^4.10.0",
"webpack-dev-server": "^4.15.1",
"webpack-manifest-plugin": "^5.0.1"
"webpack-manifest-plugin": "^2.0.4"
},
"optionalDependencies": {
"fsevents": "^2.3.2"

poetry.lock (generated, 3105 changed lines)

File diff suppressed because it is too large.

View File

@@ -1,17 +1,5 @@
[project]
name = "redash"
version = "25.10.0-dev"
requires-python = ">=3.8"
description = "Make Your Company Data Driven. Connect to any data source, easily visualize, dashboard and share your data."
authors = [
{ name = "Arik Fraimovich", email = "<arik@redash.io>" }
]
# to be added to/removed from the mailing list, please reach out to Arik via the above email or Discord
maintainers = [
{ name = "Redash maintainers and contributors", email = "<maintainers@redash.io>" }
]
readme = "README.md"
dependencies = []
[tool.black]
target-version = ['py38']
@@ -22,6 +10,17 @@ force-exclude = '''
)/
'''
[tool.poetry]
name = "redash"
version = "25.1.0"
description = "Make Your Company Data Driven. Connect to any data source, easily visualize, dashboard and share your data."
authors = ["Arik Fraimovich <arik@redash.io>"]
# to be added to/removed from the mailing list, please reach out to Arik via the above email or Discord
maintainers = [
"Redash maintainers and contributors <maintainers@redash.io>",
]
readme = "README.md"
[tool.poetry.dependencies]
python = ">=3.8,<3.11"
advocate = "1.0.0"
@@ -89,14 +88,13 @@ tzlocal = "4.3.1"
pyodbc = "5.1.0"
debugpy = "^1.8.9"
paramiko = "3.4.1"
oracledb = "2.5.1"
[tool.poetry.group.all_ds]
optional = true
[tool.poetry.group.all_ds.dependencies]
atsd-client = "3.0.5"
azure-kusto-data = "5.0.1"
azure-kusto-data = "0.0.35"
boto3 = "1.28.8"
botocore = "1.31.8"
cassandra-driver = "3.21.0"
@@ -111,11 +109,11 @@ influxdb = "5.2.3"
influxdb-client = "1.38.0"
memsql = "3.2.0"
mysqlclient = "2.1.1"
numpy = "1.24.4"
nzalchemy = "^11.0.2"
nzpy = ">=1.15"
oauth2client = "4.1.3"
openpyxl = "3.0.7"
oracledb = "2.1.2"
pandas = "1.3.4"
phoenixdb = "0.7"
pinotdb = ">=0.4.5"

View File

@@ -14,7 +14,7 @@ from redash.app import create_app # noqa
from redash.destinations import import_destinations
from redash.query_runner import import_query_runners
__version__ = "25.10.0-dev"
__version__ = "25.1.0"
if os.environ.get("REMOTE_DEBUG"):

View File

@@ -4,7 +4,7 @@ import requests
from authlib.integrations.flask_client import OAuth
from flask import Blueprint, flash, redirect, request, session, url_for
from redash import models, settings
from redash import models
from redash.authentication import (
create_and_login_user,
get_next_path,
@@ -29,41 +29,6 @@ def verify_profile(org, profile):
return False
def get_user_profile(access_token, logger):
headers = {"Authorization": f"OAuth {access_token}"}
response = requests.get("https://www.googleapis.com/oauth2/v1/userinfo", headers=headers)
if response.status_code == 401:
logger.warning("Failed getting user profile (response code 401).")
return None
return response.json()
def build_redirect_uri():
scheme = settings.GOOGLE_OAUTH_SCHEME_OVERRIDE or None
return url_for(".callback", _external=True, _scheme=scheme)
def build_next_path(org_slug=None):
next_path = request.args.get("next")
if not next_path:
if org_slug is None:
org_slug = session.get("org_slug")
scheme = None
if settings.GOOGLE_OAUTH_SCHEME_OVERRIDE:
scheme = settings.GOOGLE_OAUTH_SCHEME_OVERRIDE
next_path = url_for(
"redash.index",
org_slug=org_slug,
_external=True,
_scheme=scheme,
)
return next_path
def create_google_oauth_blueprint(app):
oauth = OAuth(app)
@@ -71,12 +36,23 @@ def create_google_oauth_blueprint(app):
blueprint = Blueprint("google_oauth", __name__)
CONF_URL = "https://accounts.google.com/.well-known/openid-configuration"
oauth = OAuth(app)
oauth.register(
name="google",
server_metadata_url=CONF_URL,
client_kwargs={"scope": "openid email profile"},
)
def get_user_profile(access_token):
headers = {"Authorization": "OAuth {}".format(access_token)}
response = requests.get("https://www.googleapis.com/oauth2/v1/userinfo", headers=headers)
if response.status_code == 401:
logger.warning("Failed getting user profile (response code 401).")
return None
return response.json()
@blueprint.route("/<org_slug>/oauth/google", endpoint="authorize_org")
def org_login(org_slug):
session["org_slug"] = current_org.slug
@@ -84,9 +60,9 @@ def create_google_oauth_blueprint(app):
@blueprint.route("/oauth/google", endpoint="authorize")
def login():
redirect_uri = build_redirect_uri()
redirect_uri = url_for(".callback", _external=True)
next_path = build_next_path()
next_path = request.args.get("next", url_for("redash.index", org_slug=session.get("org_slug")))
logger.debug("Callback url: %s", redirect_uri)
logger.debug("Next is: %s", next_path)
@@ -110,7 +86,7 @@ def create_google_oauth_blueprint(app):
flash("Validation error. Please retry.")
return redirect(url_for("redash.login"))
profile = get_user_profile(access_token, logger)
profile = get_user_profile(access_token)
if profile is None:
flash("Validation error. Please retry.")
return redirect(url_for("redash.login"))
@@ -134,9 +110,7 @@ def create_google_oauth_blueprint(app):
if user is None:
return logout_and_redirect_to_index()
unsafe_next_path = session.get("next_url")
if not unsafe_next_path:
unsafe_next_path = build_next_path(org.slug)
unsafe_next_path = session.get("next_url") or url_for("redash.index", org_slug=org.slug)
next_path = get_next_path(unsafe_next_path)
return redirect(next_path)
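
The build_redirect_uri/build_next_path helpers in this hunk rely on Flask's url_for accepting an explicit _scheme when building external URLs, which is what lets GOOGLE_OAUTH_SCHEME_OVERRIDE fix redirects behind a TLS-terminating proxy. A self-contained sketch of that behaviour (the route below is a hypothetical stand-in for the blueprint's callback endpoint):

from flask import Flask, url_for

app = Flask(__name__)

@app.route("/oauth/google/callback")
def callback():
    return "ok"

with app.test_request_context(base_url="http://redash.example.com"):
    # Default: the scheme comes from the (possibly proxied) request, i.e. http
    print(url_for("callback", _external=True))
    # With an override, as GOOGLE_OAUTH_SCHEME_OVERRIDE supplies, i.e. https
    print(url_for("callback", _external=True, _scheme="https"))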

View File

@@ -1,5 +1,3 @@
import html
import json
import logging
from copy import deepcopy
@@ -39,129 +37,6 @@ class Webex(BaseDestination):
@staticmethod
def formatted_attachments_template(subject, description, query_link, alert_link):
# Attempt to parse the description to find a 2D array
try:
# Extract the part of the description that looks like a JSON array
start_index = description.find("[")
end_index = description.rfind("]") + 1
json_array_str = description[start_index:end_index]
# Decode HTML entities
json_array_str = html.unescape(json_array_str)
# Replace single quotes with double quotes for valid JSON
json_array_str = json_array_str.replace("'", '"')
# Load the JSON array
data_array = json.loads(json_array_str)
# Check if it's a 2D array
if isinstance(data_array, list) and all(isinstance(i, list) for i in data_array):
# Create a table for the Adaptive Card
table_rows = []
for row in data_array:
table_rows.append(
{
"type": "ColumnSet",
"columns": [
{"type": "Column", "items": [{"type": "TextBlock", "text": str(item), "wrap": True}]}
for item in row
],
}
)
# Create the body of the card with the table
body = (
[
{
"type": "TextBlock",
"text": f"{subject}",
"weight": "bolder",
"size": "medium",
"wrap": True,
},
{
"type": "TextBlock",
"text": f"{description[:start_index]}",
"isSubtle": True,
"wrap": True,
},
]
+ table_rows
+ [
{
"type": "TextBlock",
"text": f"Click [here]({query_link}) to check your query!",
"wrap": True,
"isSubtle": True,
},
{
"type": "TextBlock",
"text": f"Click [here]({alert_link}) to check your alert!",
"wrap": True,
"isSubtle": True,
},
]
)
else:
# Fallback to the original description if no valid 2D array is found
body = [
{
"type": "TextBlock",
"text": f"{subject}",
"weight": "bolder",
"size": "medium",
"wrap": True,
},
{
"type": "TextBlock",
"text": f"{description}",
"isSubtle": True,
"wrap": True,
},
{
"type": "TextBlock",
"text": f"Click [here]({query_link}) to check your query!",
"wrap": True,
"isSubtle": True,
},
{
"type": "TextBlock",
"text": f"Click [here]({alert_link}) to check your alert!",
"wrap": True,
"isSubtle": True,
},
]
except json.JSONDecodeError:
# If parsing fails, fallback to the original description
body = [
{
"type": "TextBlock",
"text": f"{subject}",
"weight": "bolder",
"size": "medium",
"wrap": True,
},
{
"type": "TextBlock",
"text": f"{description}",
"isSubtle": True,
"wrap": True,
},
{
"type": "TextBlock",
"text": f"Click [here]({query_link}) to check your query!",
"wrap": True,
"isSubtle": True,
},
{
"type": "TextBlock",
"text": f"Click [here]({alert_link}) to check your alert!",
"wrap": True,
"isSubtle": True,
},
]
return [
{
"contentType": "application/vnd.microsoft.card.adaptive",
@@ -169,7 +44,44 @@ class Webex(BaseDestination):
"$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
"type": "AdaptiveCard",
"version": "1.0",
"body": body,
"body": [
{
"type": "ColumnSet",
"columns": [
{
"type": "Column",
"width": 4,
"items": [
{
"type": "TextBlock",
"text": {subject},
"weight": "bolder",
"size": "medium",
"wrap": True,
},
{
"type": "TextBlock",
"text": {description},
"isSubtle": True,
"wrap": True,
},
{
"type": "TextBlock",
"text": f"Click [here]({query_link}) to check your query!",
"wrap": True,
"isSubtle": True,
},
{
"type": "TextBlock",
"text": f"Click [here]({alert_link}) to check your alert!",
"wrap": True,
"isSubtle": True,
},
],
},
],
}
],
},
}
]
@@ -204,10 +116,6 @@ class Webex(BaseDestination):
# destinations is guaranteed to be a comma-separated string
for destination_id in destinations.split(","):
destination_id = destination_id.strip() # Remove any leading or trailing whitespace
if not destination_id: # Check if the destination_id is empty or blank
continue # Skip to the next iteration if it's empty or blank
payload = deepcopy(template_payload)
payload[payload_tag] = destination_id
self.post_message(payload, headers)
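
The formatted_attachments_template logic in this file boils down to: find a bracketed chunk in the alert description, coerce it into JSON, and only build a card table when it turns out to be a list of lists. A condensed sketch of that detection step (a simplification of the code above, with error handling trimmed):

import html
import json

def extract_2d_array(description):
    # Return a list-of-lists embedded in the text, or None (simplified).
    start, end = description.find("["), description.rfind("]") + 1
    if start == -1 or end == 0:
        return None
    chunk = html.unescape(description[start:end]).replace("'", '"')
    try:
        data = json.loads(chunk)
    except json.JSONDecodeError:
        return None
    if isinstance(data, list) and all(isinstance(row, list) for row in data):
        return data
    return None

print(extract_2d_array("rows: [['Col1', 'Col2'], ['Val1', 'Val2']]"))  # [['Col1', 'Col2'], ['Val1', 'Val2']]
print(extract_2d_array("plain alert body"))                            # None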

View File

@@ -255,12 +255,6 @@ def number_format_config():
}
def null_value_config():
return {
"nullValue": current_org.get_setting("null_value"),
}
def client_config():
if not current_user.is_api_user() and current_user.is_authenticated:
client_config = {
@@ -278,7 +272,6 @@ def client_config():
"showPermissionsControl": current_org.get_setting("feature_show_permissions_control"),
"hidePlotlyModeBar": current_org.get_setting("hide_plotly_mode_bar"),
"disablePublicUrls": current_org.get_setting("disable_public_urls"),
"multiByteSearchEnabled": current_org.get_setting("multi_byte_search_enabled"),
"allowCustomJSVisualizations": settings.FEATURE_ALLOW_CUSTOM_JS_VISUALIZATIONS,
"autoPublishNamedQueries": settings.FEATURE_AUTO_PUBLISH_NAMED_QUERIES,
"extendedAlertOptions": settings.FEATURE_EXTENDED_ALERT_OPTIONS,
@@ -296,7 +289,6 @@ def client_config():
client_config.update({"basePath": base_href()})
client_config.update(date_time_format_config())
client_config.update(number_format_config())
client_config.update(null_value_config())
return client_config

View File

@@ -26,8 +26,6 @@ order_map = {
"-name": "-lowercase_name",
"created_at": "created_at",
"-created_at": "-created_at",
"starred_at": "favorites-created_at",
"-starred_at": "-favorites-created_at",
}
order_results = partial(_order_results, default_order="-created_at", allowed_orders=order_map)

View File

@@ -44,8 +44,6 @@ order_map = {
"-executed_at": "-query_results-retrieved_at",
"created_by": "users-name",
"-created_by": "-users-name",
"starred_at": "favorites-created_at",
"-starred_at": "-favorites-created_at",
}
order_results = partial(_order_results, default_order="-created_at", allowed_orders=order_map)

View File

@@ -228,7 +228,7 @@ class DataSource(BelongsToOrgMixin, db.Model):
def _sort_schema(self, schema):
return [
{**i, "columns": sorted(i["columns"], key=lambda x: x["name"] if isinstance(x, dict) else x)}
{"name": i["name"], "columns": sorted(i["columns"], key=lambda x: x["name"] if isinstance(x, dict) else x)}
for i in sorted(schema, key=lambda x: x["name"])
]
@@ -564,7 +564,7 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
db.session.query(tag_column, usage_count)
.group_by(tag_column)
.filter(Query.id.in_(queries.options(load_only("id"))))
.order_by(tag_column)
.order_by(usage_count.desc())
)
return query
@@ -908,7 +908,6 @@ def next_state(op, value, threshold):
# boolean value is Python specific and most likely will be confusing to
# users.
value = str(value).lower()
value_is_number = False
else:
try:
value = float(value)
@@ -1137,7 +1136,7 @@ class Dashboard(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model
db.session.query(tag_column, usage_count)
.group_by(tag_column)
.filter(Dashboard.id.in_(dashboards.options(load_only("id"))))
.order_by(tag_column)
.order_by(usage_count.desc())
)
return query
@@ -1145,19 +1144,15 @@ class Dashboard(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model
def favorites(cls, user, base_query=None):
if base_query is None:
base_query = cls.all(user.org, user.group_ids, user.id)
return (
base_query.distinct(cls.lowercase_name, Dashboard.created_at, Dashboard.slug, Favorite.created_at)
.join(
(
Favorite,
and_(
Favorite.object_type == "Dashboard",
Favorite.object_id == Dashboard.id,
),
)
return base_query.join(
(
Favorite,
and_(
Favorite.object_type == "Dashboard",
Favorite.object_id == Dashboard.id,
),
)
.filter(Favorite.user_id == user.id)
)
).filter(Favorite.user_id == user.id)
@classmethod
def by_user(cls, user):

View File

@@ -288,10 +288,7 @@ class BaseSQLQueryRunner(BaseQueryRunner):
return True
def query_is_select_no_limit(self, query):
parsed_query_list = sqlparse.parse(query)
if len(parsed_query_list) == 0:
return False
parsed_query = parsed_query_list[0]
parsed_query = sqlparse.parse(query)[0]
last_keyword_idx = find_last_keyword_idx(parsed_query)
# Either invalid query or query that is not select
if last_keyword_idx == -1 or parsed_query.tokens[0].value.upper() != "SELECT":
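
The guard added here protects against sqlparse returning no statements at all (for example on an empty or whitespace-only query) before the code indexes into the parse result. In isolation, assuming sqlparse is installed:

import sqlparse

def first_statement(query):
    parsed = sqlparse.parse(query)
    if len(parsed) == 0:   # nothing to inspect, so the caller can bail out early
        return None
    return parsed[0]

print(first_statement("SELECT 1"))  # a sqlparse Statement object
print(first_statement(""))          # None whenever parse() yields no statements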

View File

@@ -11,12 +11,12 @@ from redash.query_runner import (
from redash.utils import json_loads
try:
from azure.kusto.data import (
from azure.kusto.data.exceptions import KustoServiceError
from azure.kusto.data.request import (
ClientRequestProperties,
KustoClient,
KustoConnectionStringBuilder,
)
from azure.kusto.data.exceptions import KustoServiceError
enabled = True
except ImportError:
@@ -37,34 +37,6 @@ TYPES_MAP = {
}
def _get_data_scanned(kusto_response):
try:
metadata_table = next(
(table for table in kusto_response.tables if table.table_name == "QueryCompletionInformation"),
None,
)
if metadata_table:
resource_usage_json = next(
(row["Payload"] for row in metadata_table.rows if row["EventTypeName"] == "QueryResourceConsumption"),
"{}",
)
resource_usage = json_loads(resource_usage_json).get("resource_usage", {})
data_scanned = (
resource_usage["cache"]["shards"]["cold"]["hitbytes"]
+ resource_usage["cache"]["shards"]["cold"]["missbytes"]
+ resource_usage["cache"]["shards"]["hot"]["hitbytes"]
+ resource_usage["cache"]["shards"]["hot"]["missbytes"]
+ resource_usage["cache"]["shards"]["bypassbytes"]
)
except Exception:
data_scanned = 0
return int(data_scanned)
class AzureKusto(BaseQueryRunner):
should_annotate_query = False
noop_query = "let noop = datatable (Noop:string)[1]; noop"
@@ -72,6 +44,8 @@ class AzureKusto(BaseQueryRunner):
def __init__(self, configuration):
super(AzureKusto, self).__init__(configuration)
self.syntax = "custom"
self.client_request_properties = ClientRequestProperties()
self.client_request_properties.application = "redash"
@classmethod
def configuration_schema(cls):
@@ -86,14 +60,12 @@ class AzureKusto(BaseQueryRunner):
},
"azure_ad_tenant_id": {"type": "string", "title": "Azure AD Tenant Id"},
"database": {"type": "string"},
"msi": {"type": "boolean", "title": "Use Managed Service Identity"},
"user_msi": {
"type": "string",
"title": "User-assigned managed identity client ID",
},
},
"required": [
"cluster",
"azure_ad_client_id",
"azure_ad_client_secret",
"azure_ad_tenant_id",
"database",
],
"order": [
@@ -119,48 +91,18 @@ class AzureKusto(BaseQueryRunner):
return "Azure Data Explorer (Kusto)"
def run_query(self, query, user):
cluster = self.configuration["cluster"]
msi = self.configuration.get("msi", False)
# Managed Service Identity(MSI)
if msi:
# If user-assigned managed identity is used, the client ID must be provided
if self.configuration.get("user_msi"):
kcsb = KustoConnectionStringBuilder.with_aad_managed_service_identity_authentication(
cluster,
client_id=self.configuration["user_msi"],
)
else:
kcsb = KustoConnectionStringBuilder.with_aad_managed_service_identity_authentication(cluster)
# Service Principal auth
else:
aad_app_id = self.configuration.get("azure_ad_client_id")
app_key = self.configuration.get("azure_ad_client_secret")
authority_id = self.configuration.get("azure_ad_tenant_id")
if not (aad_app_id and app_key and authority_id):
raise ValueError(
"Azure AD Client ID, Client Secret, and Tenant ID are required for Service Principal authentication."
)
kcsb = KustoConnectionStringBuilder.with_aad_application_key_authentication(
connection_string=cluster,
aad_app_id=aad_app_id,
app_key=app_key,
authority_id=authority_id,
)
kcsb = KustoConnectionStringBuilder.with_aad_application_key_authentication(
connection_string=self.configuration["cluster"],
aad_app_id=self.configuration["azure_ad_client_id"],
app_key=self.configuration["azure_ad_client_secret"],
authority_id=self.configuration["azure_ad_tenant_id"],
)
client = KustoClient(kcsb)
request_properties = ClientRequestProperties()
request_properties.application = "redash"
if user:
request_properties.user = user.email
request_properties.set_option("request_description", user.email)
db = self.configuration["database"]
try:
response = client.execute(db, query, request_properties)
response = client.execute(db, query, self.client_request_properties)
result_cols = response.primary_results[0].columns
result_rows = response.primary_results[0].rows
@@ -181,15 +123,14 @@ class AzureKusto(BaseQueryRunner):
rows.append(row.to_dict())
error = None
data = {
"columns": columns,
"rows": rows,
"metadata": {"data_scanned": _get_data_scanned(response)},
}
data = {"columns": columns, "rows": rows}
except KustoServiceError as err:
data = None
error = str(err)
try:
error = err.args[1][0]["error"]["@message"]
except (IndexError, KeyError):
error = err.args[1]
return data, error
@@ -202,10 +143,7 @@ class AzureKusto(BaseQueryRunner):
self._handle_run_query_error(error)
schema_as_json = json_loads(results["rows"][0]["DatabaseSchema"])
tables_list = [
*(schema_as_json["Databases"][self.configuration["database"]]["Tables"].values()),
*(schema_as_json["Databases"][self.configuration["database"]]["MaterializedViews"].values()),
]
tables_list = schema_as_json["Databases"][self.configuration["database"]]["Tables"].values()
schema = {}
@@ -216,9 +154,7 @@ class AzureKusto(BaseQueryRunner):
schema[table_name] = {"name": table_name, "columns": []}
for column in table["OrderedColumns"]:
schema[table_name]["columns"].append(
{"name": column["Name"], "type": TYPES_MAP.get(column["CslType"], None)}
)
schema[table_name]["columns"].append(column["Name"])
return list(schema.values())
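
The _get_data_scanned helper in this hunk sums the cache byte counters Kusto reports in its QueryResourceConsumption payload. A worked sketch of that arithmetic on an illustrative resource_usage dict (the numbers are made up):

def data_scanned_bytes(resource_usage):
    shards = resource_usage["cache"]["shards"]
    return (shards["cold"]["hitbytes"] + shards["cold"]["missbytes"]
            + shards["hot"]["hitbytes"] + shards["hot"]["missbytes"]
            + shards["bypassbytes"])

sample = {  # illustrative numbers only
    "cache": {"shards": {"cold": {"hitbytes": 1024, "missbytes": 2048},
                         "hot": {"hitbytes": 4096, "missbytes": 0},
                         "bypassbytes": 512}}
}
print(data_scanned_bytes(sample))  # 7680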

View File

@@ -12,7 +12,7 @@ from redash.query_runner import (
TYPE_FLOAT,
TYPE_INTEGER,
TYPE_STRING,
BaseSQLQueryRunner,
BaseQueryRunner,
InterruptException,
JobTimeoutException,
register,
@@ -86,7 +86,7 @@ def _get_query_results(jobs, project_id, location, job_id, start_index):
).execute()
logging.debug("query_reply %s", query_reply)
if not query_reply["jobComplete"]:
time.sleep(1)
time.sleep(10)
return _get_query_results(jobs, project_id, location, job_id, start_index)
return query_reply
@@ -98,7 +98,7 @@ def _get_total_bytes_processed_for_resp(bq_response):
return int(bq_response.get("totalBytesProcessed", "0"))
class BigQuery(BaseSQLQueryRunner):
class BigQuery(BaseQueryRunner):
noop_query = "SELECT 1"
def __init__(self, configuration):
@@ -156,11 +156,6 @@ class BigQuery(BaseSQLQueryRunner):
"secret": ["jsonKeyFile"],
}
def annotate_query(self, query, metadata):
# Remove "Job ID" before annotating the query to avoid cache misses
metadata = {k: v for k, v in metadata.items() if k != "Job ID"}
return super().annotate_query(query, metadata)
def _get_bigquery_service(self):
socket.setdefaulttimeout(settings.BIGQUERY_HTTP_TIMEOUT)
@@ -220,12 +215,11 @@ class BigQuery(BaseSQLQueryRunner):
job_data = self._get_job_data(query)
insert_response = jobs.insert(projectId=project_id, body=job_data).execute()
self.current_job_id = insert_response["jobReference"]["jobId"]
self.current_job_location = insert_response["jobReference"]["location"]
current_row = 0
query_reply = _get_query_results(
jobs,
project_id=project_id,
location=self.current_job_location,
location=self._get_location(),
job_id=self.current_job_id,
start_index=current_row,
)
@@ -242,11 +236,13 @@ class BigQuery(BaseSQLQueryRunner):
query_result_request = {
"projectId": project_id,
"jobId": self.current_job_id,
"jobId": query_reply["jobReference"]["jobId"],
"startIndex": current_row,
"location": self.current_job_location,
}
if self._get_location():
query_result_request["location"] = self._get_location()
query_reply = jobs.getQueryResults(**query_result_request).execute()
columns = [
@@ -308,70 +304,28 @@ class BigQuery(BaseSQLQueryRunner):
datasets = self._get_project_datasets(project_id)
query_base = """
SELECT table_schema, table_name, field_path, data_type, description
SELECT table_schema, table_name, field_path
FROM `{dataset_id}`.INFORMATION_SCHEMA.COLUMN_FIELD_PATHS
WHERE table_schema NOT IN ('information_schema')
"""
table_query_base = """
SELECT table_schema, table_name, JSON_VALUE(option_value) as table_description
FROM `{dataset_id}`.INFORMATION_SCHEMA.TABLE_OPTIONS
WHERE table_schema NOT IN ('information_schema')
AND option_name = 'description'
"""
location_dataset_ids = {}
schema = {}
queries = []
for dataset in datasets:
dataset_id = dataset["datasetReference"]["datasetId"]
location = dataset["location"]
if self._get_location() and location != self._get_location():
logger.debug("dataset location is different: %s", location)
continue
query = query_base.format(dataset_id=dataset_id)
queries.append(query)
if location not in location_dataset_ids:
location_dataset_ids[location] = []
location_dataset_ids[location].append(dataset_id)
query = "\nUNION ALL\n".join(queries)
results, error = self.run_query(query, None)
if error is not None:
self._handle_run_query_error(error)
for location, datasets in location_dataset_ids.items():
queries = []
for dataset_id in datasets:
query = query_base.format(dataset_id=dataset_id)
queries.append(query)
query = "\nUNION ALL\n".join(queries)
results, error = self.run_query(query, None)
if error is not None:
self._handle_run_query_error(error)
for row in results["rows"]:
table_name = "{0}.{1}".format(row["table_schema"], row["table_name"])
if table_name not in schema:
schema[table_name] = {"name": table_name, "columns": []}
schema[table_name]["columns"].append(
{
"name": row["field_path"],
"type": row["data_type"],
"description": row["description"],
}
)
table_queries = []
for dataset_id in datasets:
table_query = table_query_base.format(dataset_id=dataset_id)
table_queries.append(table_query)
table_query = "\nUNION ALL\n".join(table_queries)
results, error = self.run_query(table_query, None)
if error is not None:
self._handle_run_query_error(error)
for row in results["rows"]:
table_name = "{0}.{1}".format(row["table_schema"], row["table_name"])
if table_name not in schema:
schema[table_name] = {"name": table_name, "columns": []}
if "table_description" in row:
schema[table_name]["description"] = row["table_description"]
for row in results["rows"]:
table_name = "{0}.{1}".format(row["table_schema"], row["table_name"])
if table_name not in schema:
schema[table_name] = {"name": table_name, "columns": []}
schema[table_name]["columns"].append(row["field_path"])
return list(schema.values())
@@ -405,7 +359,7 @@ class BigQuery(BaseSQLQueryRunner):
self._get_bigquery_service().jobs().cancel(
projectId=self._get_project_id(),
jobId=self.current_job_id,
location=self.current_job_location,
location=self._get_location(),
).execute()
raise
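
The schema-loading rewrite in this file groups datasets by location and issues one UNION ALL query per location, rather than a single query spanning every dataset. A minimal sketch of that grouping and query-building step (the dataset dicts mimic the shape of a BigQuery datasets.list response; the values are illustrative):

QUERY_BASE = (
    "SELECT table_schema, table_name, field_path, data_type, description "
    "FROM `{dataset_id}`.INFORMATION_SCHEMA.COLUMN_FIELD_PATHS "
    "WHERE table_schema NOT IN ('information_schema')"
)

datasets = [  # illustrative subset of a datasets.list() response
    {"datasetReference": {"datasetId": "sales"}, "location": "US"},
    {"datasetReference": {"datasetId": "ops"}, "location": "EU"},
    {"datasetReference": {"datasetId": "marketing"}, "location": "US"},
]

by_location = {}
for dataset in datasets:
    by_location.setdefault(dataset["location"], []).append(
        dataset["datasetReference"]["datasetId"]
    )

for location, dataset_ids in by_location.items():
    query = "\nUNION ALL\n".join(QUERY_BASE.format(dataset_id=d) for d in dataset_ids)
    # In the runner this is the point where run_query(query, None) executes per location.
    print(location, "->", len(dataset_ids), "dataset(s) in one UNION ALL query")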

View File

@@ -77,11 +77,7 @@ class ClickHouse(BaseSQLQueryRunner):
self._url = self._url._replace(netloc="{}:{}".format(self._url.hostname, port))
def _get_tables(self, schema):
query = """
SELECT database, table, name, type as data_type
FROM system.columns
WHERE database NOT IN ('system', 'information_schema', 'INFORMATION_SCHEMA')
"""
query = "SELECT database, table, name FROM system.columns WHERE database NOT IN ('system')"
results, error = self.run_query(query, None)
@@ -94,7 +90,7 @@ class ClickHouse(BaseSQLQueryRunner):
if table_name not in schema:
schema[table_name] = {"name": table_name, "columns": []}
schema[table_name]["columns"].append({"name": row["name"], "type": row["data_type"]})
schema[table_name]["columns"].append(row["name"])
return list(schema.values())

View File

@@ -92,7 +92,7 @@ class BaseElasticSearch(BaseQueryRunner):
logger.setLevel(logging.DEBUG)
self.server_url = self.configuration.get("server", "")
if self.server_url and self.server_url[-1] == "/":
if self.server_url[-1] == "/":
self.server_url = self.server_url[:-1]
basic_auth_user = self.configuration.get("basic_auth_user", None)

View File

@@ -34,13 +34,9 @@ class ResultSet:
def parse_issue(issue, field_mapping): # noqa: C901
result = OrderedDict()
result["key"] = issue["key"]
# Handle API v3 response format: key field may be missing, use id as fallback
result["key"] = issue.get("key", issue.get("id", "unknown"))
# Handle API v3 response format: fields may be missing
fields = issue.get("fields", {})
for k, v in fields.items(): #
for k, v in issue["fields"].items(): #
output_name = field_mapping.get_output_field_name(k)
member_names = field_mapping.get_dict_members(k)
@@ -102,9 +98,7 @@ def parse_issues(data, field_mapping):
def parse_count(data):
results = ResultSet()
# API v3 may not return 'total' field, fallback to counting issues
count = data.get("total", len(data.get("issues", [])))
results.add_row({"count": count})
results.add_row({"count": data["total"]})
return results
@@ -166,26 +160,18 @@ class JiraJQL(BaseHTTPQueryRunner):
self.syntax = "json"
def run_query(self, query, user):
# Updated to API v3 endpoint, fix double slash issue
jql_url = "{}/rest/api/3/search/jql".format(self.configuration["url"].rstrip("/"))
jql_url = "{}/rest/api/2/search".format(self.configuration["url"])
query = json_loads(query)
query_type = query.pop("queryType", "select")
field_mapping = FieldMapping(query.pop("fieldMapping", {}))
# API v3 requires mandatory jql parameter with restrictions
if "jql" not in query or not query["jql"]:
query["jql"] = "created >= -30d order by created DESC"
if query_type == "count":
query["maxResults"] = 1
query["fields"] = ""
else:
query["maxResults"] = query.get("maxResults", 1000)
if "fields" not in query:
query["fields"] = "*all"
response, error = self.get_response(jql_url, params=query)
if error is not None:
return None, error
@@ -196,15 +182,17 @@ class JiraJQL(BaseHTTPQueryRunner):
results = parse_count(data)
else:
results = parse_issues(data, field_mapping)
index = data["startAt"] + data["maxResults"]
# API v3 uses token-based pagination instead of startAt/total
while not data.get("isLast", True) and "nextPageToken" in data:
query["nextPageToken"] = data["nextPageToken"]
while data["total"] > index:
query["startAt"] = index
response, error = self.get_response(jql_url, params=query)
if error is not None:
return None, error
data = response.json()
index = data["startAt"] + data["maxResults"]
addl_results = parse_issues(data, field_mapping)
results.merge(addl_results)
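
The pagination change in this hunk swaps startAt/total arithmetic for the token-based scheme of the v3 search endpoint (isLast plus nextPageToken). A standalone sketch of that loop shape (requests-based; the base_url and session auth are placeholders rather than the runner's actual configuration plumbing):

import requests

def fetch_all_issues(base_url, jql, session=None):
    session = session or requests.Session()  # caller attaches auth to the session
    params = {"jql": jql, "maxResults": 100}
    issues = []
    while True:
        resp = session.get(base_url.rstrip("/") + "/rest/api/3/search/jql",
                           params=params, timeout=30)
        resp.raise_for_status()
        data = resp.json()
        issues.extend(data.get("issues", []))
        if data.get("isLast", True) or "nextPageToken" not in data:
            break
        params["nextPageToken"] = data["nextPageToken"]
    return issues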

View File

@@ -215,10 +215,10 @@ class MongoDB(BaseQueryRunner):
if readPreference:
kwargs["readPreference"] = readPreference
if self.configuration.get("username"):
if "username" in self.configuration:
kwargs["username"] = self.configuration["username"]
if self.configuration.get("password"):
if "password" in self.configuration:
kwargs["password"] = self.configuration["password"]
db_connection = pymongo.MongoClient(self.configuration["connectionString"], **kwargs)
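
The one-line change here matters when the saved configuration contains the key with an empty value: the membership test "username" in configuration is true even for an empty string, while configuration.get("username") is falsy, so no credential kwargs get passed to pymongo. A tiny illustration:

configuration = {"username": "", "password": ""}  # hypothetical saved data source config

kwargs_old, kwargs_new = {}, {}
if "username" in configuration:           # membership test: adds an empty username
    kwargs_old["username"] = configuration["username"]
if configuration.get("username"):         # truthiness test: skips blank values entirely
    kwargs_new["username"] = configuration["username"]

print(kwargs_old)  # {'username': ''}
print(kwargs_new)  # {}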

View File

@@ -150,11 +150,9 @@ class Mysql(BaseSQLQueryRunner):
query = """
SELECT col.table_schema as table_schema,
col.table_name as table_name,
col.column_name as column_name,
col.data_type as data_type,
col.column_comment as column_comment
col.column_name as column_name
FROM `information_schema`.`columns` col
WHERE LOWER(col.table_schema) NOT IN ('information_schema', 'performance_schema', 'mysql', 'sys');
WHERE col.table_schema NOT IN ('information_schema', 'performance_schema', 'mysql', 'sys');
"""
results, error = self.run_query(query, None)
@@ -171,38 +169,7 @@ class Mysql(BaseSQLQueryRunner):
if table_name not in schema:
schema[table_name] = {"name": table_name, "columns": []}
schema[table_name]["columns"].append(
{
"name": row["column_name"],
"type": row["data_type"],
"description": row["column_comment"],
}
)
table_query = """
SELECT col.table_schema as table_schema,
col.table_name as table_name,
col.table_comment as table_comment
FROM `information_schema`.`tables` col
WHERE LOWER(col.table_schema) NOT IN ('information_schema', 'performance_schema', 'mysql', 'sys'); \
"""
results, error = self.run_query(table_query, None)
if error is not None:
self._handle_run_query_error(error)
for row in results["rows"]:
if row["table_schema"] != self.configuration["db"]:
table_name = "{}.{}".format(row["table_schema"], row["table_name"])
else:
table_name = row["table_name"]
if table_name not in schema:
schema[table_name] = {"name": table_name, "columns": []}
if "table_comment" in row and row["table_comment"]:
schema[table_name]["description"] = row["table_comment"]
schema[table_name]["columns"].append(row["column_name"])
return list(schema.values())

View File

@@ -205,15 +205,24 @@ class PostgreSQL(BaseSQLQueryRunner):
def _get_tables(self, schema):
"""
relkind constants from https://www.postgresql.org/docs/current/catalog-pg-class.html
relkind constants per https://www.postgresql.org/docs/10/static/catalog-pg-class.html
r = regular table
v = view
m = materialized view
f = foreign table
p = partitioned table (new in 10)
---
i = index
S = sequence
t = TOAST table
c = composite type
"""
query = """
SELECT s.nspname AS table_schema,
c.relname AS table_name,
a.attname AS column_name,
NULL AS data_type
SELECT s.nspname as table_schema,
c.relname as table_name,
a.attname as column_name,
null as data_type
FROM pg_class c
JOIN pg_namespace s
ON c.relnamespace = s.oid
@@ -222,7 +231,7 @@ class PostgreSQL(BaseSQLQueryRunner):
ON a.attrelid = c.oid
AND a.attnum > 0
AND NOT a.attisdropped
WHERE c.relkind = 'm'
WHERE c.relkind IN ('m', 'f', 'p')
AND has_table_privilege(s.nspname || '.' || c.relname, 'select')
AND has_schema_privilege(s.nspname, 'usage')
@@ -234,8 +243,6 @@ class PostgreSQL(BaseSQLQueryRunner):
data_type
FROM information_schema.columns
WHERE table_schema NOT IN ('pg_catalog', 'information_schema')
AND has_table_privilege(table_schema || '.' || table_name, 'select')
AND has_schema_privilege(table_schema, 'usage')
"""
self._get_definitions(schema, query)

View File

@@ -1,14 +1,11 @@
try:
import snowflake.connector
from cryptography.hazmat.primitives.serialization import load_pem_private_key
enabled = True
except ImportError:
enabled = False
from base64 import b64decode
from redash import __version__
from redash.query_runner import (
TYPE_BOOLEAN,
@@ -46,8 +43,6 @@ class Snowflake(BaseSQLQueryRunner):
"account": {"type": "string"},
"user": {"type": "string"},
"password": {"type": "string"},
"private_key_File": {"type": "string"},
"private_key_pwd": {"type": "string"},
"warehouse": {"type": "string"},
"database": {"type": "string"},
"region": {"type": "string", "default": "us-west"},
@@ -62,15 +57,13 @@ class Snowflake(BaseSQLQueryRunner):
"account",
"user",
"password",
"private_key_File",
"private_key_pwd",
"warehouse",
"database",
"region",
"host",
],
"required": ["user", "account", "database", "warehouse"],
"secret": ["password", "private_key_File", "private_key_pwd"],
"required": ["user", "password", "account", "database", "warehouse"],
"secret": ["password"],
"extra_options": [
"host",
],
@@ -95,7 +88,7 @@ class Snowflake(BaseSQLQueryRunner):
if region == "us-west":
region = None
if self.configuration.get("host"):
if self.configuration.__contains__("host"):
host = self.configuration.get("host")
else:
if region:
@@ -103,29 +96,14 @@ class Snowflake(BaseSQLQueryRunner):
else:
host = "{}.snowflakecomputing.com".format(account)
params = {
"user": self.configuration["user"],
"account": account,
"region": region,
"host": host,
"application": "Redash/{} (Snowflake)".format(__version__.split("-")[0]),
}
if self.configuration.get("password"):
params["password"] = self.configuration["password"]
elif self.configuration.get("private_key_File"):
private_key_b64 = self.configuration.get("private_key_File")
private_key_bytes = b64decode(private_key_b64)
if self.configuration.get("private_key_pwd"):
private_key_pwd = self.configuration.get("private_key_pwd").encode()
else:
private_key_pwd = None
private_key_pem = load_pem_private_key(private_key_bytes, private_key_pwd)
params["private_key"] = private_key_pem
else:
raise Exception("Neither password nor private_key_b64 is set.")
connection = snowflake.connector.connect(**params)
connection = snowflake.connector.connect(
user=self.configuration["user"],
password=self.configuration["password"],
account=account,
region=region,
host=host,
application="Redash/{} (Snowflake)".format(__version__.split("-")[0]),
)
return connection
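
The key-pair branch in this hunk decodes a base64-encoded PEM key and hands the loaded key object to the connector in place of a password. A minimal sketch of just the key-loading step (assumes the cryptography package; it generates a throwaway key so the example stays self-contained instead of reading a real private_key_File value):

from base64 import b64decode, b64encode

from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives.serialization import load_pem_private_key

# Stand-in for the base64-encoded PEM the data source configuration would hold.
throwaway = rsa.generate_private_key(public_exponent=65537, key_size=2048)
pem = throwaway.private_bytes(
    encoding=serialization.Encoding.PEM,
    format=serialization.PrivateFormat.PKCS8,
    encryption_algorithm=serialization.NoEncryption(),
)
private_key_b64 = b64encode(pem).decode()

# What the runner does with configuration["private_key_File"] (and an optional passphrase):
private_key = load_pem_private_key(b64decode(private_key_b64), password=None)
print(type(private_key).__name__)  # an RSA private key object, passed on as params["private_key"]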

View File

@@ -1,6 +1,6 @@
import functools
from flask import request, session
from flask import session
from flask_login import current_user
from flask_talisman import talisman
from flask_wtf.csrf import CSRFProtect, generate_csrf
@@ -25,7 +25,6 @@ def init_app(app):
app.config["WTF_CSRF_CHECK_DEFAULT"] = False
app.config["WTF_CSRF_SSL_STRICT"] = False
app.config["WTF_CSRF_TIME_LIMIT"] = settings.CSRF_TIME_LIMIT
app.config["SESSION_COOKIE_NAME"] = settings.SESSION_COOKIE_NAME
@app.after_request
def inject_csrf_token(response):
@@ -36,15 +35,6 @@ def init_app(app):
@app.before_request
def check_csrf():
# BEGIN workaround until https://github.com/lepture/flask-wtf/pull/419 is merged
if request.blueprint in csrf._exempt_blueprints:
return
view = app.view_functions.get(request.endpoint)
if view is not None and f"{view.__module__}.{view.__name__}" in csrf._exempt_views:
return
# END workaround
if not current_user.is_authenticated or "user_id" in session:
csrf.protect()

View File

@@ -82,19 +82,9 @@ class QuerySerializer(Serializer):
else:
result = [serialize_query(query, **self.options) for query in self.object_or_list]
if self.options.get("with_favorite_state", True):
queries = list(self.object_or_list)
favorites = models.Favorite.query.filter(
models.Favorite.object_id.in_([o.id for o in queries]),
models.Favorite.object_type == "Query",
models.Favorite.user_id == current_user.id,
)
favorites_dict = {fav.object_id: fav for fav in favorites}
favorite_ids = models.Favorite.are_favorites(current_user.id, self.object_or_list)
for query in result:
favorite = favorites_dict.get(query["id"])
query["is_favorite"] = favorite is not None
if favorite:
query["starred_at"] = favorite.created_at
query["is_favorite"] = query["id"] in favorite_ids
return result
@@ -273,19 +263,9 @@ class DashboardSerializer(Serializer):
else:
result = [serialize_dashboard(obj, **self.options) for obj in self.object_or_list]
if self.options.get("with_favorite_state", True):
dashboards = list(self.object_or_list)
favorites = models.Favorite.query.filter(
models.Favorite.object_id.in_([o.id for o in dashboards]),
models.Favorite.object_type == "Dashboard",
models.Favorite.user_id == current_user.id,
)
favorites_dict = {fav.object_id: fav for fav in favorites}
for query in result:
favorite = favorites_dict.get(query["id"])
query["is_favorite"] = favorite is not None
if favorite:
query["starred_at"] = favorite.created_at
favorite_ids = models.Favorite.are_favorites(current_user.id, self.object_or_list)
for obj in result:
obj["is_favorite"] = obj["id"] in favorite_ids
return result

View File

@@ -82,7 +82,6 @@ SESSION_COOKIE_SECURE = parse_boolean(os.environ.get("REDASH_SESSION_COOKIE_SECU
# Whether the session cookie is set HttpOnly.
SESSION_COOKIE_HTTPONLY = parse_boolean(os.environ.get("REDASH_SESSION_COOKIE_HTTPONLY", "true"))
SESSION_EXPIRY_TIME = int(os.environ.get("REDASH_SESSION_EXPIRY_TIME", 60 * 60 * 6))
SESSION_COOKIE_NAME = os.environ.get("REDASH_SESSION_COOKIE_NAME", "session")
# Whether the session cookie is set to secure.
REMEMBER_COOKIE_SECURE = parse_boolean(os.environ.get("REDASH_REMEMBER_COOKIE_SECURE") or str(COOKIES_SECURE))
@@ -136,13 +135,6 @@ FEATURE_POLICY = os.environ.get("REDASH_FEATURE_POLICY", "")
MULTI_ORG = parse_boolean(os.environ.get("REDASH_MULTI_ORG", "false"))
# If Redash is behind a proxy it might sometimes receive a X-Forwarded-Proto of HTTP
# even if your actual Redash URL scheme is HTTPS. This will cause Flask to build
# the OAuth redirect URL incorrectly thus failing auth. This is especially common if
# you're behind a SSL/TCP configured AWS ELB or similar.
# This setting will force the URL scheme.
GOOGLE_OAUTH_SCHEME_OVERRIDE = os.environ.get("REDASH_GOOGLE_OAUTH_SCHEME_OVERRIDE", "")
GOOGLE_CLIENT_ID = os.environ.get("REDASH_GOOGLE_CLIENT_ID", "")
GOOGLE_CLIENT_SECRET = os.environ.get("REDASH_GOOGLE_CLIENT_SECRET", "")
GOOGLE_OAUTH_ENABLED = bool(GOOGLE_CLIENT_ID and GOOGLE_CLIENT_SECRET)

View File

@@ -27,7 +27,6 @@ DATE_FORMAT = os.environ.get("REDASH_DATE_FORMAT", "DD/MM/YY")
TIME_FORMAT = os.environ.get("REDASH_TIME_FORMAT", "HH:mm")
INTEGER_FORMAT = os.environ.get("REDASH_INTEGER_FORMAT", "0,0")
FLOAT_FORMAT = os.environ.get("REDASH_FLOAT_FORMAT", "0,0.00")
NULL_VALUE = os.environ.get("REDASH_NULL_VALUE", "null")
MULTI_BYTE_SEARCH_ENABLED = parse_boolean(os.environ.get("MULTI_BYTE_SEARCH_ENABLED", "false"))
JWT_LOGIN_ENABLED = parse_boolean(os.environ.get("REDASH_JWT_LOGIN_ENABLED", "false"))
@@ -60,7 +59,6 @@ settings = {
"time_format": TIME_FORMAT,
"integer_format": INTEGER_FORMAT,
"float_format": FLOAT_FORMAT,
"null_value": NULL_VALUE,
"multi_byte_search_enabled": MULTI_BYTE_SEARCH_ENABLED,
"auth_jwt_login_enabled": JWT_LOGIN_ENABLED,
"auth_jwt_auth_issuer": JWT_AUTH_ISSUER,

View File

@@ -6,7 +6,6 @@ import decimal
import hashlib
import io
import json
import math
import os
import random
import re
@@ -121,17 +120,6 @@ def json_loads(data, *args, **kwargs):
return json.loads(data, *args, **kwargs)
# Convert NaN, Inf, and -Inf to None, as they are not valid JSON values.
def _sanitize_data(data):
if isinstance(data, dict):
return {k: _sanitize_data(v) for k, v in data.items()}
if isinstance(data, list):
return [_sanitize_data(v) for v in data]
if isinstance(data, float) and (math.isnan(data) or math.isinf(data)):
return None
return data
def json_dumps(data, *args, **kwargs):
"""A custom JSON dumping function which passes all parameters to the
json.dumps function."""
@@ -140,7 +128,7 @@ def json_dumps(data, *args, **kwargs):
# Float value nan or inf in Python should be render to None or null in json.
# Using allow_nan = True will make Python render nan as NaN, leading to parse error in front-end
kwargs.setdefault("allow_nan", False)
return json.dumps(_sanitize_data(data), *args, **kwargs)
return json.dumps(data, *args, **kwargs)
def mustache_render(template, context=None, **kwargs):
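
For context on the sanitization step: with allow_nan=False the standard library refuses to serialize NaN and infinities at all, so json_dumps first walks the structure and replaces them with None. A minimal sketch of both halves:

import json
import math

def sanitize(data):
    # Recursively replace NaN/Inf/-Inf with None so allow_nan=False can serialize the result.
    if isinstance(data, dict):
        return {k: sanitize(v) for k, v in data.items()}
    if isinstance(data, list):
        return [sanitize(v) for v in data]
    if isinstance(data, float) and (math.isnan(data) or math.isinf(data)):
        return None
    return data

row = {"a": 1.0, "b": float("nan"), "c": float("inf")}
try:
    json.dumps(row, allow_nan=False)
except ValueError as exc:
    print("unsanitized:", exc)                      # Out of range float values are not JSON compliant
print(json.dumps(sanitize(row), allow_nan=False))   # {"a": 1.0, "b": null, "c": null}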

View File

@@ -33,7 +33,7 @@ from sqlalchemy.orm import mapperlib
from sqlalchemy.orm.properties import ColumnProperty
from sqlalchemy.orm.query import _ColumnEntity
from sqlalchemy.orm.util import AliasedInsp
from sqlalchemy.sql.expression import asc, desc, nullslast
from sqlalchemy.sql.expression import asc, desc
def get_query_descriptor(query, entity, attr):
@@ -225,7 +225,7 @@ class QuerySorter:
def assign_order_by(self, entity, attr, func):
expr = get_query_descriptor(self.query, entity, attr)
if expr is not None:
return self.query.order_by(nullslast(func(expr)))
return self.query.order_by(func(expr))
if not self.silent:
raise QuerySorterException("Could not sort query with expression '%s'" % attr)
return self.query
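
The sorting tweak here wraps each ORDER BY expression in NULLS LAST, so rows without a value (for example never-starred items when ordering by starred_at) sink to the bottom instead of leading the list. A tiny SQLAlchemy Core illustration of what the wrapper compiles to:

from sqlalchemy import column, desc, nullslast

starred_at = column("starred_at")

print(desc(starred_at))             # starred_at DESC
print(nullslast(desc(starred_at)))  # starred_at DESC NULLS LAST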

View File

@@ -261,19 +261,15 @@ def test_webex_notify_calls_requests_post():
alert.name = "Test Alert"
alert.custom_subject = "Test custom subject"
alert.custom_body = "Test custom body"
alert.render_template = mock.Mock(return_value={"Rendered": "template"})
alert.render_template = mock.Mock(return_value={"Rendered": "template"})
query = mock.Mock()
query.id = 1
user = mock.Mock()
app = mock.Mock()
host = "https://localhost:5000"
options = {
"webex_bot_token": "abcd",
"to_room_ids": "1234,5678",
"to_person_emails": "example1@test.com,example2@test.com",
}
options = {"webex_bot_token": "abcd", "to_room_ids": "1234"}
metadata = {"Scheduled": False}
new_state = Alert.TRIGGERED_STATE
@@ -281,7 +277,7 @@ def test_webex_notify_calls_requests_post():
with mock.patch("redash.destinations.webex.requests.post") as mock_post:
mock_response = mock.Mock()
mock_response.status_code = 200
mock_response.status_code = 204
mock_post.return_value = mock_response
destination.notify(alert, query, user, new_state, app, host, metadata, options)
@@ -289,111 +285,13 @@ def test_webex_notify_calls_requests_post():
query_link = f"{host}/queries/{query.id}"
alert_link = f"{host}/alerts/{alert.id}"
expected_attachments = Webex.formatted_attachments_template(
alert.custom_subject, alert.custom_body, query_link, alert_link
)
expected_payload_room = {
"markdown": alert.custom_subject + "\n" + alert.custom_body,
"attachments": expected_attachments,
"roomId": "1234",
}
expected_payload_email = {
"markdown": alert.custom_subject + "\n" + alert.custom_body,
"attachments": expected_attachments,
"toPersonEmail": "example1@test.com",
}
# Check that requests.post was called for both roomId and toPersonEmail destinations
mock_post.assert_any_call(
destination.api_base_url,
json=expected_payload_room,
headers={"Authorization": "Bearer abcd"},
timeout=5.0,
)
mock_post.assert_any_call(
destination.api_base_url,
json=expected_payload_email,
headers={"Authorization": "Bearer abcd"},
timeout=5.0,
)
assert mock_response.status_code == 200
def test_webex_notify_handles_blank_entries():
alert = mock.Mock(spec_set=["id", "name", "custom_subject", "custom_body", "render_template"])
alert.id = 1
alert.name = "Test Alert"
alert.custom_subject = "Test custom subject"
alert.custom_body = "Test custom body"
alert.render_template = mock.Mock(return_value={"Rendered": "template"})
query = mock.Mock()
query.id = 1
user = mock.Mock()
app = mock.Mock()
host = "https://localhost:5000"
options = {
"webex_bot_token": "abcd",
"to_room_ids": "",
"to_person_emails": "",
}
metadata = {"Scheduled": False}
new_state = Alert.TRIGGERED_STATE
destination = Webex(options)
with mock.patch("redash.destinations.webex.requests.post") as mock_post:
destination.notify(alert, query, user, new_state, app, host, metadata, options)
# Ensure no API calls are made when destinations are blank
mock_post.assert_not_called()
def test_webex_notify_handles_2d_array():
alert = mock.Mock(spec_set=["id", "name", "custom_subject", "custom_body", "render_template"])
alert.id = 1
alert.name = "Test Alert"
alert.custom_subject = "Test custom subject"
alert.custom_body = "Test custom body with table [['Col1', 'Col2'], ['Val1', 'Val2']]"
alert.render_template = mock.Mock(return_value={"Rendered": "template"})
query = mock.Mock()
query.id = 1
user = mock.Mock()
app = mock.Mock()
host = "https://localhost:5000"
options = {
"webex_bot_token": "abcd",
"to_room_ids": "1234",
}
metadata = {"Scheduled": False}
new_state = Alert.TRIGGERED_STATE
destination = Webex(options)
with mock.patch("redash.destinations.webex.requests.post") as mock_post:
mock_response = mock.Mock()
mock_response.status_code = 200
mock_post.return_value = mock_response
destination.notify(alert, query, user, new_state, app, host, metadata, options)
query_link = f"{host}/queries/{query.id}"
alert_link = f"{host}/alerts/{alert.id}"
expected_attachments = Webex.formatted_attachments_template(
formatted_attachments = Webex.formatted_attachments_template(
alert.custom_subject, alert.custom_body, query_link, alert_link
)
expected_payload = {
"markdown": alert.custom_subject + "\n" + alert.custom_body,
"attachments": expected_attachments,
"attachments": formatted_attachments,
"roomId": "1234",
}
@@ -404,60 +302,7 @@ def test_webex_notify_handles_2d_array():
timeout=5.0,
)
assert mock_response.status_code == 200
def test_webex_notify_handles_1d_array():
alert = mock.Mock(spec_set=["id", "name", "custom_subject", "custom_body", "render_template"])
alert.id = 1
alert.name = "Test Alert"
alert.custom_subject = "Test custom subject"
alert.custom_body = "Test custom body with 1D array, however unlikely ['Col1', 'Col2']"
alert.render_template = mock.Mock(return_value={"Rendered": "template"})
query = mock.Mock()
query.id = 1
user = mock.Mock()
app = mock.Mock()
host = "https://localhost:5000"
options = {
"webex_bot_token": "abcd",
"to_room_ids": "1234",
}
metadata = {"Scheduled": False}
new_state = Alert.TRIGGERED_STATE
destination = Webex(options)
with mock.patch("redash.destinations.webex.requests.post") as mock_post:
mock_response = mock.Mock()
mock_response.status_code = 200
mock_post.return_value = mock_response
destination.notify(alert, query, user, new_state, app, host, metadata, options)
query_link = f"{host}/queries/{query.id}"
alert_link = f"{host}/alerts/{alert.id}"
expected_attachments = Webex.formatted_attachments_template(
alert.custom_subject, alert.custom_body, query_link, alert_link
)
expected_payload = {
"markdown": alert.custom_subject + "\n" + alert.custom_body,
"attachments": expected_attachments,
"roomId": "1234",
}
mock_post.assert_called_once_with(
destination.api_base_url,
json=expected_payload,
headers={"Authorization": "Bearer abcd"},
timeout=5.0,
)
assert mock_response.status_code == 200
assert mock_response.status_code == 204
def test_datadog_notify_calls_requests_post():

View File

@@ -1,42 +0,0 @@
from unittest import TestCase
from unittest.mock import patch
from redash.query_runner.azure_kusto import AzureKusto
class TestAzureKusto(TestCase):
def setUp(self):
self.configuration = {
"cluster": "https://example.kusto.windows.net",
"database": "sample_db",
"azure_ad_client_id": "client_id",
"azure_ad_client_secret": "client_secret",
"azure_ad_tenant_id": "tenant_id",
}
self.kusto = AzureKusto(self.configuration)
@patch.object(AzureKusto, "run_query")
def test_get_schema(self, mock_run_query):
mock_response = {
"rows": [
{
"DatabaseSchema": '{"Databases":{"sample_db":{"Tables":{"Table1":{"Name":"Table1","OrderedColumns":[{"Name":"Column1","Type":"System.String","CslType":"string"},{"Name":"Column2","Type":"System.DateTime","CslType":"datetime"}]}},"MaterializedViews":{"View1":{"Name":"View1","OrderedColumns":[{"Name":"Column1","Type":"System.String","CslType":"string"},{"Name":"Column2","Type":"System.DateTime","CslType":"datetime"}]}}}}}'
}
]
}
mock_run_query.return_value = (mock_response, None)
expected_schema = [
{
"name": "Table1",
"columns": [{"name": "Column1", "type": "string"}, {"name": "Column2", "type": "datetime"}],
},
{
"name": "View1",
"columns": [{"name": "Column1", "type": "string"}, {"name": "Column2", "type": "datetime"}],
},
]
schema = self.kusto.get_schema()
print(schema)
self.assertEqual(schema, expected_schema)

View File

@@ -20,7 +20,7 @@ class TestBigQueryQueryRunner(unittest.TestCase):
query = "SELECT a FROM tbl"
expect = (
"/* Username: username, query_id: adhoc, "
"Query Hash: query-hash, "
"Job ID: job-id, Query Hash: query-hash, "
"Scheduled: False */ SELECT a FROM tbl"
)

View File

@@ -1,31 +0,0 @@
from redash.utils import json_dumps, json_loads
from tests import BaseTestCase
class TestJsonDumps(BaseTestCase):
"""
NaN, Inf, and -Inf are sanitized to None.
"""
def test_data_with_nan_is_sanitized(self):
input_data = {
"columns": [
{"name": "_col0", "friendly_name": "_col0", "type": "float"},
{"name": "_col1", "friendly_name": "_col1", "type": "float"},
{"name": "_col2", "friendly_name": "_col1", "type": "float"},
{"name": "_col3", "friendly_name": "_col1", "type": "float"},
],
"rows": [{"_col0": 1.0, "_col1": float("nan"), "_col2": float("inf"), "_col3": float("-inf")}],
}
expected_output_data = {
"columns": [
{"name": "_col0", "friendly_name": "_col0", "type": "float"},
{"name": "_col1", "friendly_name": "_col1", "type": "float"},
{"name": "_col2", "friendly_name": "_col1", "type": "float"},
{"name": "_col3", "friendly_name": "_col1", "type": "float"},
],
"rows": [{"_col0": 1.0, "_col1": None, "_col2": None, "_col3": None}],
}
json_data = json_dumps(input_data)
actual_output_data = json_loads(json_data)
self.assertEqual(actual_output_data, expected_output_data)

View File

@@ -46,7 +46,7 @@
"@types/jest": "^26.0.18",
"@types/leaflet": "^1.5.19",
"@types/numeral": "0.0.28",
"@types/plotly.js": "^3.0.3",
"@types/plotly.js": "^1.54.22",
"@types/react": "^17.0.0",
"@types/react-dom": "^17.0.0",
"@types/tinycolor2": "^1.4.2",
@@ -62,7 +62,7 @@
"less-loader": "^11.1.3",
"less-plugin-autoprefix": "^2.0.0",
"npm-run-all": "^4.1.5",
"prettier": "3.3.2",
"prettier": "^1.19.1",
"prop-types": "^15.7.2",
"style-loader": "^3.3.3",
"ts-migrate": "^0.1.35",
@@ -91,7 +91,7 @@
"leaflet.markercluster": "^1.1.0",
"lodash": "^4.17.10",
"numeral": "^2.0.6",
"plotly.js": "3.1.0",
"plotly.js": "1.58.5",
"react-pivottable": "^0.9.0",
"react-sortable-hoc": "^1.10.1",
"tinycolor2": "^1.4.1",

View File

@@ -5,7 +5,6 @@ import numeral from "numeral";
import { isString, isArray, isUndefined, isFinite, isNil, toString } from "lodash";
import { visualizationsSettings } from "@/visualizations/visualizationsSettings";
numeral.options.scalePercentBy100 = false;
// eslint-disable-next-line
@@ -13,16 +12,9 @@ const urlPattern = /(^|[\s\n]|<br\/?>)((?:https?|ftp):\/\/[\-A-Z0-9+\u0026\u2019
const hasOwnProperty = Object.prototype.hasOwnProperty;
function NullValueComponent() {
return <span className="display-as-null">{visualizationsSettings.nullValue}</span>;
}
export function createTextFormatter(highlightLinks: any) {
if (highlightLinks) {
return (value: any) => {
if (value === null) {
return <NullValueComponent/>
}
if (isString(value)) {
const Link = visualizationsSettings.LinkComponent;
value = value.replace(urlPattern, (unused, prefix, href) => {
@@ -37,7 +29,7 @@ export function createTextFormatter(highlightLinks: any) {
return toString(value);
};
}
return (value: any) => value === null ? <NullValueComponent/> : toString(value);
return (value: any) => toString(value);
}
function toMoment(value: any) {
@@ -54,14 +46,11 @@ function toMoment(value: any) {
export function createDateTimeFormatter(format: any) {
if (isString(format) && format !== "") {
return (value: any) => {
if (value === null) {
return <NullValueComponent/>;
}
const wrapped = toMoment(value);
return wrapped.isValid() ? wrapped.format(format) : toString(value);
};
}
return (value: any) => value === null ? <NullValueComponent/> : toString(value);
return (value: any) => toString(value);
}
export function createBooleanFormatter(values: any) {
@@ -69,9 +58,6 @@ export function createBooleanFormatter(values: any) {
if (values.length >= 2) {
// Both `true` and `false` specified
return (value: any) => {
if (value === null) {
return <NullValueComponent/>;
}
if (isNil(value)) {
return "";
}
@@ -83,9 +69,6 @@ export function createBooleanFormatter(values: any) {
}
}
return (value: any) => {
if (value === null) {
return <NullValueComponent/>;
}
if (isNil(value)) {
return "";
}
@@ -93,20 +76,12 @@ export function createBooleanFormatter(values: any) {
};
}
export function createNumberFormatter(format: any, canReturnHTMLElement: boolean = false) {
export function createNumberFormatter(format: any) {
if (isString(format) && format !== "") {
const n = numeral(0); // cache `numeral` instance
return (value: any) => {
if (canReturnHTMLElement && value === null) {
return <NullValueComponent/>;
}
if (value === "" || value === null) {
return "";
}
return n.set(value).format(format);
}
return (value: any) => (value === null || value === "" ? "" : n.set(value).format(format));
}
return (value: any) => (canReturnHTMLElement && value === null) ? <NullValueComponent/> : toString(value);
return (value: any) => toString(value);
}
export function formatSimpleTemplate(str: any, data: any) {

View File

@@ -336,7 +336,7 @@ export default function GeneralSettings({ options, data, onOptionsChange }: any)
</Section>
)}
{!includes(["custom", "heatmap", "bubble"], options.globalSeriesType) && (
{!includes(["custom", "heatmap", "bubble", "scatter"], options.globalSeriesType) && (
// @ts-expect-error ts-migrate(2745) FIXME: This JSX tag's 'children' prop expects type 'never... Remove this comment to see the full error message
<Section>
<Select

View File

@@ -10,7 +10,7 @@ export default {
Renderer,
Editor,
defaultColumns: 6,
defaultColumns: 3,
defaultRows: 8,
minColumns: 1,
minRows: 5,

View File

@@ -48,6 +48,7 @@
"series": [
{
"visible": true,
"offsetgroup": "0",
"type": "bar",
"name": "a",
"x": ["x1", "x2", "x3", "x4"],
@@ -63,6 +64,7 @@
},
{
"visible": true,
"offsetgroup": "1",
"type": "bar",
"name": "b",
"x": ["x1", "x2", "x3", "x4"],

View File

@@ -48,6 +48,7 @@
"series": [
{
"visible": true,
"offsetgroup": "0",
"type": "bar",
"name": "a",
"x": ["x1", "x2", "x3", "x4"],
@@ -63,6 +64,7 @@
},
{
"visible": true,
"offsetgroup": "1",
"type": "bar",
"name": "b",
"x": ["x1", "x2", "x3", "x4"],

View File

@@ -27,17 +27,15 @@
"automargin": true,
"showticklabels": true,
"title": null,
"tickformat": null,
"type": "-"
},
"yaxis": {
"automargin": true,
"title": null,
"tickformat": null,
"type": "linear",
"autorange": true,
"range": null
},
},
"hoverlabel": {
"namelength": -1
}

View File

@@ -30,13 +30,11 @@
"automargin": true,
"showticklabels": true,
"title": null,
"tickformat": null,
"type": "-"
},
"yaxis": {
"automargin": true,
"title": null,
"tickformat": null,
"type": "linear",
"autorange": true,
"range": null
@@ -44,13 +42,12 @@
"yaxis2": {
"automargin": true,
"title": null,
"tickformat": null,
"type": "linear",
"autorange": true,
"range": null,
"overlaying": "y",
"side": "right"
},
},
"hoverlabel": {
"namelength": -1
}

View File

@@ -25,21 +25,18 @@
"automargin": true,
"showticklabels": true,
"title": null,
"tickformat": null,
"type": "-"
},
"yaxis": {
"automargin": true,
"title": null,
"tickformat": null,
"type": "linear",
"autorange": true,
"range": null
},
},
"hoverlabel": {
"namelength": -1
},
"hovermode": "x"
}
}
}
}

View File

@@ -28,13 +28,11 @@
"automargin": true,
"showticklabels": true,
"title": null,
"tickformat": null,
"type": "-"
},
"yaxis": {
"automargin": true,
"title": null,
"tickformat": null,
"type": "linear",
"autorange": true,
"range": null
@@ -42,17 +40,15 @@
"yaxis2": {
"automargin": true,
"title": null,
"tickformat": null,
"type": "linear",
"autorange": true,
"range": null,
"overlaying": "y",
"side": "right"
},
},
"hoverlabel": {
"namelength": -1
},
"hovermode": "x"
}
}
}
}

View File

@@ -24,21 +24,18 @@
"automargin": true,
"showticklabels": true,
"title": null,
"tickformat": null,
"type": "-"
},
"yaxis": {
"automargin": true,
"title": null,
"tickformat": null,
"type": "linear",
"autorange": true,
"range": null
},
},
"hoverlabel": {
"namelength": -1
},
"hovermode": "x"
}
}
}
}

View File

@@ -23,21 +23,18 @@
"automargin": true,
"showticklabels": true,
"title": null,
"tickformat": null,
"type": "-"
},
"yaxis": {
"automargin": true,
"title": null,
"tickformat": null,
"type": "linear",
"autorange": true,
"range": null
},
},
"hoverlabel": {
"namelength": -1
},
"hovermode": "x"
}
}
}
}

View File

@@ -1,6 +1,5 @@
import * as Plotly from "plotly.js";
import "./locales"
import prepareData from "./prepareData";
import prepareLayout from "./prepareLayout";
import updateData from "./updateData";
@@ -8,31 +7,9 @@ import updateAxes from "./updateAxes";
import updateChartSize from "./updateChartSize";
import { prepareCustomChartData, createCustomChartRenderer } from "./customChartUtils";
const rangeSliderIcon = {
'width': 400,
'height': 400,
'path': 'M50 180h300a20 20 0 0 1 0 40H50a20 20 0 0 1 0-40z M160 200a40 40 0 1 0 -80 0a40 40 0 1 0 80 0 M320 200a40 40 0 1 0 -80 0a40 40 0 1 0 80 0',
};
// @ts-expect-error ts-migrate(2339) FIXME: Property 'setPlotConfig' does not exist on type 't... Remove this comment to see the full error message
Plotly.setPlotConfig({
modeBarButtonsToRemove: ["sendDataToCloud"],
modeBarButtonsToAdd: ["togglespikelines", "v1hovermode",
{
name: 'toggleRangeslider',
title: 'Toggle rangeslider',
icon: rangeSliderIcon,
click: function(gd: any) {
if(gd?.layout?.xaxis) {
let newRangeslider: any = {};
if (gd.layout.xaxis?.rangeslider) {
newRangeslider = null;
}
(Plotly.relayout as any)(gd, 'xaxis.rangeslider', newRangeslider);
}
}
},
],
locale: window.navigator.language,
});
export {
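
Note: the custom modebar button in the hunk above toggles the x-axis rangeslider through Plotly.relayout. A condensed sketch of just that toggle, with the graph-div typed as any as in the diff:

import * as Plotly from "plotly.js";

// Passing an empty object enables the x-axis rangeslider; passing null removes it.
function toggleRangeslider(gd: any) {
  if (!gd?.layout?.xaxis) return;
  const next = gd.layout.xaxis.rangeslider ? null : {};
  (Plotly.relayout as any)(gd, "xaxis.rangeslider", next);
}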

View File

@@ -1,230 +0,0 @@
import * as Plotly from "plotly.js";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeAf from "plotly.js/lib/locales/af";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeAm from "plotly.js/lib/locales/am";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeAr_dz from "plotly.js/lib/locales/ar-dz";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeAr_eg from "plotly.js/lib/locales/ar-eg";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeAr from "plotly.js/lib/locales/ar";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeAz from "plotly.js/lib/locales/az";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeBg from "plotly.js/lib/locales/bg";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeBs from "plotly.js/lib/locales/bs";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeCa from "plotly.js/lib/locales/ca";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeCs from "plotly.js/lib/locales/cs";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeCy from "plotly.js/lib/locales/cy";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeDa from "plotly.js/lib/locales/da";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeDe_ch from "plotly.js/lib/locales/de-ch";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeDe from "plotly.js/lib/locales/de";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeEl from "plotly.js/lib/locales/el";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeEo from "plotly.js/lib/locales/eo";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeEs_ar from "plotly.js/lib/locales/es-ar";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeEs_pe from "plotly.js/lib/locales/es-pe";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeEs from "plotly.js/lib/locales/es";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeEt from "plotly.js/lib/locales/et";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeEu from "plotly.js/lib/locales/eu";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeFa from "plotly.js/lib/locales/fa";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeFi from "plotly.js/lib/locales/fi";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeFo from "plotly.js/lib/locales/fo";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeFr_ch from "plotly.js/lib/locales/fr-ch";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeFr from "plotly.js/lib/locales/fr";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeGl from "plotly.js/lib/locales/gl";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeGu from "plotly.js/lib/locales/gu";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeHe from "plotly.js/lib/locales/he";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeHi_in from "plotly.js/lib/locales/hi-in";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeHr from "plotly.js/lib/locales/hr";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeHu from "plotly.js/lib/locales/hu";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeHy from "plotly.js/lib/locales/hy";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeId from "plotly.js/lib/locales/id";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeIs from "plotly.js/lib/locales/is";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeIt from "plotly.js/lib/locales/it";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeJa from "plotly.js/lib/locales/ja";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeKa from "plotly.js/lib/locales/ka";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeKm from "plotly.js/lib/locales/km";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeKo from "plotly.js/lib/locales/ko";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeLt from "plotly.js/lib/locales/lt";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeLv from "plotly.js/lib/locales/lv";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeMe_me from "plotly.js/lib/locales/me-me";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeMe from "plotly.js/lib/locales/me";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeMk from "plotly.js/lib/locales/mk";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeMl from "plotly.js/lib/locales/ml";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeMs from "plotly.js/lib/locales/ms";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeMt from "plotly.js/lib/locales/mt";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeNl_be from "plotly.js/lib/locales/nl-be";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeNl from "plotly.js/lib/locales/nl";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeNo from "plotly.js/lib/locales/no";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localePa from "plotly.js/lib/locales/pa";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localePl from "plotly.js/lib/locales/pl";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localePt_br from "plotly.js/lib/locales/pt-br";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localePt_pt from "plotly.js/lib/locales/pt-pt";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeRm from "plotly.js/lib/locales/rm";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeRo from "plotly.js/lib/locales/ro";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeRu from "plotly.js/lib/locales/ru";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeSk from "plotly.js/lib/locales/sk";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeSl from "plotly.js/lib/locales/sl";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeSq from "plotly.js/lib/locales/sq";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeSr_sr from "plotly.js/lib/locales/sr-sr";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeSr from "plotly.js/lib/locales/sr";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeSv from "plotly.js/lib/locales/sv";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeSw from "plotly.js/lib/locales/sw";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeTa from "plotly.js/lib/locales/ta";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeTh from "plotly.js/lib/locales/th";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeTr from "plotly.js/lib/locales/tr";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeTt from "plotly.js/lib/locales/tt";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeUk from "plotly.js/lib/locales/uk";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeUr from "plotly.js/lib/locales/ur";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeVi from "plotly.js/lib/locales/vi";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeZh_cn from "plotly.js/lib/locales/zh-cn";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeZh_hk from "plotly.js/lib/locales/zh-hk";
// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module
import localeZh_tw from "plotly.js/lib/locales/zh-tw";
(Plotly as any).register([
localeAf,
localeAm,
localeAr_dz,
localeAr_eg,
localeAr,
localeAz,
localeBg,
localeBs,
localeCa,
localeCs,
localeCy,
localeDa,
localeDe_ch,
localeDe,
localeEl,
localeEo,
localeEs_ar,
localeEs_pe,
localeEs,
localeEt,
localeEu,
localeFa,
localeFi,
localeFo,
localeFr_ch,
localeFr,
localeGl,
localeGu,
localeHe,
localeHi_in,
localeHr,
localeHu,
localeHy,
localeId,
localeIs,
localeIt,
localeJa,
localeKa,
localeKm,
localeKo,
localeLt,
localeLv,
localeMe_me,
localeMe,
localeMk,
localeMl,
localeMs,
localeMt,
localeNl_be,
localeNl,
localeNo,
localePa,
localePl,
localePt_br,
localePt_pt,
localeRm,
localeRo,
localeRu,
localeSk,
localeSl,
localeSq,
localeSr_sr,
localeSr,
localeSv,
localeSw,
localeTa,
localeTh,
localeTr,
localeTt,
localeUk,
localeUr,
localeVi,
localeZh_cn,
localeZh_hk,
localeZh_tw,
]);

View File

@@ -26,13 +26,9 @@ function getHoverInfoPattern(options: any) {
function prepareBarSeries(series: any, options: any, additionalOptions: any) {
series.type = "bar";
if (!options.series.stacking) {
series.offsetgroup = toString(additionalOptions.index);
}
series.offsetgroup = toString(additionalOptions.index);
if (options.showDataLabels) {
series.textposition = "inside";
} else {
series.textposition = "none";
}
return series;
}
@@ -96,18 +92,15 @@ function prepareSeries(series: any, options: any, numSeries: any, additionalOpti
// For bubble/scatter charts `y` may be any (similar to `x`) - numeric is only bubble size;
// for other types `y` is always number
const cleanYValue = includes(["bubble", "scatter"], seriesOptions.type)
? (v: any, axixType: any) => {
v = normalizeValue(v, axixType);
return includes(["scatter"], seriesOptions.type) && options.missingValuesAsZero && isNil(v) ? 0.0 : v;
}
? normalizeValue
: (v: any) => {
v = cleanNumber(v);
return options.missingValuesAsZero && isNil(v) ? 0.0 : v;
};
const sourceData = new Map();
const xValues: any[] = [];
const yValues: any[] = [];
const labelsValuesMap = new Map();
const yErrorValues: any = [];
each(data, row => {
@@ -115,20 +108,27 @@ function prepareSeries(series: any, options: any, numSeries: any, additionalOpti
const y = cleanYValue(row.y, seriesYAxis === "y2" ? options.yAxis[1].type : options.yAxis[0].type); // depends on series type!
const yError = cleanNumber(row.yError); // always number
const size = cleanNumber(row.size); // always number
if (labelsValuesMap.has(x)) {
labelsValuesMap.set(x, labelsValuesMap.get(x) + y);
} else {
labelsValuesMap.set(x, y);
}
const aggregatedY = labelsValuesMap.get(x);
sourceData.set(x, {
x,
y,
y: aggregatedY,
yError,
size,
yPercent: null, // will be updated later
row,
});
xValues.push(x);
yValues.push(y);
yErrorValues.push(yError);
});
const xValues = Array.from(labelsValuesMap.keys());
const yValues = Array.from(labelsValuesMap.values());
const plotlySeries = {
visible: true,
hoverinfo: hoverInfoPattern,
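
Note: one side of this hunk builds the plotted x/y arrays from labelsValuesMap, summing y values that share the same x label instead of pushing every row; the same file also assigns each bar series its own offsetgroup, which is where the "offsetgroup": "0" / "1" entries in the chart snapshots above come from. A small sketch of the aggregation with made-up rows:

// Rows with duplicate x labels have their y values summed before plotting.
const rows = [
  { x: "x1", y: 1 },
  { x: "x2", y: 2 },
  { x: "x1", y: 3 },
];

const labelsValuesMap = new Map<string, number>();
for (const row of rows) {
  labelsValuesMap.set(row.x, (labelsValuesMap.get(row.x) ?? 0) + row.y);
}

const xValues = Array.from(labelsValuesMap.keys());   // ["x1", "x2"]
const yValues = Array.from(labelsValuesMap.values()); // [4, 2]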

View File

@@ -21,7 +21,7 @@ function prepareXAxis(axisOptions: any, additionalOptions: any) {
title: getAxisTitle(axisOptions),
type: getAxisScaleType(axisOptions),
automargin: true,
tickformat: axisOptions.tickFormat ?? null,
tickformat: axisOptions.tickFormat,
};
if (additionalOptions.sortX && axis.type === "category") {
@@ -49,7 +49,7 @@ function prepareYAxis(axisOptions: any) {
automargin: true,
autorange: true,
range: null,
tickformat: axisOptions.tickFormat ?? null,
tickformat: axisOptions.tickFormat,
};
}
@@ -109,7 +109,7 @@ function prepareBoxLayout(layout: any, options: any, data: any) {
}
export default function prepareLayout(element: any, options: any, data: any) {
const layout: any = {
const layout = {
margin: { l: 10, r: 10, b: 5, t: 20, pad: 4 },
// plot size should be at least 5x5px
width: Math.max(5, Math.floor(element.offsetWidth)),
@@ -124,10 +124,6 @@ export default function prepareLayout(element: any, options: any, data: any) {
},
};
if (["line", "area", "column"].includes(options.globalSeriesType)) {
layout.hovermode = options.swappedAxes ? 'y' : 'x';
}
switch (options.globalSeriesType) {
case "pie":
return preparePieLayout(layout, options, data);
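
Note: the `?? null` change in prepareXAxis/prepareYAxis above is presumably what adds or removes the literal "tickformat": null entries in the snapshot files earlier in this diff: a nullish-coalesced value keeps the key in the serialized layout, while a bare undefined is dropped by JSON serialization (likewise, the "hovermode": "x" snapshot entries track the swappedAxes branch in this file). A two-line illustration:

JSON.stringify({ tickformat: undefined ?? null }); // '{"tickformat":null}'
JSON.stringify({ tickformat: undefined });         // '{}'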

View File

@@ -9,7 +9,7 @@ export default {
Renderer,
Editor,
defaultColumns: 6,
defaultColumns: 3,
defaultRows: 8,
minColumns: 2,
};

View File

@@ -22,6 +22,6 @@ export default {
Renderer,
Editor,
defaultColumns: 4,
defaultColumns: 2,
defaultRows: 5,
};

View File

@@ -10,6 +10,6 @@ export default {
...options,
}),
Renderer: DetailsRenderer,
defaultColumns: 4,
defaultColumns: 2,
defaultRows: 2,
};

View File

@@ -9,7 +9,7 @@ export default {
Renderer,
Editor,
defaultColumns: 6,
defaultColumns: 3,
defaultRows: 8,
minColumns: 2,
};

View File

@@ -23,6 +23,6 @@ export default {
Editor,
defaultRows: 10,
defaultColumns: 6,
defaultColumns: 3,
minColumns: 2,
};

View File

@@ -5,7 +5,7 @@ Object {
"columns": Array [
Object {
"alignContent": "right",
"allowHTML": false,
"allowHTML": true,
"allowSearch": false,
"booleanValues": Array [
"false",
@@ -23,7 +23,6 @@ Object {
"linkTitleTemplate": "{{ @ }}",
"linkUrlTemplate": "{{ @ }}",
"name": "a",
"nullValue": "null",
"numberFormat": undefined,
"order": 100000,
"title": "a",
@@ -39,7 +38,7 @@ Object {
"columns": Array [
Object {
"alignContent": "left",
"allowHTML": false,
"allowHTML": true,
"allowSearch": false,
"booleanValues": Array [
"false",
@@ -57,7 +56,6 @@ Object {
"linkTitleTemplate": "{{ @ }}",
"linkUrlTemplate": "{{ @ }}",
"name": "a",
"nullValue": "null",
"numberFormat": undefined,
"order": 100000,
"title": "a",
@@ -73,7 +71,7 @@ Object {
"columns": Array [
Object {
"alignContent": "left",
"allowHTML": false,
"allowHTML": true,
"allowSearch": false,
"booleanValues": Array [
"false",
@@ -91,7 +89,6 @@ Object {
"linkTitleTemplate": "{{ @ }}",
"linkUrlTemplate": "{{ @ }}",
"name": "a",
"nullValue": "null",
"numberFormat": undefined,
"order": 100000,
"title": "test",
@@ -107,7 +104,7 @@ Object {
"columns": Array [
Object {
"alignContent": "left",
"allowHTML": false,
"allowHTML": true,
"allowSearch": true,
"booleanValues": Array [
"false",
@@ -125,7 +122,6 @@ Object {
"linkTitleTemplate": "{{ @ }}",
"linkUrlTemplate": "{{ @ }}",
"name": "a",
"nullValue": "null",
"numberFormat": undefined,
"order": 100000,
"title": "a",
@@ -141,7 +137,7 @@ Object {
"columns": Array [
Object {
"alignContent": "left",
"allowHTML": false,
"allowHTML": true,
"allowSearch": false,
"booleanValues": Array [
"false",
@@ -159,7 +155,6 @@ Object {
"linkTitleTemplate": "{{ @ }}",
"linkUrlTemplate": "{{ @ }}",
"name": "a",
"nullValue": "null",
"numberFormat": undefined,
"order": 100000,
"title": "a",

View File

@@ -33,7 +33,7 @@ function Editor({ column, onChange }: Props) {
}
export default function initNumberColumn(column: any) {
const format = createNumberFormatter(column.numberFormat, true);
const format = createNumberFormatter(column.numberFormat);
function prepareData(row: any) {
return {

View File

@@ -52,7 +52,7 @@ export default function initTextColumn(column: any) {
function TextColumn({ row }: any) {
// eslint-disable-line react/prop-types
const { text } = prepareData(row);
return (column.allowHTML && typeof text === 'string') ? <HtmlContent>{text}</HtmlContent> : text;
return column.allowHTML ? <HtmlContent>{text}</HtmlContent> : text;
}
TextColumn.prepareData = prepareData;
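
Note: the guard in the hunk above routes only string values through HtmlContent; anything else a formatter may return (a number, or a React element used as a null placeholder) is rendered as-is. A minimal sketch with a stand-in HtmlContent, since the real component lives elsewhere in the codebase:

import React from "react";

// Stand-in for the project's HtmlContent component (assumption for this sketch).
function HtmlContent({ children }: { children: string }) {
  return <div dangerouslySetInnerHTML={{ __html: children }} />;
}

function renderCell(allowHTML: boolean, text: unknown) {
  // Only plain strings take the HTML path; other values render unchanged.
  return allowHTML && typeof text === "string" ? <HtmlContent>{text}</HtmlContent> : text;
}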

View File

@@ -54,7 +54,7 @@ function getDefaultColumnsOptions(columns: any) {
allowSearch: false,
alignContent: getColumnContentAlignment(col.type),
// `string` cell options
allowHTML: false,
allowHTML: true,
highlightLinks: false,
}));
}
@@ -73,7 +73,6 @@ function getDefaultFormatOptions(column: any) {
dateTimeFormat: dateTimeFormat[column.type],
// @ts-expect-error ts-migrate(7053) FIXME: Element implicitly has an 'any' type because expre... Remove this comment to see the full error message
numberFormat: numberFormat[column.type],
nullValue: visualizationsSettings.nullValue,
booleanValues: visualizationsSettings.booleanValues || ["false", "true"],
// `image` cell options
imageUrlTemplate: "{{ @ }}",

View File

@@ -11,6 +11,6 @@ export default {
autoHeight: true,
defaultRows: 14,
defaultColumns: 6,
defaultColumns: 3,
minColumns: 2,
};

View File

@@ -39,11 +39,6 @@
white-space: nowrap;
}
.display-as-null {
font-style: italic;
color: @text-muted;
}
.table-visualization-spacer {
padding-left: 0;
padding-right: 0;

View File

@@ -42,7 +42,6 @@ export const visualizationsSettings = {
dateTimeFormat: "DD/MM/YYYY HH:mm",
integerFormat: "0,0",
floatFormat: "0,0.00",
nullValue: "null",
booleanValues: ["false", "true"],
tableCellMaxJSONSize: 50000,
allowCustomJSVisualizations: false,

File diff suppressed because it is too large

View File

@@ -3,7 +3,7 @@
const webpack = require("webpack");
const HtmlWebpackPlugin = require("html-webpack-plugin");
const WebpackBuildNotifierPlugin = require("webpack-build-notifier");
const { WebpackManifestPlugin } = require("webpack-manifest-plugin");
const ManifestPlugin = require("webpack-manifest-plugin");
const MiniCssExtractPlugin = require("mini-css-extract-plugin");
const CopyWebpackPlugin = require("copy-webpack-plugin");
const LessPluginAutoPrefix = require("less-plugin-autoprefix");
@@ -76,6 +76,8 @@ const config = {
publicPath: staticPath
},
node: {
fs: "empty",
path: "empty"
},
resolve: {
symlinks: false,
@@ -83,14 +85,6 @@ const config = {
alias: {
"@": appPath,
extensions: extensionPath
},
fallback: {
fs: false,
url: require.resolve("url/"),
stream: require.resolve("stream-browserify"),
assert: require.resolve("assert/"),
util: require.resolve("util/"),
process: require.resolve("process/browser"),
}
},
plugins: [
@@ -115,7 +109,7 @@ const config = {
new MiniCssExtractPlugin({
filename: "[name].[chunkhash].css"
}),
new WebpackManifestPlugin({
new ManifestPlugin({
fileName: "asset-manifest.json",
publicPath: ""
}),
@@ -128,13 +122,7 @@ const config = {
{ from: "client/app/assets/fonts", to: "fonts/" }
],
}),
isHotReloadingEnabled && new ReactRefreshWebpackPlugin({ overlay: false }),
new webpack.ProvidePlugin({
// Make a global `process` variable that points to the `process` package,
// because the `util` package expects there to be a global variable named `process`.
// Thanks to https://stackoverflow.com/a/65018686/14239942
process: 'process/browser'
})
isHotReloadingEnabled && new ReactRefreshWebpackPlugin({ overlay: false })
].filter(Boolean),
optimization: {
splitChunks: {
@@ -145,17 +133,6 @@ const config = {
},
module: {
rules: [
{
test: /\.js$/,
enforce: "pre",
use: ["source-map-loader"],
resolve: {
fullySpecified: false
},
exclude: [
/node_modules\/@plotly\/mapbox-gl/,
],
},
{
test: /\.(t|j)sx?$/,
exclude: /node_modules/,
@@ -251,7 +228,7 @@ const config = {
}
]
},
devtool: isProduction ? "source-map" : "eval-cheap-module-source-map",
devtool: isProduction ? "source-map" : "cheap-eval-module-source-map",
stats: {
children: false,
modules: false,
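
Note: the webpack changes above follow the usual webpack 4 / webpack 5 split for browser builds of Node built-ins: webpack 4 stubs them through the `node` option, while webpack 5 needs explicit `resolve.fallback` entries plus a ProvidePlugin shim for the global `process` (the devtool rename to "eval-cheap-module-source-map" is part of the same upgrade). A trimmed sketch of the webpack 5 side, using the package names that appear in the diff:

const webpack = require("webpack");

module.exports = {
  resolve: {
    fallback: {
      fs: false, // no browser substitute; imports of `fs` resolve to an empty module
      stream: require.resolve("stream-browserify"),
      process: require.resolve("process/browser"),
    },
  },
  plugins: [
    // Some packages (e.g. `util`) expect a global `process` object in the browser.
    new webpack.ProvidePlugin({ process: "process/browser" }),
  ],
};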

yarn.lock (3585 lines changed)

File diff suppressed because it is too large