Mirror of https://github.com/getredash/redash.git (synced 2025-12-25 01:03:20 -05:00)

Compare commits: 15 commits, system-sta...bq-schema-
| Author | SHA1 | Date |
|---|---|---|
| | 3449b70994 | |
| | 4fb77867b0 | |
| | a473611cb0 | |
| | 210008c714 | |
| | aa5d4f5f4e | |
| | 6b811c5245 | |
| | 83726da48a | |
| | 72dc157bbe | |
| | 1b8ff8e810 | |
| | 31ddd0fb79 | |
| | 5cabf7a724 | |
| | 59b135ace7 | |
| | 32b41e4112 | |
| | 2e31b91054 | |
| | 205915e6db | |
@@ -34,6 +34,8 @@ module.exports = {
// Do not complain about useless constructors in declaration files
"no-useless-constructor": "off",
"@typescript-eslint/no-useless-constructor": "error",
// Many API fields and generated types use camelcase
"@typescript-eslint/camelcase": "off",
},
},
],

@@ -1,7 +1,7 @@
import React, { useEffect, useState, useContext } from "react";
import PropTypes from "prop-types";
import { ErrorBoundaryContext } from "@redash/viz/lib/components/ErrorBoundary";
import { Auth } from "@/services/auth";
import { Auth, clientConfig } from "@/services/auth";

// This wrapper modifies `route.render` function and instead of passing `currentRoute` passes an object
// that contains:
@@ -33,7 +33,7 @@ function ApiKeySessionWrapper({ apiKey, currentRoute, renderChildren }) {
};
}, [apiKey]);

if (!isAuthenticated) {
if (!isAuthenticated || clientConfig.disablePublicUrls) {
return null;
}

@@ -1,21 +1,33 @@
import React, { useEffect, useState } from "react";
import PropTypes from "prop-types";
// @ts-expect-error (Must be removed after adding @redash/viz typing)
import ErrorBoundary, { ErrorBoundaryContext } from "@redash/viz/lib/components/ErrorBoundary";
import { Auth } from "@/services/auth";
import { policy } from "@/services/policy";
import { CurrentRoute } from "@/services/routes";
import organizationStatus from "@/services/organizationStatus";
import DynamicComponent from "@/components/DynamicComponent";
import ApplicationLayout from "./ApplicationLayout";
import ErrorMessage from "./ErrorMessage";

export type UserSessionWrapperRenderChildrenProps<P> = {
pageTitle?: string;
onError: (error: Error) => void;
} & P;

export interface UserSessionWrapperProps<P> {
render: (props: UserSessionWrapperRenderChildrenProps<P>) => React.ReactNode;
currentRoute: CurrentRoute<P>;
bodyClass?: string;
}

// This wrapper modifies `route.render` function and instead of passing `currentRoute` passes an object
// that contains:
// - `currentRoute.routeParams`
// - `pageTitle` field which is equal to `currentRoute.title`
// - `onError` field which is a `handleError` method of nearest error boundary

function UserSessionWrapper({ bodyClass, currentRoute, renderChildren }) {
export function UserSessionWrapper<P>({ bodyClass, currentRoute, render }: UserSessionWrapperProps<P>) {
const [isAuthenticated, setIsAuthenticated] = useState(!!Auth.isAuthenticated());

useEffect(() => {
let isCancelled = false;
Promise.all([Auth.requireSession(), organizationStatus.refresh(), policy.refresh()])
@@ -50,10 +62,10 @@ function UserSessionWrapper({ bodyClass, currentRoute, renderChildren }) {
return (
<ApplicationLayout>
<React.Fragment key={currentRoute.key}>
<ErrorBoundary renderError={error => <ErrorMessage error={error} />}>
<ErrorBoundary renderError={(error: Error) => <ErrorMessage error={error} />}>
<ErrorBoundaryContext.Consumer>
{({ handleError }) =>
renderChildren({ ...currentRoute.routeParams, pageTitle: currentRoute.title, onError: handleError })
{({ handleError }: { handleError: UserSessionWrapperRenderChildrenProps<P>["onError"] }) =>
render({ ...currentRoute.routeParams, pageTitle: currentRoute.title, onError: handleError })
}
</ErrorBoundaryContext.Consumer>
</ErrorBoundary>
@@ -62,21 +74,35 @@ function UserSessionWrapper({ bodyClass, currentRoute, renderChildren }) {
);
}

UserSessionWrapper.propTypes = {
bodyClass: PropTypes.string,
renderChildren: PropTypes.func,
export type RouteWithUserSessionOptions<P> = {
render: (props: UserSessionWrapperRenderChildrenProps<P>) => React.ReactNode;
bodyClass?: string;
title: string;
path: string;
};

UserSessionWrapper.defaultProps = {
bodyClass: null,
renderChildren: () => null,
};
export const UserSessionWrapperDynamicComponentName = "UserSessionWrapper";

export default function routeWithUserSession({ render, bodyClass, ...rest }) {
export default function routeWithUserSession<P extends {} = {}>({
render: originalRender,
bodyClass,
...rest
}: RouteWithUserSessionOptions<P>) {
return {
...rest,
render: currentRoute => (
<UserSessionWrapper bodyClass={bodyClass} currentRoute={currentRoute} renderChildren={render} />
),
render: (currentRoute: CurrentRoute<P>) => {
const props = {
render: originalRender,
bodyClass,
currentRoute,
};
return (
<DynamicComponent
{...props}
name={UserSessionWrapperDynamicComponentName}
fallback={<UserSessionWrapper {...props} />}
/>
);
},
};
}
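A minimal usage sketch of the typed `routeWithUserSession` helper introduced above; the page component, its params type, and the import path are illustrative assumptions rather than part of the changeset:

```tsx
import React from "react";
// Import path assumed from the repository layout; adjust to where routeWithUserSession lives.
import routeWithUserSession, { UserSessionWrapperRenderChildrenProps } from "@/components/ApplicationArea/routeWithUserSession";

type AlertPageParams = { alertId: string };

// The render callback receives the route params (typed as P) plus pageTitle and onError.
function AlertPage({ alertId, pageTitle, onError }: UserSessionWrapperRenderChildrenProps<AlertPageParams>) {
  return <h1>{pageTitle} #{alertId}</h1>;
}

export default routeWithUserSession<AlertPageParams>({
  path: "/alerts/:alertId",
  title: "Alert",
  render: pageProps => <AlertPage {...pageProps} />,
});
```

Because the route now renders through `DynamicComponent`, a multi-org build can register its own `UserSessionWrapper` override without touching call sites like this one.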
@@ -1,4 +1,4 @@
import { isFunction, isString } from "lodash";
import { isFunction, isString, isUndefined } from "lodash";
import React from "react";
import PropTypes from "prop-types";

@@ -24,6 +24,7 @@ export function unregisterComponent(name) {
export default class DynamicComponent extends React.Component {
static propTypes = {
name: PropTypes.string.isRequired,
fallback: PropTypes.node,
children: PropTypes.node,
};

@@ -40,10 +41,11 @@ export default class DynamicComponent extends React.Component {
}

render() {
const { name, children, ...props } = this.props;
const { name, children, fallback, ...props } = this.props;
const RealComponent = componentsRegistry.get(name);
if (!RealComponent) {
return children;
// return fallback if any, otherwise return children
return isUndefined(fallback) ? children : fallback;
}
return <RealComponent {...props}>{children}</RealComponent>;
}

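A sketch of how the new `fallback` prop behaves, assuming a `registerComponent(name, component)` counterpart to the `unregisterComponent` shown in the hunk; the component names here are hypothetical:

```tsx
import React from "react";
import DynamicComponent, { registerComponent } from "@/components/DynamicComponent";

const DefaultBanner = () => <div>Default banner</div>;

// With nothing registered under "Banner", the fallback renders instead of children.
export const usage = <DynamicComponent name="Banner" fallback={<DefaultBanner />} />;

// Once an override is registered under the same name, the registered component wins
// and receives the same props.
registerComponent("Banner", () => <div>Custom banner</div>);
```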
@@ -3,6 +3,7 @@ import PropTypes from "prop-types";
import Dropdown from "antd/lib/dropdown";
import Menu from "antd/lib/menu";
import Button from "antd/lib/button";
import { clientConfig } from "@/services/auth";

import PlusCircleFilledIcon from "@ant-design/icons/PlusCircleFilled";
import ShareAltOutlinedIcon from "@ant-design/icons/ShareAltOutlined";
@@ -22,7 +23,7 @@ export default function QueryControlDropdown(props) {
</a>
</Menu.Item>
)}
{!props.query.isNew() && (
{!clientConfig.disablePublicUrls && !props.query.isNew() && (
<Menu.Item>
<a onClick={() => props.showEmbedDialog(props.query, props.selectedTab)} data-test="ShowEmbedDialogButton">
<ShareAltOutlinedIcon /> Embed Elsewhere

@@ -1,82 +0,0 @@
import { map } from "lodash";
import React from "react";
import PropTypes from "prop-types";
import Badge from "antd/lib/badge";
import Menu from "antd/lib/menu";
import getTags from "@/services/getTags";

import "./TagsList.less";

export default class TagsList extends React.Component {
static propTypes = {
tagsUrl: PropTypes.string.isRequired,
onUpdate: PropTypes.func,
};

static defaultProps = {
onUpdate: () => {},
};

constructor(props) {
super(props);

this.state = {
// An array of objects that with the name and count of the tagged items
allTags: [],
// A set of tag names
selectedTags: new Set(),
};
}

componentDidMount() {
getTags(this.props.tagsUrl).then(allTags => {
this.setState({ allTags });
});
}

toggleTag(event, tag) {
const { selectedTags } = this.state;
if (event.shiftKey) {
// toggle tag
if (selectedTags.has(tag)) {
selectedTags.delete(tag);
} else {
selectedTags.add(tag);
}
} else {
// if the tag is the only selected, deselect it, otherwise select only it
if (selectedTags.has(tag) && selectedTags.size === 1) {
selectedTags.clear();
} else {
selectedTags.clear();
selectedTags.add(tag);
}
}
this.forceUpdate();

this.props.onUpdate([...this.state.selectedTags]);
}

render() {
const { allTags, selectedTags } = this.state;
if (allTags.length > 0) {
return (
<div className="m-t-10 tags-list tiled">
<Menu className="invert-stripe-position" mode="inline" selectedKeys={[...selectedTags]}>
{map(allTags, tag => (
<Menu.Item key={tag.name} className="m-0">
<a
className="d-flex align-items-center justify-content-between"
onClick={event => this.toggleTag(event, tag.name)}>
<span className="max-character col-xs-11">{tag.name}</span>
<Badge count={tag.count} />
</a>
</Menu.Item>
))}
</Menu>
</div>
);
}
return null;
}
}
@@ -1,15 +1,47 @@
@import '~@/assets/less/ant';
@import "~@/assets/less/ant";

.tags-list {
.tags-list-title {
margin: 15px 5px 5px 5px;
display: flex;
justify-content: space-between;
align-items: center;

label {
display: block;
white-space: nowrap;
margin: 0;
}

a {
display: block;
white-space: nowrap;
cursor: pointer;

.anticon {
font-size: 75%;
margin-right: 2px;
}
}
}

.ant-badge-count {
background-color: fade(@redash-gray, 10%);
color: fade(@redash-gray, 75%);
}

.ant-menu-item-selected {
.ant-badge-count {
background-color: @primary-color;
color: white;
.ant-menu.ant-menu-inline {
border: none;

.ant-menu-item {
width: 100%;
}

.ant-menu-item-selected {
.ant-badge-count {
background-color: @primary-color;
color: white;
}
}
}
}
}

client/app/components/TagsList.tsx (new file, 107 lines)
@@ -0,0 +1,107 @@
import { map, includes, difference } from "lodash";
import React, { useState, useCallback, useEffect } from "react";
import Badge from "antd/lib/badge";
import Menu from "antd/lib/menu";
import CloseOutlinedIcon from "@ant-design/icons/CloseOutlined";
import getTags from "@/services/getTags";

import "./TagsList.less";

type Tag = {
name: string;
count?: number;
};

type TagsListProps = {
tagsUrl: string;
showUnselectAll: boolean;
onUpdate?: (selectedTags: string[]) => void;
};

function TagsList({ tagsUrl, showUnselectAll = false, onUpdate }: TagsListProps): JSX.Element | null {
const [allTags, setAllTags] = useState<Tag[]>([]);
const [selectedTags, setSelectedTags] = useState<string[]>([]);

useEffect(() => {
let isCancelled = false;

getTags(tagsUrl).then(tags => {
if (!isCancelled) {
setAllTags(tags);
}
});

return () => {
isCancelled = true;
};
}, [tagsUrl]);

const toggleTag = useCallback(
(event, tag) => {
let newSelectedTags;
if (event.shiftKey) {
// toggle tag
if (includes(selectedTags, tag)) {
newSelectedTags = difference(selectedTags, [tag]);
} else {
newSelectedTags = [...selectedTags, tag];
}
} else {
// if the tag is the only selected, deselect it, otherwise select only it
if (includes(selectedTags, tag) && selectedTags.length === 1) {
newSelectedTags = [];
} else {
newSelectedTags = [tag];
}
}

setSelectedTags(newSelectedTags);
if (onUpdate) {
onUpdate([...newSelectedTags]);
}
},
[selectedTags, onUpdate]
);

const unselectAll = useCallback(() => {
setSelectedTags([]);
if (onUpdate) {
onUpdate([]);
}
}, [onUpdate]);

if (allTags.length === 0) {
return null;
}

return (
<div className="tags-list">
<div className="tags-list-title">
<label>Tags</label>
{showUnselectAll && selectedTags.length > 0 && (
<a onClick={unselectAll}>
<CloseOutlinedIcon />
clear selection
</a>
)}
</div>

<div className="tiled">
<Menu className="invert-stripe-position" mode="inline" selectedKeys={selectedTags}>
{map(allTags, tag => (
<Menu.Item key={tag.name} className="m-0">
<a
className="d-flex align-items-center justify-content-between"
onClick={event => toggleTag(event, tag.name)}>
<span className="max-character col-xs-11">{tag.name}</span>
<Badge count={tag.count} />
</a>
</Menu.Item>
))}
</Menu>
</div>
</div>
);
}

export default TagsList;
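Usage of the rewritten component stays prop-compatible with the class version it replaces; a short sketch (the tags URL mirrors the sidebar usage later in this changeset):

```tsx
import React from "react";
import TagsList from "@/components/TagsList";

export default function QueriesSidebarTags() {
  return (
    <TagsList
      tagsUrl="api/queries/tags"
      showUnselectAll
      onUpdate={selectedTags => console.log("filter by tags:", selectedTags)}
    />
  );
}
```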
@@ -11,7 +11,7 @@ function toMoment(value) {
return value && value.isValid() ? value : null;
}

export default function TimeAgo({ date, placeholder, autoUpdate }) {
export default function TimeAgo({ date, placeholder, autoUpdate, variation }) {
const startDate = toMoment(date);
const [value, setValue] = useState(null);
const title = useMemo(() => (startDate ? startDate.format(clientConfig.dateTimeFormat) : null), [startDate]);
@@ -28,6 +28,13 @@ export default function TimeAgo({ date, placeholder, autoUpdate }) {
}
}, [autoUpdate, startDate, placeholder]);

if (variation === "timeAgoInTooltip") {
return (
<Tooltip title={value}>
<span data-test="TimeAgo">{title}</span>
</Tooltip>
);
}
return (
<Tooltip title={title}>
<span data-test="TimeAgo">{value}</span>
@@ -39,6 +46,7 @@ TimeAgo.propTypes = {
date: PropTypes.oneOfType([PropTypes.string, PropTypes.number, PropTypes.instanceOf(Date), Moment]),
placeholder: PropTypes.string,
autoUpdate: PropTypes.bool,
variation: PropTypes.oneOf(["timeAgoInTooltip"]),
};

TimeAgo.defaultProps = {

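With `variation="timeAgoInTooltip"` the absolute timestamp becomes the visible text and the relative time moves into the tooltip, the inverse of the default. A usage sketch (import path assumed):

```tsx
import React from "react";
import TimeAgo from "@/components/TimeAgo";

const LastExecuted = ({ retrievedAt }: { retrievedAt: string }) => (
  <TimeAgo date={retrievedAt} variation="timeAgoInTooltip" />
);

export default LastExecuted;
```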
@@ -1,84 +0,0 @@
import Input from "antd/lib/input";
import { includes, isEmpty } from "lodash";
import PropTypes from "prop-types";
import React from "react";
import Link from "@/components/Link";
import EmptyState from "@/components/items-list/components/EmptyState";

import "./CardsList.less";

const { Search } = Input;

export default class CardsList extends React.Component {
static propTypes = {
items: PropTypes.arrayOf(
PropTypes.shape({
title: PropTypes.string.isRequired,
imgSrc: PropTypes.string.isRequired,
onClick: PropTypes.func,
href: PropTypes.string,
})
),
showSearch: PropTypes.bool,
};

static defaultProps = {
items: [],
showSearch: false,
};

state = {
searchText: "",
};

constructor(props) {
super(props);
this.items = [];

let itemId = 1;
props.items.forEach(item => {
this.items.push({ id: itemId, ...item });
itemId += 1;
});
}

// eslint-disable-next-line class-methods-use-this
renderListItem(item) {
return (
<Link key={`card${item.id}`} className="visual-card" onClick={item.onClick} href={item.href}>
<img alt={item.title} src={item.imgSrc} />
<h3>{item.title}</h3>
</Link>
);
}

render() {
const { showSearch } = this.props;
const { searchText } = this.state;

const filteredItems = this.items.filter(
item => isEmpty(searchText) || includes(item.title.toLowerCase(), searchText.toLowerCase())
);

return (
<div data-test="CardsList">
{showSearch && (
<div className="row p-10">
<div className="col-md-4 col-md-offset-4">
<Search placeholder="Search..." onChange={e => this.setState({ searchText: e.target.value })} autoFocus />
</div>
</div>
)}
{isEmpty(filteredItems) ? (
<EmptyState className="" />
) : (
<div className="row">
<div className="col-lg-12 d-inline-flex flex-wrap visual-card-list">
{filteredItems.map(item => this.renderListItem(item))}
</div>
</div>
)}
</div>
);
}
}
client/app/components/cards-list/CardsList.tsx (new file, 80 lines)
@@ -0,0 +1,80 @@
import { includes, isEmpty } from "lodash";
import PropTypes from "prop-types";
import React, { useState } from "react";
import Input from "antd/lib/input";
import Link from "@/components/Link";
import EmptyState from "@/components/items-list/components/EmptyState";

import "./CardsList.less";

export interface CardsListItem {
title: string;
imgSrc: string;
onClick?: () => void;
href?: string;
}

export interface CardsListProps {
items?: CardsListItem[];
showSearch?: boolean;
}

interface ListItemProps {
item: CardsListItem;
keySuffix: string;
}

function ListItem({ item, keySuffix }: ListItemProps) {
return (
<Link key={`card${keySuffix}`} className="visual-card" onClick={item.onClick} href={item.href}>
<img alt={item.title} src={item.imgSrc} />
<h3>{item.title}</h3>
</Link>
);
}

export default function CardsList({ items = [], showSearch = false }: CardsListProps) {
const [searchText, setSearchText] = useState("");
const filteredItems = items.filter(
item => isEmpty(searchText) || includes(item.title.toLowerCase(), searchText.toLowerCase())
);

return (
<div data-test="CardsList">
{showSearch && (
<div className="row p-10">
<div className="col-md-4 col-md-offset-4">
<Input.Search
placeholder="Search..."
onChange={(e: React.ChangeEvent<HTMLInputElement>) => setSearchText(e.target.value)}
autoFocus
/>
</div>
</div>
)}
{isEmpty(filteredItems) ? (
<EmptyState className="" />
) : (
<div className="row">
<div className="col-lg-12 d-inline-flex flex-wrap visual-card-list">
{filteredItems.map((item: CardsListItem, index: number) => (
<ListItem key={index} item={item} keySuffix={index.toString()} />
))}
</div>
</div>
)}
</div>
);
}

CardsList.propTypes = {
items: PropTypes.arrayOf(
PropTypes.shape({
title: PropTypes.string.isRequired,
imgSrc: PropTypes.string.isRequired,
onClick: PropTypes.func,
href: PropTypes.string,
})
),
showSearch: PropTypes.bool,
};
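A minimal usage sketch of the typed `CardsList`; the items below are made up for illustration:

```tsx
import React from "react";
import CardsList, { CardsListItem } from "@/components/cards-list/CardsList";

const items: CardsListItem[] = [
  { title: "PostgreSQL", imgSrc: "/static/images/db-logos/postgres.png", href: "data_sources/new?type=pg" },
  { title: "MySQL", imgSrc: "/static/images/db-logos/mysql.png", onClick: () => console.log("MySQL picked") },
];

export default function DataSourceTypePicker() {
  return <CardsList items={items} showSearch />;
}
```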
@@ -3,10 +3,11 @@ import PropTypes from "prop-types";
import Button from "antd/lib/button";
import Modal from "antd/lib/modal";
import { wrap as wrapDialog, DialogPropType } from "@/components/DialogWrapper";
import { FiltersType } from "@/components/Filters";
import VisualizationRenderer from "@/components/visualizations/VisualizationRenderer";
import VisualizationName from "@/components/visualizations/VisualizationName";

function ExpandedWidgetDialog({ dialog, widget }) {
function ExpandedWidgetDialog({ dialog, widget, filters }) {
return (
<Modal
{...dialog.props}
@@ -20,6 +21,7 @@ function ExpandedWidgetDialog({ dialog, widget }) {
<VisualizationRenderer
visualization={widget.visualization}
queryResult={widget.getQueryResult()}
filters={filters}
context="widget"
/>
</Modal>
@@ -29,6 +31,11 @@ function ExpandedWidgetDialog({ dialog, widget }) {
ExpandedWidgetDialog.propTypes = {
dialog: DialogPropType.isRequired,
widget: PropTypes.object.isRequired, // eslint-disable-line react/forbid-prop-types
filters: FiltersType,
};

ExpandedWidgetDialog.defaultProps = {
filters: [],
};

export default wrapDialog(ExpandedWidgetDialog);

@@ -209,7 +209,10 @@ class VisualizationWidget extends React.Component {

constructor(props) {
super(props);
this.state = { localParameters: props.widget.getLocalParameters() };
this.state = {
localParameters: props.widget.getLocalParameters(),
localFilters: props.filters,
};
}

componentDidMount() {
@@ -219,8 +222,12 @@ class VisualizationWidget extends React.Component {
onLoad();
}

onLocalFiltersChange = localFilters => {
this.setState({ localFilters });
};

expandWidget = () => {
ExpandedWidgetDialog.showModal({ widget: this.props.widget });
ExpandedWidgetDialog.showModal({ widget: this.props.widget, filters: this.state.localFilters });
};

editParameterMappings = () => {
@@ -260,6 +267,7 @@ class VisualizationWidget extends React.Component {
visualization={widget.visualization}
queryResult={widgetQueryResult}
filters={filters}
onFiltersChange={this.onLocalFiltersChange}
context="widget"
/>
</div>

@@ -132,13 +132,13 @@ ProfileImage.propTypes = {
Tags
*/

export function Tags({ url, onChange }) {
export function Tags({ url, onChange, showUnselectAll }) {
if (url === "") {
return null;
}
return (
<div className="m-b-10">
<TagsList tagsUrl={url} onUpdate={onChange} />
<TagsList tagsUrl={url} onUpdate={onChange} showUnselectAll={showUnselectAll} />
</div>
);
}
@@ -146,4 +146,6 @@ export function Tags({ url, onChange }) {
Tags.propTypes = {
url: PropTypes.string.isRequired,
onChange: PropTypes.func.isRequired,
showUnselectAll: PropTypes.bool,
unselectAllButtonTitle: PropTypes.string,
};

@@ -0,0 +1,37 @@
import React, { useCallback } from "react";
import PropTypes from "prop-types";
import recordEvent from "@/services/recordEvent";
import Checkbox from "antd/lib/checkbox";
import Tooltip from "antd/lib/tooltip";

export default function AutoLimitCheckbox({ available, checked, onChange }) {
const handleClick = useCallback(() => {
recordEvent("checkbox_auto_limit", "screen", "query_editor", { state: !checked });
onChange(!checked);
}, [checked, onChange]);

let tooltipMessage = null;
if (!available) {
tooltipMessage = "Auto limiting is not available for this Data Source type.";
} else {
tooltipMessage = "Auto limit results to first 1000 rows.";
}

return (
<Tooltip placement="top" title={tooltipMessage}>
<Checkbox
className="query-editor-controls-checkbox"
disabled={!available}
onClick={handleClick}
checked={available && checked}>
LIMIT 1000
</Checkbox>
</Tooltip>
);
}

AutoLimitCheckbox.propTypes = {
available: PropTypes.bool,
checked: PropTypes.bool.isRequired,
onChange: PropTypes.func.isRequired,
};
@@ -8,6 +8,7 @@ import KeyboardShortcuts, { humanReadableShortcut } from "@/services/KeyboardSho

import AutocompleteToggle from "./AutocompleteToggle";
import "./QueryEditorControls.less";
import AutoLimitCheckbox from "@/components/queries/QueryEditor/AutoLimitCheckbox";

export function ButtonTooltip({ title, shortcut, ...props }) {
shortcut = humanReadableShortcut(shortcut, 1); // show only primary shortcut
@@ -38,6 +39,7 @@ export default function EditorControl({
saveButtonProps,
executeButtonProps,
autocompleteToggleProps,
autoLimitCheckboxProps,
dataSourceSelectorProps,
}) {
useEffect(() => {
@@ -84,6 +86,7 @@ export default function EditorControl({
onToggle={autocompleteToggleProps.onToggle}
/>
)}
{autoLimitCheckboxProps !== false && <AutoLimitCheckbox {...autoLimitCheckboxProps} />}
{dataSourceSelectorProps === false && <span className="query-editor-controls-spacer" />}
{dataSourceSelectorProps !== false && (
<Select
@@ -153,6 +156,10 @@ EditorControl.propTypes = {
onToggle: PropTypes.func,
}),
]),
autoLimitCheckboxProps: PropTypes.oneOfType([
PropTypes.bool, // `false` to hide
PropTypes.shape(AutoLimitCheckbox.propTypes),
]),
dataSourceSelectorProps: PropTypes.oneOfType([
PropTypes.bool, // `false` to hide
PropTypes.shape({
@@ -175,5 +182,6 @@ EditorControl.defaultProps = {
saveButtonProps: false,
executeButtonProps: false,
autocompleteToggleProps: false,
autoLimitCheckboxProps: false,
dataSourceSelectorProps: false,
};

@@ -21,6 +21,12 @@
}
}

.query-editor-controls-checkbox {
display: inline-block;
white-space: nowrap;
margin: auto 5px;
}

.query-editor-controls-spacer {
flex: 1 1 auto;
height: 35px; // same as Antd <Select>

@@ -1,7 +1,8 @@
import { map, find } from "lodash";
import { isEqual, map, find, fromPairs } from "lodash";
import React, { useState, useMemo, useEffect, useRef } from "react";
import PropTypes from "prop-types";
import useQueryResultData from "@/lib/useQueryResultData";
import useImmutableCallback from "@/lib/hooks/useImmutableCallback";
import Filters, { FiltersType, filterData } from "@/components/Filters";
import { VisualizationType } from "@redash/viz/lib";
import { Renderer } from "@/components/visualizations/visualizationComponents";
@@ -24,23 +25,41 @@ function combineFilters(localFilters, globalFilters) {
});
}

function areFiltersEqual(a, b) {
if (a.length !== b.length) {
return false;
}

a = fromPairs(map(a, item => [item.name, item]));
b = fromPairs(map(b, item => [item.name, item]));

return isEqual(a, b);
}

export default function VisualizationRenderer(props) {
const data = useQueryResultData(props.queryResult);
const [filters, setFilters] = useState(data.filters);
const [filters, setFilters] = useState(() => combineFilters(data.filters, props.filters)); // lazy initialization
const filtersRef = useRef();
filtersRef.current = filters;

const handleFiltersChange = useImmutableCallback(newFilters => {
if (!areFiltersEqual(newFilters, filters)) {
setFilters(newFilters);
props.onFiltersChange(newFilters);
}
});

// Reset local filters when query results updated
useEffect(() => {
setFilters(combineFilters(data.filters, props.filters));
}, [data.filters, props.filters]);
handleFiltersChange(combineFilters(data.filters, props.filters));
}, [data.filters, props.filters, handleFiltersChange]);

// Update local filters when global filters changed.
// For correct behavior need to watch only `props.filters` here,
// therefore using ref to access current local filters
useEffect(() => {
setFilters(combineFilters(filtersRef.current, props.filters));
}, [props.filters]);
handleFiltersChange(combineFilters(filtersRef.current, props.filters));
}, [props.filters, handleFiltersChange]);

const filteredData = useMemo(
() => ({
@@ -66,7 +85,7 @@ export default function VisualizationRenderer(props) {
options={options}
data={filteredData}
visualizationName={visualization.name}
addonBefore={showFilters && <Filters filters={filters} onChange={setFilters} />}
addonBefore={showFilters && <Filters filters={filters} onChange={handleFiltersChange} />}
/>
);
}
@@ -74,12 +93,14 @@ export default function VisualizationRenderer(props) {
VisualizationRenderer.propTypes = {
visualization: VisualizationType.isRequired,
queryResult: PropTypes.object.isRequired, // eslint-disable-line react/forbid-prop-types
filters: FiltersType,
showFilters: PropTypes.bool,
filters: FiltersType,
onFiltersChange: PropTypes.func,
context: PropTypes.oneOf(["query", "widget"]).isRequired,
};

VisualizationRenderer.defaultProps = {
filters: [],
showFilters: true,
filters: [],
onFiltersChange: () => {},
};

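The new `areFiltersEqual` guard compares filter sets by name rather than by array position, which is what lets `handleFiltersChange` skip redundant updates when the same filters arrive in a different order. A self-contained sketch of that behavior (the `Filter` shape is trimmed to the fields that matter here):

```ts
import { isEqual, map, fromPairs } from "lodash";

type Filter = { name: string; current?: string | string[] };

function areFiltersEqual(a: Filter[], b: Filter[]) {
  if (a.length !== b.length) {
    return false;
  }
  // Key both lists by filter name so ordering is irrelevant.
  return isEqual(fromPairs(map(a, f => [f.name, f])), fromPairs(map(b, f => [f.name, f])));
}

// Same filters, different order: equal, so no extra onFiltersChange call is made.
console.log(
  areFiltersEqual(
    [{ name: "status", current: "done" }, { name: "owner", current: "alice" }],
    [{ name: "owner", current: "alice" }, { name: "status", current: "done" }]
  )
); // true
```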
@@ -6,6 +6,7 @@ import { Renderer as VisRenderer, Editor as VisEditor, updateVisualizationsSetti
import { clientConfig } from "@/services/auth";

import countriesDataUrl from "@redash/viz/lib/visualizations/choropleth/maps/countries.geo.json";
import usaDataUrl from "@redash/viz/lib/visualizations/choropleth/maps/usa-albers.geo.json";
import subdivJapanDataUrl from "@redash/viz/lib/visualizations/choropleth/maps/japan.prefectures.geo.json";

function wrapComponentWithSettings(WrappedComponent) {
@@ -17,10 +18,40 @@ function wrapComponentWithSettings(WrappedComponent) {
countries: {
name: "Countries",
url: countriesDataUrl,
fieldNames: {
name: "Short name",
name_long: "Full name",
abbrev: "Abbreviated name",
iso_a2: "ISO code (2 letters)",
iso_a3: "ISO code (3 letters)",
iso_n3: "ISO code (3 digits)",
},
},
usa: {
name: "USA",
url: usaDataUrl,
fieldNames: {
name: "Name",
ns_code: "National Standard ANSI Code (8-character)",
geoid: "Geographic ID",
usps_abbrev: "USPS Abbreviation",
fips_code: "FIPS Code (2-character)",
},
},
subdiv_japan: {
name: "Japan/Prefectures",
url: subdivJapanDataUrl,
fieldNames: {
name: "Name",
name_alt: "Name (alternative)",
name_local: "Name (local)",
iso_3166_2: "ISO-3166-2",
postal: "Postal Code",
type: "Type",
type_en: "Type (EN)",
region: "Region",
region_code: "Region Code",
},
},
},
...pick(clientConfig, [

@@ -11,6 +11,15 @@ export const IntervalEnum = {
MILLISECONDS: "millisecond",
};

export const AbbreviatedTimeUnits = {
SECONDS: "s",
MINUTES: "m",
HOURS: "h",
DAYS: "d",
WEEKS: "w",
MILLISECONDS: "ms",
};

export function formatDateTime(value) {
if (!value) {
return "";

@@ -95,7 +95,7 @@ class DashboardList extends React.Component {
onChange={controller.updateSearch}
/>
<Sidebar.Menu items={this.sidebarMenu} selected={controller.params.currentPage} />
<Sidebar.Tags url="api/dashboards/tags" onChange={controller.updateSelectedTags} />
<Sidebar.Tags url="api/dashboards/tags" onChange={controller.updateSelectedTags} showUnselectAll />
</Layout.Sidebar>
<Layout.Content>
<div data-test="DashboardLayoutContent">

@@ -178,7 +178,8 @@ function DashboardControl({ dashboardOptions }) {
const showPublishButton = dashboard.is_draft;
const showRefreshButton = true;
const showFullscreenButton = !dashboard.is_draft;
const showShareButton = dashboard.publicAccessEnabled || (canEditDashboard && !dashboard.is_draft);
const canShareDashboard = canEditDashboard && !dashboard.is_draft;
const showShareButton = !clientConfig.disablePublicUrls && (dashboard.publicAccessEnabled || canShareDashboard);
const showMoreOptionsButton = canEditDashboard;
return (
<div className="dashboard-control">

@@ -134,7 +134,7 @@ class QueriesList extends React.Component {
onChange={controller.updateSearch}
/>
<Sidebar.Menu items={this.sidebarMenu} selected={controller.params.currentPage} />
<Sidebar.Tags url="api/queries/tags" onChange={controller.updateSelectedTags} />
<Sidebar.Tags url="api/queries/tags" onChange={controller.updateSelectedTags} showUnselectAll />
</Layout.Sidebar>
<Layout.Content>
{controller.isLoaded && controller.isEmpty ? (

@@ -26,6 +26,7 @@ import { getEditorComponents } from "@/components/queries/editor-components";
import useQuery from "./hooks/useQuery";
import useVisualizationTabHandler from "./hooks/useVisualizationTabHandler";
import useAutocompleteFlags from "./hooks/useAutocompleteFlags";
import useAutoLimitFlags from "./hooks/useAutoLimitFlags";
import useQueryExecute from "./hooks/useQueryExecute";
import useQueryResultData from "@/lib/useQueryResultData";
import useQueryDataSources from "./hooks/useQueryDataSources";
@@ -44,7 +45,6 @@ import useUnsavedChangesAlert from "./hooks/useUnsavedChangesAlert";
import "./QuerySource.less";

function chooseDataSourceId(dataSourceIds, availableDataSources) {
dataSourceIds = map(dataSourceIds, v => parseInt(v, 10));
availableDataSources = map(availableDataSources, ds => ds.id);
return find(dataSourceIds, id => includes(availableDataSources, id)) || null;
}
@@ -77,6 +77,7 @@ function QuerySource(props) {

const editorRef = useRef(null);
const [autocompleteAvailable, autocompleteEnabled, toggleAutocomplete] = useAutocompleteFlags(schema);
const [autoLimitAvailable, autoLimitChecked, setAutoLimit] = useAutoLimitFlags(dataSource, query, setQuery);

const [handleQueryEditorChange] = useDebouncedCallback(queryText => {
setQuery(extend(query.clone(), { query: queryText }));
@@ -306,6 +307,11 @@ function QuerySource(props) {
enabled: autocompleteEnabled,
onToggle: toggleAutocomplete,
}}
autoLimitCheckboxProps={{
available: autoLimitAvailable,
checked: autoLimitChecked,
onChange: setAutoLimit,
}}
dataSourceSelectorProps={
dataSource
? {

@@ -123,7 +123,7 @@ export default function QueryPageHeader({
},
{
showAPIKey: {
isAvailable: !queryFlags.isNew,
isAvailable: !clientConfig.disablePublicUrls && !queryFlags.isNew,
title: "Show API Key",
onClick: openApiKeyDialog,
},
@@ -199,7 +199,7 @@ export default function QueryPageHeader({

{!queryFlags.isNew && (
<Dropdown overlay={moreActionsMenu} trigger={["click"]}>
<Button>
<Button data-test="QueryPageHeaderMoreButton">
<EllipsisOutlinedIcon rotate={90} />
</Button>
</Dropdown>

@@ -1,4 +1,4 @@
import React, { useMemo, useCallback } from "react";
import React, { useState, useMemo, useCallback } from "react";
import PropTypes from "prop-types";
import cx from "classnames";
import { find, orderBy } from "lodash";
@@ -120,6 +120,8 @@ export default function QueryVisualizationTabs({
const isFirstVisualization = useCallback(visId => visId === orderedVisualizations[0].id, [orderedVisualizations]);
const isMobile = useMedia({ maxWidth: 768 });

const [filters, setFilters] = useState([]);

return (
<Tabs
{...tabsProps}
@@ -142,7 +144,13 @@ export default function QueryVisualizationTabs({
/>
}>
{queryResult ? (
<VisualizationRenderer visualization={visualization} queryResult={queryResult} context="query" />
<VisualizationRenderer
visualization={visualization}
queryResult={queryResult}
context="query"
filters={filters}
onFiltersChange={setFilters}
/>
) : (
<EmptyState
title="Query Has no Result"

client/app/pages/queries/hooks/useAutoLimitFlags.js (new file, 24 lines)
@@ -0,0 +1,24 @@
import { useCallback, useState } from "react";
import localOptions from "@/lib/localOptions";
import { get, extend } from "lodash";

function isAutoLimitAvailable(dataSource) {
return get(dataSource, "supports_auto_limit", false);
}

export default function useAutoLimitFlags(dataSource, query, setQuery) {
const isAvailable = isAutoLimitAvailable(dataSource);
const [isChecked, setIsChecked] = useState(localOptions.get("applyAutoLimit", true));
query.options.apply_auto_limit = isAvailable && isChecked;

const setAutoLimit = useCallback(
state => {
setIsChecked(state);
localOptions.set("applyAutoLimit", state);
setQuery(extend(query.clone(), { options: { ...query.options, apply_auto_limit: isAvailable && state } }));
},
[query, setQuery, isAvailable]
);

return [isAvailable, isChecked, setAutoLimit];
}
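How the hook is wired up, following the `QuerySource` hunk earlier in this changeset; the `Query`/data-source types are simplified stand-ins:

```tsx
import React from "react";
import useAutoLimitFlags from "@/pages/queries/hooks/useAutoLimitFlags";
import AutoLimitCheckbox from "@/components/queries/QueryEditor/AutoLimitCheckbox";

type Props = { dataSource: { supports_auto_limit?: boolean }; query: any; setQuery: (q: any) => void };

function AutoLimitControl({ dataSource, query, setQuery }: Props) {
  // query.options.apply_auto_limit ends up reflecting `available && checked`.
  const [available, checked, setAutoLimit] = useAutoLimitFlags(dataSource, query, setQuery);
  return <AutoLimitCheckbox available={available} checked={checked} onChange={setAutoLimit} />;
}

export default AutoLimitControl;
```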
@@ -1,7 +1,20 @@
import { noop } from "lodash";
import { noop, extend, pick } from "lodash";
import { useCallback, useState } from "react";
import url from "url";
import qs from "query-string";
import { Query } from "@/services/query";

function keepCurrentUrlParams(targetUrl) {
const currentUrlParams = qs.parse(window.location.search);
targetUrl = url.parse(targetUrl);
const targetUrlParams = qs.parse(targetUrl.search);
return url.format(
extend(pick(targetUrl, ["protocol", "auth", "host", "pathname", "hash"]), {
search: qs.stringify(extend(currentUrlParams, targetUrlParams)),
})
);
}

export default function useDuplicateQuery(query) {
const [isDuplicating, setIsDuplicating] = useState(false);

@@ -16,7 +29,7 @@ export default function useDuplicateQuery(query) {
setIsDuplicating(true);
Query.fork({ id: query.id })
.then(newQuery => {
tab.location = newQuery.getUrl(true);
tab.location = keepCurrentUrlParams(newQuery.getUrl(true));
})
.finally(() => {
setIsDuplicating(false);

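A worked, self-contained sketch of the merge `keepCurrentUrlParams` performs, adapted to take the current search string as an argument so it can run outside the browser; URLs and parameter names are made up:

```ts
import { extend, pick } from "lodash";
import url from "url";
import qs from "query-string";

function keepCurrentUrlParams(targetUrl: string, currentSearch: string) {
  const currentUrlParams = qs.parse(currentSearch);
  const parsed = url.parse(targetUrl);
  const targetUrlParams = qs.parse(parsed.search || "");
  return url.format(
    extend(pick(parsed, ["protocol", "auth", "host", "pathname", "hash"]), {
      // Target params win on conflicts; everything else from the current page carries over.
      search: qs.stringify(extend(currentUrlParams, targetUrlParams)),
    })
  );
}

console.log(keepCurrentUrlParams("/queries/42/source?p_org=widgets", "?p_org=acme&utm=docs"));
// "/queries/42/source?p_org=widgets&utm=docs"
```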
@@ -1,40 +1,47 @@
import React, { useState, useEffect, useMemo, useRef, useCallback } from "react";
import React, { useState, useEffect, useCallback } from "react";
import Button from "antd/lib/button";
import Modal from "antd/lib/modal";
import Alert from "antd/lib/alert";
import DynamicForm from "@/components/dynamic-form/DynamicForm";
import { wrap as wrapDialog, DialogPropType } from "@/components/DialogWrapper";
import recordEvent from "@/services/recordEvent";

const formFields = [
{ required: true, name: "name", title: "Name", type: "text", autoFocus: true },
{ required: true, name: "email", title: "Email", type: "email" },
];

function CreateUserDialog({ dialog }) {
const [error, setError] = useState(null);
const formRef = useRef();

useEffect(() => {
recordEvent("view", "page", "users/new");
}, []);

const createUser = useCallback(() => {
if (formRef.current) {
formRef.current.validateFieldsAndScroll((err, values) => {
if (!err) {
dialog.close(values).catch(setError);
}
});
}
}, [dialog]);

const formFields = useMemo(() => {
const common = { required: true, props: { onPressEnter: createUser } };
return [
{ ...common, name: "name", title: "Name", type: "text", autoFocus: true },
{ ...common, name: "email", title: "Email", type: "email" },
];
}, [createUser]);
const handleSubmit = useCallback(values => dialog.close(values).catch(setError), [dialog]);

return (
<Modal {...dialog.props} title="Create a New User" okText="Create" onOk={createUser}>
<DynamicForm fields={formFields} ref={formRef} hideSubmitButton />
{error && <Alert message={error.message} type="error" showIcon />}
<Modal
{...dialog.props}
title="Create a New User"
footer={[
<Button key="cancel" {...dialog.props.cancelButtonProps} onClick={dialog.dismiss}>
Cancel
</Button>,
<Button
key="submit"
{...dialog.props.okButtonProps}
htmlType="submit"
type="primary"
form="userForm"
data-test="SaveUserButton">
Create
</Button>,
]}
wrapProps={{
"data-test": "CreateUserDialog",
}}>
<DynamicForm id="userForm" fields={formFields} onSubmit={handleSubmit} hideSubmitButton />
{error && <Alert message={error.message} type="error" showIcon data-test="CreateUserErrorAlert" />}
</Modal>
);
}

@@ -2,6 +2,7 @@ import debug from "debug";
import { includes, extend } from "lodash";
import location from "@/services/location";
import { axios } from "@/services/axios";
import { notifySessionRestored } from "@/services/restoreSession";

export const currentUser = {
canEdit(object) {
@@ -46,6 +47,9 @@ export const Auth = {
isAuthenticated() {
return session.loaded && session.user.id;
},
getLoginUrl() {
return AuthUrls.Login;
},
setLoginUrl(loginUrl) {
AuthUrls.Login = loginUrl;
},
@@ -94,6 +98,7 @@ export const Auth = {
.then(() => {
if (Auth.isAuthenticated()) {
logger("Loaded session");
notifySessionRestored();
return session;
}
logger("Need to login, redirecting");

@@ -1,15 +1,48 @@
import { get, includes } from "lodash";
import axiosLib from "axios";
import createAuthRefreshInterceptor from "axios-auth-refresh";
import { Auth } from "@/services/auth";
import qs from "query-string";
import Cookies from "js-cookie";
import { restoreSession } from "@/services/restoreSession";

export const axios = axiosLib.create({
paramsSerializer: params => qs.stringify(params),
xsrfCookieName: "csrf_token",
xsrfHeaderName: "X-CSRF-TOKEN",
});

const getData = ({ data }) => data;
axios.interceptors.response.use(response => response.data);

axios.interceptors.response.use(getData);
export const csrfRefreshInterceptor = createAuthRefreshInterceptor(
axios,
error => {
const message = get(error, "response.data.message");
if (error.isAxiosError && includes(message, "CSRF")) {
return axios.get("/ping");
} else {
return Promise.reject(error);
}
},
{ statusCodes: [400] }
);

export const sessionRefreshInterceptor = createAuthRefreshInterceptor(
axios,
error => {
const status = parseInt(get(error, "response.status"));
const message = get(error, "response.data.message");
// TODO: In axios@0.9.1 this check could be replaced with { skipAuthRefresh: true } flag. See axios-auth-refresh docs
const requestUrl = get(error, "config.url");
if (error.isAxiosError && (status === 401 || includes(message, "Please login")) && requestUrl !== "api/session") {
return restoreSession();
}
return Promise.reject(error);
},
{
statusCodes: [401, 404],
pauseInstanceWhileRefreshing: false, // According to docs, `false` is default value, but in fact it's not :-)
}
);

axios.interceptors.request.use(config => {
const apiKey = Auth.getApiKey();
@@ -17,10 +50,5 @@ axios.interceptors.request.use(config => {
config.headers.Authorization = `Key ${apiKey}`;
}

const csrfToken = Cookies.get("csrf_token");
if (csrfToken) {
config.headers.common["X-CSRF-TOKEN"] = csrfToken;
}

return config;
});

@@ -435,11 +435,11 @@ class QueryResult {
return `${queryName.replace(/ /g, "_") + moment(this.getUpdatedAt()).format("_YYYY_MM_DD")}.${fileType}`;
}

static getByQueryId(id, parameters, maxAge) {
static getByQueryId(id, parameters, applyAutoLimit, maxAge) {
const queryResult = new QueryResult();

axios
.post(`api/queries/${id}/results`, { id, parameters, max_age: maxAge })
.post(`api/queries/${id}/results`, { id, parameters, apply_auto_limit: applyAutoLimit, max_age: maxAge })
.then(response => {
queryResult.update(response);

@@ -454,13 +454,14 @@ class QueryResult {
return queryResult;
}

static get(dataSourceId, query, parameters, maxAge, queryId) {
static get(dataSourceId, query, parameters, applyAutoLimit, maxAge, queryId) {
const queryResult = new QueryResult();

const params = {
data_source_id: dataSourceId,
parameters,
query,
apply_auto_limit: applyAutoLimit,
max_age: maxAge,
};

@@ -130,7 +130,8 @@ export class Query {
}

getQueryResult(maxAge) {
const execute = () => QueryResult.getByQueryId(this.id, this.getParameters().getExecutionValues(), maxAge);
const execute = () =>
QueryResult.getByQueryId(this.id, this.getParameters().getExecutionValues(), this.getAutoLimit(), maxAge);
return this.prepareQueryResultExecution(execute, maxAge);
}

@@ -141,7 +142,8 @@ export class Query {
}

const parameters = this.getParameters().getExecutionValues({ joinListValues: true });
const execute = () => QueryResult.get(this.data_source_id, queryText, parameters, maxAge, this.id);
const execute = () =>
QueryResult.get(this.data_source_id, queryText, parameters, this.getAutoLimit(), maxAge, this.id);
return this.prepareQueryResultExecution(execute, maxAge);
}

@@ -184,6 +186,10 @@ export class Query {
return this.$parameters;
}

getAutoLimit() {
return this.options.apply_auto_limit;
}

getParametersDefs(update = true) {
return this.getParameters().get(update);
}

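Net effect of the service changes above: the per-query auto-limit flag travels in the results request body. A hedged sketch of the resulting call (IDs and parameter values are made up; `max_age: 0` is used here to ask for a fresh result):

```ts
import { axios } from "@/services/axios";

// What QueryResult.getByQueryId now posts for a query whose options.apply_auto_limit is true:
axios.post("api/queries/42/results", {
  id: 42,
  parameters: { org: "acme" },
  apply_auto_limit: true, // supplied via Query.getAutoLimit()
  max_age: 0,
});
```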
client/app/services/restoreSession.jsx (new file, 91 lines)
@@ -0,0 +1,91 @@
import { map } from "lodash";
import React from "react";
import Modal from "antd/lib/modal";
import { Auth } from "@/services/auth";

const SESSION_RESTORED_MESSAGE = "redash_session_restored";

export function notifySessionRestored() {
if (window.opener) {
window.opener.postMessage({ type: SESSION_RESTORED_MESSAGE }, window.location.origin);
}
}

function getPopupPosition(width, height) {
const windowLeft = window.screenX;
const windowTop = window.screenY;
const windowWidth = window.innerWidth;
const windowHeight = window.innerHeight;

return {
left: Math.floor((windowWidth - width) / 2 + windowLeft),
top: Math.floor((windowHeight - height) / 2 + windowTop),
width: Math.floor(width),
height: Math.floor(height),
};
}

function showRestoreSessionPrompt(loginUrl, onSuccess) {
let popup = null;

Modal.warning({
content: "Your session has expired. Please login to continue.",
okText: (
<React.Fragment>
<i className="fa fa-external-link m-r-5" />
Login
</React.Fragment>
),
centered: true,
mask: true,
maskClosable: false,
keyboard: false,
onOk: closeModal => {
if (popup && !popup.closed) {
popup.focus();
return; // popup already shown
}

const popupOptions = {
...getPopupPosition(640, 640),
menubar: "no",
toolbar: "no",
location: "yes",
resizable: "yes",
scrollbars: "yes",
status: "yes",
};

popup = window.open(loginUrl, "Restore Session", map(popupOptions, (value, key) => `${key}=${value}`).join(","));

const handlePostMessage = event => {
if (event.data.type === SESSION_RESTORED_MESSAGE) {
if (popup) {
popup.close();
}
popup = null;
window.removeEventListener("message", handlePostMessage);
closeModal();
onSuccess();
}
};

window.addEventListener("message", handlePostMessage, false);
},
});
}

let restoreSessionPromise = null;

export function restoreSession() {
if (!restoreSessionPromise) {
restoreSessionPromise = new Promise(resolve => {
showRestoreSessionPrompt(Auth.getLoginUrl(), () => {
restoreSessionPromise = null;
resolve();
});
});
}

return restoreSessionPromise;
}
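How the restore-session pieces in this changeset are intended to interact, summarized as a sketch (the request below is illustrative):

```ts
import { axios } from "@/services/axios";

// 1. A request fails with 401, so sessionRefreshInterceptor calls restoreSession().
// 2. restoreSession() shows the modal; clicking Login opens the login page in a popup.
// 3. After login, Auth.requireSession() in the popup calls notifySessionRestored(), which
//    posts SESSION_RESTORED_MESSAGE back to window.opener.
// 4. The opener closes the popup, resolves the shared promise, and axios-auth-refresh
//    retries the original request, so the caller below never sees the 401.
axios.get("api/dashboards/my-dashboard").then(dashboard => {
  console.log(dashboard);
});
```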
@@ -18,4 +18,6 @@ DOMPurify.addHook("afterSanitizeAttributes", function(node) {
}
});

export { DOMPurify };

export default DOMPurify.sanitize;

@@ -9,6 +9,37 @@ describe("Dashboard Sharing", () => {
this.dashboardId = id;
this.dashboardUrl = `/dashboards/${id}`;
});
cy.updateOrgSettings({ disable_public_urls: false });
});

it("is unavailable when public urls feature is disabled", function() {
const queryData = {
query: "select 1",
};

const position = { autoHeight: false, sizeY: 6 };
createQueryAndAddWidget(this.dashboardId, queryData, { position })
.then(() => {
cy.visit(this.dashboardUrl);
return shareDashboard();
})
.then(secretAddress => {
// disable the feature
cy.updateOrgSettings({ disable_public_urls: true });

// check the feature is disabled
cy.visit(this.dashboardUrl);
cy.getByTestId("DashboardMoreButton").should("exist");
cy.getByTestId("OpenShareForm").should("not.exist");

cy.logout();
cy.visit(secretAddress);
cy.wait(1500); // eslint-disable-line cypress/no-unnecessary-waiting
cy.getByTestId("TableVisualization").should("not.exist");

cy.login();
cy.updateOrgSettings({ disable_public_urls: false });
});
});

it("is possible if all queries are safe", function() {

@@ -1,6 +1,43 @@
describe("Embedded Queries", () => {
beforeEach(() => {
cy.login();
cy.updateOrgSettings({ disable_public_urls: false });
});

it("is unavailable when public urls feature is disabled", () => {
cy.createQuery({ query: "select name from users order by name" }).then(query => {
cy.visit(`/queries/${query.id}/source`);
cy.getByTestId("ExecuteButton").click();
cy.getByTestId("QueryPageVisualizationTabs", { timeout: 10000 }).should("exist");
cy.clickThrough(`
QueryControlDropdownButton
ShowEmbedDialogButton
`);
cy.getByTestId("EmbedIframe")
.invoke("text")
.then(embedUrl => {
// disable the feature
cy.updateOrgSettings({ disable_public_urls: true });

// check the feature is disabled
cy.visit(`/queries/${query.id}/source`);
cy.getByTestId("QueryPageHeaderMoreButton").click();
cy.get(".ant-dropdown-menu-item")
.should("exist")
.should("not.contain", "Show API Key");
cy.getByTestId("QueryControlDropdownButton").click();
cy.get(".ant-dropdown-menu-item").should("exist");
cy.getByTestId("ShowEmbedDialogButton").should("not.exist");

cy.logout();
cy.visit(embedUrl);
cy.wait(1500); // eslint-disable-line cypress/no-unnecessary-waiting
cy.getByTestId("TableVisualization").should("not.exist");

cy.login();
cy.updateOrgSettings({ disable_public_urls: false });
});
});
});

it("can be shared without parameters", () => {

client/cypress/integration/user/create_user_spec.js (new file, 28 lines)
@@ -0,0 +1,28 @@
describe("Create User", () => {
beforeEach(() => {
cy.login();
cy.visit("/users/new");
});

const fillUserFormAndSubmit = (name, email) => {
cy.getByTestId("CreateUserDialog").within(() => {
cy.getByTestId("Name").type(name);
cy.getByTestId("Email").type(email);
});
cy.getByTestId("SaveUserButton").click();
};

it("creates a new user", () => {
// delete existing "new-user@redash.io"
cy.request("GET", "api/users?q=new-user")
.then(({ body }) => body.results.filter(user => user.email === "new-user@redash.io"))
.each(user => cy.request("DELETE", `api/users/${user.id}`));

fillUserFormAndSubmit("New User", "admin@redash.io");

cy.getByTestId("CreateUserErrorAlert").should("contain", "Email already taken");

fillUserFormAndSubmit("{selectall}New User", "{selectall}new-user@redash.io");
cy.contains("Saved.");
});
});
@@ -48,11 +48,11 @@ describe("Choropleth", () => {
cy.clickThrough(`
VisualizationEditor.Tabs.General
Choropleth.Editor.MapType
Choropleth.Editor.MapType.Countries
Choropleth.Editor.MapType.countries
Choropleth.Editor.KeyColumn
Choropleth.Editor.KeyColumn.name
Choropleth.Editor.KeyType
Choropleth.Editor.KeyType.name
Choropleth.Editor.TargetField
Choropleth.Editor.TargetField.name
Choropleth.Editor.ValueColumn
Choropleth.Editor.ValueColumn.value
`);

@@ -165,3 +165,7 @@ Cypress.Commands.add("addDestinationSubscription", (alertId, destinationName) =>
return body;
});
});

Cypress.Commands.add("updateOrgSettings", settings => {
return post({ url: "api/settings/organization", body: settings }).then(({ body }) => body);
});

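Usage sketch of the new `cy.updateOrgSettings` command, following the pattern of the sharing specs above (the spec itself is hypothetical):

```ts
describe("Feature behind an org setting", () => {
  beforeEach(() => {
    cy.login();
    // Flip the org-level flag for this suite, then restore the default afterwards.
    cy.updateOrgSettings({ disable_public_urls: true });
  });

  afterEach(() => {
    cy.updateOrgSettings({ disable_public_urls: false });
  });

  it("hides the public URL entry points", () => {
    // Assertions would mirror the sharing specs above, e.g.:
    // cy.getByTestId("OpenShareForm").should("not.exist");
  });
});
```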
package-lock.json (generated, 36 lines)
@@ -17,7 +17,7 @@
"@ant-design/colors": {
"version": "3.2.2",
"resolved": "https://registry.npmjs.org/@ant-design/colors/-/colors-3.2.2.tgz",
"integrity": "sha1-WtQ9YZ6RHzSI66wwPWBuZqhCOQM=",
"integrity": "sha512-YKgNbG2dlzqMhA9NtI3/pbY16m3Yl/EeWBRa+lB1X1YaYxHrxNexiQYCLTWO/uDvAjLFMEDU+zR901waBtMtjQ==",
"requires": {
"tinycolor2": "^1.4.1"
}
@@ -4153,7 +4153,7 @@
"array-tree-filter": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/array-tree-filter/-/array-tree-filter-2.1.0.tgz",
"integrity": "sha1-hzrAD+yDdJ8lWsjdCDgUtPYykZA="
"integrity": "sha512-4ROwICNlNw/Hqa9v+rk5h22KjmzB1JGTMVKP2AKJBOCgb0yL0ASf0+YvCcLNNwquOHNX48jkeZIJ3a+oOQqKcw=="
},
"array-union": {
"version": "1.0.2",
@@ -4394,6 +4394,11 @@
}
}
},
"axios-auth-refresh": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/axios-auth-refresh/-/axios-auth-refresh-3.0.0.tgz",
"integrity": "sha512-0XJnJY711f7opdT+b/au/xw1g4MYrjntXB8Oy5l48plbzOWLjUtJ+m8CtiNLgN3MAvGFJ/Q1NtQ7WKf2euKu6g=="
},
"axobject-query": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-2.1.1.tgz",
@@ -6018,12 +6023,6 @@
"monotone-convex-hull-2d": "^1.0.1"
}
},
"cookie": {
"version": "0.3.1",
"resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz",
"integrity": "sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s=",
"dev": true
},
"cookie-signature": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz",
@@ -6053,7 +6052,7 @@
"copy-to-clipboard": {
"version": "3.3.1",
"resolved": "https://registry.npmjs.org/copy-to-clipboard/-/copy-to-clipboard-3.3.1.tgz",
"integrity": "sha1-EVqhqZmP+rYZb5MHatbaO5E2Yq4=",
"integrity": "sha512-i13qo6kIHTTpCm8/Wup+0b1mVWETvu2kIMzKoK8FpkLkFxlt0znUAHcMzox+T8sPlqtZXq3CulEjQHsYiGFJUw==",
"requires": {
"toggle-selection": "^1.0.6"
}
@@ -6927,7 +6926,7 @@
"dom-align": {
"version": "1.12.0",
"resolved": "https://registry.npmjs.org/dom-align/-/dom-align-1.12.0.tgz",
"integrity": "sha1-VvtxVt8LkQmYMDZNLUj4iWP1opw="
"integrity": "sha512-YkoezQuhp3SLFGdOlr5xkqZ640iXrnHAwVYcDg8ZKRUtO7mSzSC2BA5V0VuyAwPSJA4CLIc6EDDJh4bEsD2+zA=="
},
"dom-converter": {
"version": "0.2.0",
@@ -8434,6 +8433,12 @@
"vary": "~1.1.2"
},
"dependencies": {
"cookie": {
"version": "0.3.1",
"resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz",
"integrity": "sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s=",
"dev": true
},
"debug": {
"version": "2.6.9",
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
@@ -12142,11 +12147,6 @@
"integrity": "sha512-M7kLczedRMYX4L8Mdh4MzyAMM9O5osx+4FcOQuTvr3A9F2D9S5JXheN0ewNbrvK2UatkTRhL5ejGmGSjNMiZuw==",
"dev": true
},
"js-cookie": {
"version": "2.2.1",
"resolved": "https://registry.npmjs.org/js-cookie/-/js-cookie-2.2.1.tgz",
"integrity": "sha512-HvdH2LzI/EAZcUwA8+0nKNtWHqS+ZmijLA30RwZA0bo7ToCckjK5MkGhjED9KoRcXO6BaGI3I9UIzSA1FKFPOQ=="
},
"js-tokens": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
@@ -16839,7 +16839,7 @@
"resize-observer-polyfill": {
"version": "1.5.1",
"resolved": "https://registry.npmjs.org/resize-observer-polyfill/-/resize-observer-polyfill-1.5.1.tgz",
"integrity": "sha1-DpAg3T0hAkRY1OvSfiPkAmmBBGQ="
"integrity": "sha512-LwZrotdHOo12nQuZlHEmtuXdqGoOD0OhaxopaNFxWzInpEgaLWoVuAMbTzixuosCx2nEG58ngzW3vxdWoxIgdg=="
|
||||
},
|
||||
"resolve": {
|
||||
"version": "1.10.0",
|
||||
@@ -17327,7 +17327,7 @@
|
||||
"shallowequal": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/shallowequal/-/shallowequal-1.1.0.tgz",
|
||||
"integrity": "sha1-GI1SHelbkIdAT9TctosT3wrk5/g="
|
||||
"integrity": "sha512-y0m1JoUZSlPAjXVtPPW70aZWfIL/dSP7AFkRnniLCrK/8MDKog3TySTBmckD+RObVxH0v4Tox67+F14PdED2oQ=="
|
||||
},
|
||||
"sharkdown": {
|
||||
"version": "0.1.1",
|
||||
@@ -19946,7 +19946,7 @@
|
||||
"warning": {
|
||||
"version": "4.0.3",
|
||||
"resolved": "https://registry.npmjs.org/warning/-/warning-4.0.3.tgz",
|
||||
"integrity": "sha1-Fungd+uKhtavfWSqHgX9hbRnjKM=",
|
||||
"integrity": "sha512-rpJyN222KWIvHJ/F53XSZv0Zl/accqHR8et1kpaMTD/fLCRxtV8iX8czMzY7sVZupTI3zcUTg8eycS2kNF9l6w==",
|
||||
"requires": {
|
||||
"loose-envify": "^1.0.0"
|
||||
}
|
||||
|
||||
@@ -50,6 +50,7 @@
|
||||
"ace-builds": "^1.4.12",
|
||||
"antd": "^4.4.3",
|
||||
"axios": "^0.19.0",
|
||||
"axios-auth-refresh": "^3.0.0",
|
||||
"bootstrap": "^3.3.7",
|
||||
"classnames": "^2.2.6",
|
||||
"d3": "^3.5.17",
|
||||
@@ -58,7 +59,6 @@
|
||||
"font-awesome": "^4.7.0",
|
||||
"history": "^4.10.1",
|
||||
"hoist-non-react-statics": "^3.3.0",
|
||||
"js-cookie": "^2.2.1",
|
||||
"lodash": "^4.17.10",
|
||||
"markdown": "0.5.0",
|
||||
"material-design-iconic-font": "^2.2.0",
|
||||
|
||||
@@ -288,6 +288,7 @@ def client_config():
"hidePlotlyModeBar": current_org.get_setting(
"hide_plotly_mode_bar"
),
"disablePublicUrls": current_org.get_setting("disable_public_urls"),
"allowCustomJSVisualizations": settings.FEATURE_ALLOW_CUSTOM_JS_VISUALIZATIONS,
"autoPublishNamedQueries": settings.FEATURE_AUTO_PUBLISH_NAMED_QUERIES,
"extendedAlertOptions": settings.FEATURE_EXTENDED_ALERT_OPTIONS,

@@ -268,6 +268,9 @@ class PublicDashboardResource(BaseResource):
:param token: An API key for a public dashboard.
:>json array widgets: An array of arrays of :ref:`public widgets <public-widget-label>`, corresponding to the rows and columns the widgets are displayed in
"""
if self.current_org.get_setting("disable_public_urls"):
abort(400, message="Public URLs are disabled.")

if not isinstance(self.current_user, models.ApiUser):
api_key = get_object_or_404(models.ApiKey.get_by_api_key, token)
dashboard = api_key.object

@@ -366,6 +366,12 @@ class QueryResource(BaseResource):
if "tags" in query_def:
query_def["tags"] = [tag for tag in query_def["tags"] if tag]

if "data_source_id" in query_def:
data_source = models.DataSource.get_by_id_and_org(
query_def["data_source_id"], self.current_org
)
require_access(data_source, self.current_user, not_view_only)

query_def["last_modified_by"] = self.current_user
query_def["changed_by"] = self.current_user
# SQLAlchemy handles the case where a concurrent transaction beats us
@@ -488,9 +494,9 @@ class QueryRefreshResource(BaseResource):

parameter_values = collect_parameters_from_request(request.args)
parameterized_query = ParameterizedQuery(query.query_text, org=self.current_org)

should_apply_auto_limit = query.options.get("apply_auto_limit", False)
return run_query(
parameterized_query, parameter_values, query.data_source, query.id
parameterized_query, parameter_values, query.data_source, query.id, should_apply_auto_limit
)

@@ -20,7 +20,6 @@ from redash.tasks import Job
from redash.tasks.queries import enqueue_query
from redash.utils import (
collect_parameters_from_request,
gen_query_hash,
json_dumps,
utcnow,
to_filename,
@@ -61,7 +60,7 @@ error_messages = {
}

def run_query(query, parameters, data_source, query_id, max_age=0):
def run_query(query, parameters, data_source, query_id, should_apply_auto_limit, max_age=0):
if data_source.paused:
if data_source.pause_reason:
message = "{} is paused ({}). Please try later.".format(
@@ -77,6 +76,8 @@ def run_query(query, parameters, data_source, query_id, max_age=0):
except (InvalidParameterError, QueryDetachedFromDataSourceError) as e:
abort(400, message=str(e))

query_text = data_source.query_runner.apply_auto_limit(query.text, should_apply_auto_limit)

if query.missing_params:
return error_response(
"Missing parameter value for: {}".format(", ".join(query.missing_params))
@@ -85,7 +86,7 @@ def run_query(query, parameters, data_source, query_id, max_age=0):
if max_age == 0:
query_result = None
else:
query_result = models.QueryResult.get_latest(data_source, query.text, max_age)
query_result = models.QueryResult.get_latest(data_source, query_text, max_age)

record_event(
current_user.org,
@@ -95,7 +96,7 @@ def run_query(query, parameters, data_source, query_id, max_age=0):
"cache": "hit" if query_result else "miss",
"object_id": data_source.id,
"object_type": "data_source",
"query": query.text,
"query": query_text,
"query_id": query_id,
"parameters": parameters,
},
@@ -109,7 +110,7 @@ def run_query(query, parameters, data_source, query_id, max_age=0):
}
else:
job = enqueue_query(
query.text,
query_text,
data_source,
current_user.id,
current_user.is_api_user(),
@@ -180,6 +181,7 @@ class QueryResultListResource(BaseResource):
)

parameterized_query = ParameterizedQuery(query, org=self.current_org)
should_apply_auto_limit = params.get("apply_auto_limit", False)

data_source_id = params.get("data_source_id")
if data_source_id:
@@ -193,7 +195,7 @@ class QueryResultListResource(BaseResource):
return error_messages["no_permission"]

return run_query(
parameterized_query, parameters, data_source, query_id, max_age
parameterized_query, parameters, data_source, query_id, should_apply_auto_limit, max_age
)

@@ -286,6 +288,7 @@ class QueryResultResource(BaseResource):
)

allow_executing_with_view_only_permissions = query.parameterized.is_safe
should_apply_auto_limit = params.get("apply_auto_limit", False)

if has_access(
query, self.current_user, allow_executing_with_view_only_permissions
@@ -295,6 +298,7 @@ class QueryResultResource(BaseResource):
parameter_values,
query.data_source,
query_id,
should_apply_auto_limit,
max_age,
)
else:

@@ -30,7 +30,7 @@ from redash.query_runner import (
TYPE_BOOLEAN,
TYPE_DATE,
TYPE_DATETIME,
)
BaseQueryRunner)
from redash.utils import (
generate_token,
json_dumps,
@@ -38,7 +38,7 @@ from redash.utils import (
mustache_render,
base_url,
sentry,
)
gen_query_hash)
from redash.utils.configuration import ConfigurationContainer
from redash.models.parameterized_query import ParameterizedQuery

@@ -122,6 +122,7 @@ class DataSource(BelongsToOrgMixin, db.Model):
"syntax": self.query_runner.syntax,
"paused": self.paused,
"pause_reason": self.pause_reason,
"supports_auto_limit": self.query_runner.supports_auto_limit
}

if all:
@@ -358,7 +359,7 @@ class QueryResult(db.Model, QueryResultPersistence, BelongsToOrgMixin):

@classmethod
def get_latest(cls, data_source, query, max_age=0):
query_hash = utils.gen_query_hash(query)
query_hash = gen_query_hash(query)

if max_age == -1:
query = cls.query.filter(
@@ -864,11 +865,16 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
api_keys = db.session.execute(query, {"id": self.id}).fetchall()
return [api_key[0] for api_key in api_keys]

def update_query_hash(self):
should_apply_auto_limit = self.options.get("apply_auto_limit", False) if self.options else False
query_runner = self.data_source.query_runner if self.data_source else BaseQueryRunner({})
self.query_hash = query_runner.gen_query_hash(self.query_text, should_apply_auto_limit)

@listens_for(Query.query_text, "set")
def gen_query_hash(target, val, oldval, initiator):
target.query_hash = utils.gen_query_hash(val)
target.schedule_failures = 0

@listens_for(Query, "before_insert")
@listens_for(Query, "before_update")
def receive_before_insert_update(mapper, connection, target):
target.update_query_hash()

@listens_for(Query.user_id, "set")

@@ -9,8 +9,8 @@ from urllib.parse import urlparse

from six import text_type
from sshtunnel import open_tunnel
from redash import settings
from redash.utils import json_loads
from redash import settings, utils
from redash.utils import json_loads, query_is_select_no_limit, add_limit_to_query
from rq.timeouts import JobTimeoutException

from redash.utils.requests_session import requests, requests_session
@@ -83,7 +83,7 @@ class BaseQueryRunner(object):
"""Returns this query runner's configured host.
This is used primarily for temporarily swapping endpoints when using SSH tunnels to connect to a data source.

`BaseQueryRunner`'s naïve implementation supports query runner implementations that store endpoints using `host` and `port`
configuration values. If your query runner uses a different schema (e.g. a web address), you should override this function.
"""
if "host" in self.configuration:
@@ -96,7 +96,7 @@ class BaseQueryRunner(object):
"""Sets this query runner's configured host.
This is used primarily for temporarily swapping endpoints when using SSH tunnels to connect to a data source.

`BaseQueryRunner`'s naïve implementation supports query runner implementations that store endpoints using `host` and `port`
configuration values. If your query runner uses a different schema (e.g. a web address), you should override this function.
"""
if "host" in self.configuration:
@@ -109,7 +109,7 @@ class BaseQueryRunner(object):
"""Returns this query runner's configured port.
This is used primarily for temporarily swapping endpoints when using SSH tunnels to connect to a data source.

`BaseQueryRunner`'s naïve implementation supports query runner implementations that store endpoints using `host` and `port`
configuration values. If your query runner uses a different schema (e.g. a web address), you should override this function.
"""
if "port" in self.configuration:
@@ -122,7 +122,7 @@ class BaseQueryRunner(object):
"""Sets this query runner's configured port.
This is used primarily for temporarily swapping endpoints when using SSH tunnels to connect to a data source.

`BaseQueryRunner`'s naïve implementation supports query runner implementations that store endpoints using `host` and `port`
configuration values. If your query runner uses a different schema (e.g. a web address), you should override this function.
"""
if "port" in self.configuration:
@@ -190,6 +190,17 @@ class BaseQueryRunner(object):
**({"deprecated": True} if cls.deprecated else {}),
}

@property
def supports_auto_limit(self):
return False

def apply_auto_limit(self, query_text, should_apply_auto_limit):
return query_text

def gen_query_hash(self, query_text, set_auto_limit=False):
query_text = self.apply_auto_limit(query_text, set_auto_limit)
return utils.gen_query_hash(query_text)

class BaseSQLQueryRunner(BaseQueryRunner):
def get_schema(self, get_stats=False):
@@ -208,6 +219,22 @@ class BaseSQLQueryRunner(BaseQueryRunner):
res = self._run_query_internal("select count(*) as cnt from %s" % t)
tables_dict[t]["size"] = res[0]["cnt"]

@property
def supports_auto_limit(self):
return True

def apply_auto_limit(self, query_text, should_apply_auto_limit):
if should_apply_auto_limit:
from redash.query_runner.databricks import split_sql_statements, combine_sql_statements
queries = split_sql_statements(query_text)
# only check the last statement in the list, since its result is the one that gets shown
last_query = queries[-1]
if query_is_select_no_limit(last_query):
queries[-1] = add_limit_to_query(last_query)
return combine_sql_statements(queries)
else:
return query_text

def is_private_address(url):
hostname = urlparse(url).hostname

@@ -267,39 +267,55 @@ class BigQuery(BaseQueryRunner):

service = self._get_bigquery_service()
project_id = self._get_project_id()
datasets = service.datasets().list(projectId=project_id).execute()
schema = []
for dataset in datasets.get("datasets", []):
dataset_id = dataset["datasetReference"]["datasetId"]
tables = (
service.tables()
.list(projectId=project_id, datasetId=dataset_id)
.execute()
# get a list of Big Query datasets
datasets_request = service.datasets().list(
projectId=project_id,
fields="datasets/datasetReference/datasetId,nextPageToken",
)
datasets = []
while datasets_request:
# request datasets
datasets_response = datasets_request.execute()
# store results
datasets.extend(datasets_response.get("datasets", []))
# try loading next page
datasets_request = service.datasets().list_next(
datasets_request,
datasets_response,
)
while True:
for table in tables.get("tables", []):

schema = []
# load all tables for all datasets
for dataset in datasets:
dataset_id = dataset["datasetReference"]["datasetId"]
tables_request = service.tables().list(
projectId=project_id,
datasetId=dataset_id,
fields="tables/tableReference/tableId,nextPageToken",
)
while tables_request:
# request tables with fields above
tables_response = tables_request.execute()
for table in tables_response.get("tables", []):
# load schema for given table
table_data = (
service.tables()
.get(
projectId=project_id,
datasetId=dataset_id,
tableId=table["tableReference"]["tableId"],
fields="id,schema",
)
.execute()
)
# build schema data with given table data
table_schema = self._get_columns_schema(table_data)
schema.append(table_schema)

next_token = tables.get("nextPageToken", None)
if next_token is None:
break

tables = (
service.tables()
.list(
projectId=project_id, datasetId=dataset_id, pageToken=next_token
)
.execute()
# try loading next page of results
tables_request = service.tables().list_next(
tables_request,
tables_response,
)

return schema

@@ -21,7 +21,6 @@ try:
except ImportError:
enabled = False

TYPES_MAP = {
str: TYPE_STRING,
bool: TYPE_BOOLEAN,
@@ -83,6 +82,10 @@ def split_sql_statements(query):
return [""] # if all statements were empty - return a single empty statement

def combine_sql_statements(queries):
return ";\n".join(queries)

class Databricks(BaseSQLQueryRunner):
noop_query = "SELECT 1"
should_annotate_query = False

@@ -27,6 +27,7 @@ def init_app(app):
csrf.init_app(app)
app.config["WTF_CSRF_CHECK_DEFAULT"] = False
app.config["WTF_CSRF_SSL_STRICT"] = False
app.config["WTF_CSRF_TIME_LIMIT"] = settings.CSRF_TIME_LIMIT

@app.after_request
def inject_csrf_token(response):

@@ -505,4 +505,6 @@ REQUESTS_ALLOW_REDIRECTS = parse_boolean(
# This is turned off by default to avoid breaking any existing deployments but it is highly recommended to turn this toggle on to prevent CSRF attacks.
ENFORCE_CSRF = parse_boolean(
os.environ.get("REDASH_ENFORCE_CSRF", "false")
)
)

CSRF_TIME_LIMIT = int(os.environ.get("REDASH_CSRF_TIME_LIMIT", 3600 * 6))
@@ -43,8 +43,9 @@ FEATURE_SHOW_PERMISSIONS_CONTROL = parse_boolean(
SEND_EMAIL_ON_FAILED_SCHEDULED_QUERIES = parse_boolean(
os.environ.get("REDASH_SEND_EMAIL_ON_FAILED_SCHEDULED_QUERIES", "false")
)
HIDE_PLOTLY_MODE_BAR = parse_boolean(
os.environ.get("HIDE_PLOTLY_MODE_BAR", "false")
HIDE_PLOTLY_MODE_BAR = parse_boolean(os.environ.get("HIDE_PLOTLY_MODE_BAR", "false"))
DISABLE_PUBLIC_URLS = parse_boolean(
os.environ.get("REDASH_DISABLE_PUBLIC_URLS", "false")
)

settings = {
@@ -69,4 +70,5 @@ settings = {
"feature_show_permissions_control": FEATURE_SHOW_PERMISSIONS_CONTROL,
"send_email_on_failed_scheduled_queries": SEND_EMAIL_ON_FAILED_SCHEDULED_QUERIES,
"hide_plotly_mode_bar": HIDE_PLOTLY_MODE_BAR,
"disable_public_urls": DISABLE_PUBLIC_URLS,
}

@@ -76,6 +76,11 @@ class RefreshQueriesError(Exception):
pass

def _apply_auto_limit(query_text, query):
should_apply_auto_limit = query.options.get("apply_auto_limit", False)
return query.data_source.query_runner.apply_auto_limit(query_text, should_apply_auto_limit)

def refresh_queries():
logger.info("Refreshing queries...")
enqueued = []
@@ -84,8 +89,10 @@ def refresh_queries():
continue

try:
query_text = _apply_default_parameters(query)
query_text = _apply_auto_limit(query_text, query)
enqueue_query(
_apply_default_parameters(query),
query_text,
query.data_source,
query.user_id,
scheduled_query=query,

@@ -11,10 +11,8 @@ from rq.job import Job as BaseJob, JobStatus

class CancellableJob(BaseJob):
def cancel(self, pipeline=None):
# TODO - add tests that verify that queued jobs are removed from queue and running jobs are actively cancelled
if self.is_started:
self.meta["cancelled"] = True
self.save_meta()
self.meta["cancelled"] = True
self.save_meta()

super().cancel(pipeline=pipeline)

@@ -13,6 +13,7 @@ import binascii
import pystache
import pytz
import simplejson
import sqlparse
from flask import current_app
from funcy import select_values
from redash import settings
@@ -20,7 +21,6 @@ from sqlalchemy.orm.query import Query

from .human_time import parse_human_time

COMMENTS_REGEX = re.compile("/\*.*?\*/")
WRITER_ENCODING = os.environ.get("REDASH_CSV_WRITER_ENCODING", "utf-8")
WRITER_ERRORS = os.environ.get("REDASH_CSV_WRITER_ERRORS", "strict")
@@ -70,8 +70,7 @@ def generate_token(length):

class JSONEncoder(simplejson.JSONEncoder):
"""Adapter for `simplejson.dumps`."""

def default(self, o):
# Some SQLAlchemy collections are lazy.
if isinstance(o, Query):
@@ -213,3 +212,33 @@ def render_template(path, context):
function decorated with the `context_processor` decorator, which is not explicitly required for rendering purposes.
"""
current_app.jinja_env.get_template(path).render(**context)

def query_is_select_no_limit(query):
parsed_query = sqlparse.parse(query)[0]
last_keyword_idx = find_last_keyword_idx(parsed_query)
# Either invalid query or query that is not select
if last_keyword_idx == -1 or parsed_query.tokens[0].value.upper() != "SELECT":
return False

no_limit = parsed_query.tokens[last_keyword_idx].value.upper() != "LIMIT" \
and parsed_query.tokens[last_keyword_idx].value.upper() != "OFFSET"
return no_limit

def find_last_keyword_idx(parsed_query):
for i in reversed(range(len(parsed_query.tokens))):
if parsed_query.tokens[i].ttype in sqlparse.tokens.Keyword:
return i
return -1

def add_limit_to_query(query):
parsed_query = sqlparse.parse(query)[0]
limit_tokens = sqlparse.parse(" LIMIT 1000")[0].tokens
length = len(parsed_query.tokens)
if parsed_query.tokens[length - 1].ttype == sqlparse.tokens.Punctuation:
parsed_query.tokens[length - 1:length - 1] = limit_tokens
else:
parsed_query.tokens += limit_tokens
return str(parsed_query)

@@ -121,6 +121,22 @@ class TestQueryResourcePost(BaseTestCase):
|
||||
)
|
||||
self.assertEqual(rv.status_code, 409)
|
||||
|
||||
def test_prevents_association_with_view_only_data_sources(self):
|
||||
view_only_data_source = self.factory.create_data_source(view_only=True)
|
||||
my_data_source = self.factory.create_data_source()
|
||||
|
||||
my_query = self.factory.create_query(data_source=my_data_source)
|
||||
db.session.add(my_query)
|
||||
|
||||
rv = self.make_request(
|
||||
"post",
|
||||
"/api/queries/{0}".format(my_query.id),
|
||||
data={"data_source_id": view_only_data_source.id},
|
||||
user=self.factory.user,
|
||||
)
|
||||
|
||||
self.assertEqual(rv.status_code, 403)
|
||||
|
||||
def test_allows_association_with_authorized_dropdown_queries(self):
|
||||
data_source = self.factory.create_data_source(group=self.factory.default_group)
|
||||
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
from redash.query_runner import BaseSQLQueryRunner, BaseQueryRunner
|
||||
from tests import BaseTestCase
|
||||
|
||||
from redash.models import db
|
||||
@@ -39,7 +40,7 @@ class TestQueryResultsContentDispositionHeaders(BaseTestCase):
|
||||
try:
|
||||
rv.headers['Content-Disposition'].encode('ascii')
|
||||
except Exception as e:
|
||||
self.fail(repr(e))
|
||||
self.fail(repr(e))
|
||||
|
||||
|
||||
class TestQueryResultListAPI(BaseTestCase):
|
||||
@@ -76,6 +77,47 @@ class TestQueryResultListAPI(BaseTestCase):
|
||||
self.assertNotIn("query_result", rv.json)
|
||||
self.assertIn("job", rv.json)
|
||||
|
||||
def test_add_limit_change_query_sql(self):
|
||||
ds = self.factory.create_data_source(
|
||||
group=self.factory.org.default_group, type="pg"
|
||||
)
|
||||
query = self.factory.create_query(query_text="SELECT 2", data_source=ds)
|
||||
query_result = self.factory.create_query_result(data_source=ds, query_hash=query.query_hash)
|
||||
|
||||
rv = self.make_request(
|
||||
"post",
|
||||
"/api/query_results",
|
||||
data={
|
||||
"data_source_id": ds.id,
|
||||
"query": query.query_text,
|
||||
"apply_auto_limit": True
|
||||
},
|
||||
)
|
||||
|
||||
self.assertEqual(rv.status_code, 200)
|
||||
self.assertNotIn("query_result", rv.json)
|
||||
self.assertIn("job", rv.json)
|
||||
|
||||
def test_add_limit_no_change_for_nonsql(self):
|
||||
ds = self.factory.create_data_source(
|
||||
group=self.factory.org.default_group, type="prometheus"
|
||||
)
|
||||
query = self.factory.create_query(query_text="SELECT 5", data_source=ds)
|
||||
query_result = self.factory.create_query_result(data_source=ds, query_hash=query.query_hash)
|
||||
|
||||
rv = self.make_request(
|
||||
"post",
|
||||
"/api/query_results",
|
||||
data={
|
||||
"data_source_id": ds.id,
|
||||
"query": query.query_text,
|
||||
"apply_auto_limit": True
|
||||
},
|
||||
)
|
||||
|
||||
self.assertEqual(rv.status_code, 200)
|
||||
self.assertEqual(query_result.id, rv.json["query_result"]["id"])
|
||||
|
||||
def test_execute_query_without_access(self):
|
||||
group = self.factory.create_group()
|
||||
db.session.commit()
|
||||
@@ -445,3 +487,23 @@ class TestQueryResultExcelResponse(BaseTestCase):
|
||||
is_json=False,
|
||||
)
|
||||
self.assertEqual(rv.status_code, 200)
|
||||
|
||||
|
||||
class TestJobResource(BaseTestCase):
|
||||
def test_cancels_queued_queries(self):
|
||||
QUEUED = 1
|
||||
FAILED = 4
|
||||
|
||||
query = self.factory.create_query()
|
||||
job_id = self.make_request(
|
||||
"post", f"/api/queries/{query.id}/results", data={"parameters": {}},
|
||||
).json["job"]["id"]
|
||||
|
||||
status = self.make_request("get", f"/api/jobs/{job_id}").json["job"]["status"]
|
||||
self.assertEqual(status, QUEUED)
|
||||
|
||||
self.make_request("delete", f"/api/jobs/{job_id}")
|
||||
|
||||
job = self.make_request("get", f"/api/jobs/{job_id}").json["job"]
|
||||
self.assertEqual(job["status"], FAILED)
|
||||
self.assertTrue("cancelled" in job["error"])
|
||||
102 tests/query_runner/test_basesql_queryrunner.py Normal file
@@ -0,0 +1,102 @@
|
||||
import unittest
|
||||
|
||||
from redash.query_runner import BaseSQLQueryRunner, BaseQueryRunner
|
||||
from redash.utils import gen_query_hash
|
||||
|
||||
|
||||
class TestBaseSQLQueryRunner(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.query_runner = BaseSQLQueryRunner({})
|
||||
|
||||
def test_apply_auto_limit_origin_no_limit_1(self):
|
||||
origin_query_text = "SELECT 2"
|
||||
query_text = self.query_runner.apply_auto_limit(origin_query_text, True)
|
||||
self.assertEqual("SELECT 2 LIMIT 1000", query_text)
|
||||
|
||||
def test_apply_auto_limit_origin_have_limit_1(self):
|
||||
origin_query_text = "SELECT 2 LIMIT 100"
|
||||
query_text = self.query_runner.apply_auto_limit(origin_query_text, True)
|
||||
self.assertEqual(origin_query_text, query_text)
|
||||
|
||||
def test_apply_auto_limit_origin_have_limit_2(self):
|
||||
origin_query_text = "SELECT * FROM fake WHERE id IN (SELECT id FROM fake_2 LIMIT 200) LIMIT 200"
|
||||
query_text = self.query_runner.apply_auto_limit(origin_query_text, True)
|
||||
self.assertEqual(origin_query_text, query_text)
|
||||
|
||||
def test_apply_auto_limit_origin_no_limit_2(self):
|
||||
origin_query_text = "SELECT * FROM fake WHERE id IN (SELECT id FROM fake_2 LIMIT 200)"
|
||||
query_text = self.query_runner.apply_auto_limit(origin_query_text, True)
|
||||
self.assertEqual(origin_query_text + " LIMIT 1000", query_text)
|
||||
|
||||
def test_apply_auto_limit_non_select_query(self):
|
||||
origin_query_text = ("create table execution_times as "
|
||||
"(select id, retrieved_at, data_source_id, query, runtime, query_hash "
|
||||
"from query_results order by 1 desc)")
|
||||
query_text = self.query_runner.apply_auto_limit(origin_query_text, True)
|
||||
self.assertEqual(origin_query_text, query_text)
|
||||
|
||||
def test_apply_auto_limit_error_query(self):
|
||||
origin_query_text = "dklsk jdhsajhdiwc kkdsakjdwi mdklsjal"
|
||||
query_text = self.query_runner.apply_auto_limit(origin_query_text, True)
|
||||
self.assertEqual(origin_query_text, query_text)
|
||||
|
||||
def test_apply_auto_limit_multi_query_add_limit_1(self):
|
||||
origin_query_text = ("insert into execution_times (id, retrieved_at, data_source_id, query, runtime, query_hash) "
|
||||
"select id, retrieved_at, data_source_id, query, runtime, query_hash from query_results "
|
||||
"where id > (select max(id) from execution_times);\n"
|
||||
"select max(id), 'execution_times' as table_name from execution_times "
|
||||
"union all "
|
||||
"select max(id), 'query_results' as table_name from query_results")
|
||||
query_text = self.query_runner.apply_auto_limit(origin_query_text, True)
|
||||
self.assertEqual(origin_query_text + " LIMIT 1000", query_text)
|
||||
|
||||
def test_apply_auto_limit_multi_query_add_limit_2(self):
|
||||
origin_query_text = "use database demo;\n" \
|
||||
"select * from data"
|
||||
query_text = self.query_runner.apply_auto_limit(origin_query_text, True)
|
||||
self.assertEqual(origin_query_text + " LIMIT 1000", query_text)
|
||||
|
||||
def test_apply_auto_limit_multi_query_end_with_punc(self):
|
||||
origin_query_text = ("select * from table1;\n"
|
||||
"select * from table2")
|
||||
query_text = self.query_runner.apply_auto_limit(origin_query_text, True)
|
||||
self.assertEqual("select * from table1;\nselect * from table2 LIMIT 1000", query_text)
|
||||
|
||||
def test_apply_auto_limit_multi_query_last_not_select(self):
|
||||
origin_query_text = ("select * from table1;\n"
|
||||
"CREATE TABLE Persons (PersonID int)")
|
||||
query_text = self.query_runner.apply_auto_limit(origin_query_text, True)
|
||||
self.assertEqual(origin_query_text, query_text)
|
||||
|
||||
def test_apply_auto_limit_last_command_comment(self):
|
||||
origin_query_text = "select * from raw_events; # comment"
|
||||
query_text = self.query_runner.apply_auto_limit(origin_query_text, True)
|
||||
self.assertEqual("select * from raw_events LIMIT 1000", query_text)
|
||||
|
||||
def test_apply_auto_limit_last_command_comment_2(self):
|
||||
origin_query_text = "select * from raw_events; -- comment"
|
||||
query_text = self.query_runner.apply_auto_limit(origin_query_text, True)
|
||||
self.assertEqual("select * from raw_events LIMIT 1000", query_text)
|
||||
|
||||
def test_apply_auto_limit_inline_comment(self):
|
||||
origin_query_text = "select * from raw_events -- comment"
|
||||
query_text = self.query_runner.apply_auto_limit(origin_query_text, True)
|
||||
self.assertEqual("select * from raw_events LIMIT 1000", query_text)
|
||||
|
||||
def test_gen_query_hash_baseSQL(self):
|
||||
origin_query_text = "select *"
|
||||
expected_query_text = "select * LIMIT 1000"
|
||||
base_runner = BaseQueryRunner({})
|
||||
self.assertEqual(base_runner.gen_query_hash(expected_query_text),
|
||||
self.query_runner.gen_query_hash(origin_query_text, True))
|
||||
|
||||
def test_gen_query_hash_NoneSQL(self):
|
||||
origin_query_text = "select *"
|
||||
base_runner = BaseQueryRunner({})
|
||||
self.assertEqual(gen_query_hash(origin_query_text),
|
||||
base_runner.gen_query_hash(origin_query_text, True))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
@@ -7,14 +7,53 @@ ENQUEUE_QUERY = "redash.tasks.queries.maintenance.enqueue_query"
|
||||
|
||||
|
||||
class TestRefreshQuery(BaseTestCase):
|
||||
def test_enqueues_outdated_queries(self):
|
||||
def test_enqueues_outdated_queries_for_sqlquery(self):
|
||||
"""
|
||||
refresh_queries() launches an execution task for each query returned
|
||||
from Query.outdated_queries().
|
||||
"""
|
||||
query1 = self.factory.create_query()
|
||||
query1 = self.factory.create_query(options={"apply_auto_limit": True})
|
||||
query2 = self.factory.create_query(
|
||||
query_text="select 42;", data_source=self.factory.create_data_source()
|
||||
query_text="select 42;", data_source=self.factory.create_data_source(),
|
||||
options={"apply_auto_limit": True}
|
||||
)
|
||||
oq = staticmethod(lambda: [query1, query2])
|
||||
with patch(ENQUEUE_QUERY) as add_job_mock, patch.object(
|
||||
Query, "outdated_queries", oq
|
||||
):
|
||||
refresh_queries()
|
||||
self.assertEqual(add_job_mock.call_count, 2)
|
||||
add_job_mock.assert_has_calls(
|
||||
[
|
||||
call(
|
||||
query1.query_text + " LIMIT 1000",
|
||||
query1.data_source,
|
||||
query1.user_id,
|
||||
scheduled_query=query1,
|
||||
metadata=ANY,
|
||||
),
|
||||
call(
|
||||
"select 42 LIMIT 1000",
|
||||
query2.data_source,
|
||||
query2.user_id,
|
||||
scheduled_query=query2,
|
||||
metadata=ANY,
|
||||
),
|
||||
],
|
||||
any_order=True,
|
||||
)
|
||||
|
||||
def test_enqueues_outdated_queries_for_non_sqlquery(self):
|
||||
"""
|
||||
refresh_queries() launches an execution task for each query returned
|
||||
from Query.outdated_queries().
|
||||
"""
|
||||
ds = self.factory.create_data_source(
|
||||
group=self.factory.org.default_group, type="prometheus"
|
||||
)
|
||||
query1 = self.factory.create_query(data_source=ds, options={"apply_auto_limit": True})
|
||||
query2 = self.factory.create_query(
|
||||
query_text="select 42;", data_source=ds, options={"apply_auto_limit": True}
|
||||
)
|
||||
oq = staticmethod(lambda: [query1, query2])
|
||||
with patch(ENQUEUE_QUERY) as add_job_mock, patch.object(
|
||||
@@ -30,7 +69,7 @@ class TestRefreshQuery(BaseTestCase):
|
||||
query1.user_id,
|
||||
scheduled_query=query1,
|
||||
metadata=ANY,
|
||||
),
|
||||
),
|
||||
call(
|
||||
query2.query_text,
|
||||
query2.data_source,
|
||||
@@ -42,12 +81,40 @@ class TestRefreshQuery(BaseTestCase):
|
||||
any_order=True,
|
||||
)
|
||||
|
||||
def test_doesnt_enqueue_outdated_queries_for_paused_data_source(self):
|
||||
def test_doesnt_enqueue_outdated_queries_for_paused_data_source_for_sqlquery(self):
|
||||
"""
|
||||
refresh_queries() does not launch execution tasks for queries whose
|
||||
data source is paused.
|
||||
"""
|
||||
query = self.factory.create_query()
|
||||
query = self.factory.create_query(options={"apply_auto_limit": True})
|
||||
oq = staticmethod(lambda: [query])
|
||||
query.data_source.pause()
|
||||
with patch.object(Query, "outdated_queries", oq):
|
||||
with patch(ENQUEUE_QUERY) as add_job_mock:
|
||||
refresh_queries()
|
||||
add_job_mock.assert_not_called()
|
||||
|
||||
query.data_source.resume()
|
||||
|
||||
with patch(ENQUEUE_QUERY) as add_job_mock:
|
||||
refresh_queries()
|
||||
add_job_mock.assert_called_with(
|
||||
query.query_text + " LIMIT 1000",
|
||||
query.data_source,
|
||||
query.user_id,
|
||||
scheduled_query=query,
|
||||
metadata=ANY,
|
||||
)
|
||||
|
||||
def test_doesnt_enqueue_outdated_queries_for_paused_data_source_for_non_sqlquery(self):
|
||||
"""
|
||||
refresh_queries() does not launch execution tasks for queries whose
|
||||
data source is paused.
|
||||
"""
|
||||
ds = self.factory.create_data_source(
|
||||
group=self.factory.org.default_group, type="prometheus"
|
||||
)
|
||||
query = self.factory.create_query(data_source=ds, options={"apply_auto_limit": True})
|
||||
oq = staticmethod(lambda: [query])
|
||||
query.data_source.pause()
|
||||
with patch.object(Query, "outdated_queries", oq):
|
||||
@@ -65,9 +132,9 @@ class TestRefreshQuery(BaseTestCase):
|
||||
query.user_id,
|
||||
scheduled_query=query,
|
||||
metadata=ANY,
|
||||
)
|
||||
)
|
||||
|
||||
def test_enqueues_parameterized_queries(self):
|
||||
def test_enqueues_parameterized_queries_for_sqlquery(self):
|
||||
"""
|
||||
Scheduled queries with parameters use saved values.
|
||||
"""
|
||||
@@ -82,10 +149,48 @@ class TestRefreshQuery(BaseTestCase):
|
||||
"value": "42",
|
||||
"title": "n",
|
||||
}
|
||||
]
|
||||
],
|
||||
"apply_auto_limit": True
|
||||
},
|
||||
)
|
||||
oq = staticmethod(lambda: [query])
|
||||
with patch(ENQUEUE_QUERY) as add_job_mock, patch.object(
|
||||
Query, "outdated_queries", oq
|
||||
):
|
||||
refresh_queries()
|
||||
add_job_mock.assert_called_with(
|
||||
"select 42 LIMIT 1000",
|
||||
query.data_source,
|
||||
query.user_id,
|
||||
scheduled_query=query,
|
||||
metadata=ANY,
|
||||
)
|
||||
|
||||
def test_enqueues_parameterized_queries_for_non_sqlquery(self):
|
||||
"""
|
||||
Scheduled queries with parameters use saved values.
|
||||
"""
|
||||
ds = self.factory.create_data_source(
|
||||
group=self.factory.org.default_group, type="prometheus"
|
||||
)
|
||||
query = self.factory.create_query(
|
||||
query_text="select {{n}}",
|
||||
options={
|
||||
"parameters": [
|
||||
{
|
||||
"global": False,
|
||||
"type": "text",
|
||||
"name": "n",
|
||||
"value": "42",
|
||||
"title": "n",
|
||||
}
|
||||
],
|
||||
"apply_auto_limit": True
|
||||
|
||||
},
|
||||
data_source=ds,
|
||||
)
|
||||
oq = staticmethod(lambda: [query])
|
||||
with patch(ENQUEUE_QUERY) as add_job_mock, patch.object(
|
||||
Query, "outdated_queries", oq
|
||||
):
|
||||
@@ -113,7 +218,8 @@ class TestRefreshQuery(BaseTestCase):
|
||||
"value": 42, # <-- should be text!
|
||||
"title": "n",
|
||||
}
|
||||
]
|
||||
],
|
||||
"apply_auto_limit": True
|
||||
},
|
||||
)
|
||||
oq = staticmethod(lambda: [query])
|
||||
@@ -140,7 +246,8 @@ class TestRefreshQuery(BaseTestCase):
|
||||
"queryId": 100,
|
||||
"title": "n",
|
||||
}
|
||||
]
|
||||
],
|
||||
"apply_auto_limit": True
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@@ -4,6 +4,7 @@ from unittest import TestCase
|
||||
|
||||
import pytz
|
||||
from dateutil.parser import parse as date_parse
|
||||
|
||||
from tests import BaseTestCase
|
||||
|
||||
from redash import models, redis_connection
|
||||
@@ -471,6 +472,37 @@ class TestQueryAll(BaseTestCase):
|
||||
qs2 = base.order_by(models.User.name.desc())
|
||||
self.assertEqual(["bob", "alice"], [q.user.name for q in qs2])
|
||||
|
||||
def test_update_query_hash_basesql_with_options(self):
|
||||
ds = self.factory.create_data_source(
|
||||
group=self.factory.org.default_group, type="pg"
|
||||
)
|
||||
query = self.factory.create_query(query_text="SELECT 2", data_source=ds)
|
||||
query.options = {"apply_auto_limit": True}
|
||||
origin_hash = query.query_hash
|
||||
query.update_query_hash()
|
||||
self.assertNotEqual(origin_hash, query.query_hash)
|
||||
|
||||
def test_update_query_hash_basesql_no_options(self):
|
||||
ds = self.factory.create_data_source(
|
||||
group=self.factory.org.default_group, type="pg"
|
||||
)
|
||||
query = self.factory.create_query(query_text="SELECT 2", data_source=ds)
|
||||
query.options = {}
|
||||
origin_hash = query.query_hash
|
||||
query.update_query_hash()
|
||||
self.assertEqual(origin_hash, query.query_hash)
|
||||
|
||||
def test_update_query_hash_non_basesql(self):
|
||||
ds = self.factory.create_data_source(
|
||||
group=self.factory.org.default_group, type="prometheus"
|
||||
)
|
||||
query = self.factory.create_query(query_text="SELECT 2", data_source=ds)
|
||||
query.options = {"apply_auto_limit": True}
|
||||
origin_hash = query.query_hash
|
||||
query.update_query_hash()
|
||||
self.assertEqual(origin_hash, query.query_hash)
|
||||
|
||||
|
||||
|
||||
class TestGroup(BaseTestCase):
|
||||
def test_returns_groups_with_specified_names(self):
|
||||
|
||||
41 tests/utils/test_query_limit.py Normal file
@@ -0,0 +1,41 @@
|
||||
import unittest
|
||||
|
||||
from redash.utils import query_is_select_no_limit, add_limit_to_query
|
||||
|
||||
|
||||
class TestQueryLimit(unittest.TestCase):
|
||||
def test_check_query_limit_no_limit(self):
|
||||
query = "SELECT *"
|
||||
self.assertEqual(True, query_is_select_no_limit(query))
|
||||
|
||||
def test_check_query_limit_non_select(self):
|
||||
query = "Create Table (PersonID INT)"
|
||||
self.assertEqual(False, query_is_select_no_limit(query))
|
||||
|
||||
def test_check_query_limit_invalid_1(self):
|
||||
query = "OFFSET 5"
|
||||
self.assertEqual(False, query_is_select_no_limit(query))
|
||||
|
||||
def test_check_query_limit_invalid_2(self):
|
||||
query = "TABLE A FROM TABLE B"
|
||||
self.assertEqual(False, query_is_select_no_limit(query))
|
||||
|
||||
def test_check_query_with_limit(self):
|
||||
query = "SELECT * LIMIT 5"
|
||||
self.assertEqual(False, query_is_select_no_limit(query))
|
||||
|
||||
def test_check_query_with_offset(self):
|
||||
query = "SELECT * LIMIT 5 OFFSET 3"
|
||||
self.assertEqual(False, query_is_select_no_limit(query))
|
||||
|
||||
def test_add_limit_query_no_limit(self):
|
||||
query = "SELECT *"
|
||||
self.assertEqual("SELECT * LIMIT 1000", add_limit_to_query(query))
|
||||
|
||||
def test_add_limit_query_with_punc(self):
|
||||
query = "SELECT *;"
|
||||
self.assertEqual("SELECT * LIMIT 1000;", add_limit_to_query(query))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
39 viz-lib/src/lib/referenceCountingCache.js Normal file
@@ -0,0 +1,39 @@
|
||||
import { each, debounce } from "lodash";
|
||||
|
||||
export default function createReferenceCountingCache({ cleanupDelay = 2000 } = {}) {
|
||||
const items = {};
|
||||
|
||||
const cleanup = debounce(() => {
|
||||
each(items, (item, key) => {
|
||||
if (item.refCount <= 0) {
|
||||
delete items[key];
|
||||
}
|
||||
});
|
||||
}, cleanupDelay);
|
||||
|
||||
function get(key, getter) {
|
||||
if (!items[key]) {
|
||||
items[key] = {
|
||||
value: getter(),
|
||||
refCount: 0,
|
||||
};
|
||||
}
|
||||
const item = items[key];
|
||||
item.refCount += 1;
|
||||
return item.value;
|
||||
}
|
||||
|
||||
function release(key) {
|
||||
if (items[key]) {
|
||||
const item = items[key];
|
||||
if (item.refCount > 0) {
|
||||
item.refCount -= 1;
|
||||
if (item.refCount <= 0) {
|
||||
cleanup();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { get, release };
|
||||
}
|
||||
@@ -18,4 +18,6 @@ DOMPurify.addHook("afterSanitizeAttributes", function(node) {
|
||||
}
|
||||
});
|
||||
|
||||
export { DOMPurify };
|
||||
|
||||
export default DOMPurify.sanitize;
|
||||
|
||||
@@ -32,6 +32,16 @@ export default function YAxisSettings({ options, onOptionsChange }) {
|
||||
onChange={axis => onOptionsChange({ yAxis: [leftYAxis, axis] })}
|
||||
/>
|
||||
</Section>
|
||||
|
||||
<Section>
|
||||
<Switch
|
||||
id="chart-editor-y-axis-align-at-zero"
|
||||
data-test="Chart.YAxis.AlignAtZero"
|
||||
defaultChecked={options.alignYAxesAtZero}
|
||||
onChange={alignYAxesAtZero => onOptionsChange({ alignYAxesAtZero })}>
|
||||
Align Y Axes at Zero
|
||||
</Switch>
|
||||
</Section>
|
||||
</React.Fragment>
|
||||
)}
|
||||
|
||||
|
||||
@@ -5,8 +5,9 @@ const DEFAULT_OPTIONS = {
|
||||
globalSeriesType: "column",
|
||||
sortX: true,
|
||||
legend: { enabled: true, placement: "auto", traceorder: "normal" },
|
||||
yAxis: [{ type: "linear" }, { type: "linear", opposite: true }],
|
||||
xAxis: { type: "-", labels: { enabled: true } },
|
||||
yAxis: [{ type: "linear" }, { type: "linear", opposite: true }],
|
||||
alignYAxesAtZero: false,
|
||||
error_y: { type: "data", visible: true },
|
||||
series: { stacking: null, error_y: { type: "data", visible: true } },
|
||||
seriesOptions: {},
|
||||
|
||||
@@ -4,6 +4,42 @@ function calculateAxisRange(range, min, max) {
|
||||
return [isNumber(min) ? min : range[0], isNumber(max) ? max : range[1]];
|
||||
}
|
||||
|
||||
function calculateAbsoluteDiff(value, totalRange, percentageDiff) {
|
||||
return (percentageDiff * totalRange) / (1 - Math.abs(value) / totalRange - percentageDiff);
|
||||
}
|
||||
|
||||
function alignYAxesAtZero(axisA, axisB) {
|
||||
// Make sure the origin is included in both axes
|
||||
axisA.range[1] = Math.max(0, axisA.range[1]);
|
||||
axisB.range[1] = Math.max(0, axisB.range[1]);
|
||||
axisA.range[0] = Math.min(0, axisA.range[0]);
|
||||
axisB.range[0] = Math.min(0, axisB.range[0]);
|
||||
|
||||
const totalRangeA = axisA.range[1] - axisA.range[0];
|
||||
const proportionA = axisA.range[1] / totalRangeA;
|
||||
const totalRangeB = axisB.range[1] - axisB.range[0];
|
||||
const proportionB = axisB.range[1] / totalRangeB;
|
||||
|
||||
// Calculate the difference between the proportions and distribute them within the two axes
|
||||
const diff = Math.abs(proportionB - proportionA) / 2;
|
||||
|
||||
// Don't do anything if the difference is too low
|
||||
if (diff < 0.01) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Select the two that will correct the proportion by always augmenting, so the chart is not cut
|
||||
if (proportionA < proportionB) {
|
||||
// increase axisA max and axisB min
|
||||
axisA.range[1] += calculateAbsoluteDiff(axisA.range[1], totalRangeA, diff);
|
||||
axisB.range[0] -= calculateAbsoluteDiff(axisA.range[0], totalRangeB, diff);
|
||||
} else {
|
||||
// increase axisB max and axisA min
|
||||
axisB.range[1] += calculateAbsoluteDiff(axisB.range[1], totalRangeB, diff);
|
||||
axisA.range[0] -= calculateAbsoluteDiff(axisA.range[0], totalRangeA, diff);
|
||||
}
|
||||
}
|
||||
|
||||
export default function updateYRanges(plotlyElement, layout, options) {
|
||||
const updates = {};
|
||||
if (isObject(layout.yaxis)) {
|
||||
@@ -38,6 +74,10 @@ export default function updateYRanges(plotlyElement, layout, options) {
|
||||
updates.yaxis2.range = calculateAxisRange(defaultRange, axisOptions.rangeMin, axisOptions.rangeMax);
|
||||
}
|
||||
|
||||
if (options.alignYAxesAtZero && isObject(layout.yaxis) && isObject(layout.yaxis2)) {
|
||||
alignYAxesAtZero(updates.yaxis, updates.yaxis2);
|
||||
}
|
||||
|
||||
return [updates, null]; // no further updates
|
||||
},
|
||||
];
|
||||
|
||||
@@ -1,10 +1,13 @@
|
||||
import { isFinite, cloneDeep } from "lodash";
|
||||
import { isArray, isFinite, cloneDeep } from "lodash";
|
||||
import React, { useState, useEffect, useCallback } from "react";
|
||||
import { useDebouncedCallback } from "use-debounce";
|
||||
import * as Grid from "antd/lib/grid";
|
||||
import { Section, InputNumber, ControlLabel } from "@/components/visualizations/editor";
|
||||
import { EditorPropTypes } from "@/visualizations/prop-types";
|
||||
|
||||
import useLoadGeoJson from "../hooks/useLoadGeoJson";
|
||||
import { getGeoJsonBounds } from "./utils";
|
||||
|
||||
export default function BoundsSettings({ options, onOptionsChange }) {
|
||||
// Bounds may be changed in editor or on preview (by drag/zoom map).
|
||||
// Changes from preview does not come frequently (only when user release mouse button),
|
||||
@@ -16,9 +19,20 @@ export default function BoundsSettings({ options, onOptionsChange }) {
|
||||
const [bounds, setBounds] = useState(options.bounds);
|
||||
const [onOptionsChangeDebounced] = useDebouncedCallback(onOptionsChange, 200);
|
||||
|
||||
const [geoJson] = useLoadGeoJson(options.mapType);
|
||||
|
||||
// `options.bounds` could be empty only if user didn't edit bounds yet - through preview or in this editor.
|
||||
// In this case we should keep empty bounds value because it tells renderer to fit map every time.
|
||||
useEffect(() => {
|
||||
setBounds(options.bounds);
|
||||
}, [options.bounds]);
|
||||
if (options.bounds) {
|
||||
setBounds(options.bounds);
|
||||
} else {
|
||||
const defaultBounds = getGeoJsonBounds(geoJson);
|
||||
if (defaultBounds) {
|
||||
setBounds(defaultBounds);
|
||||
}
|
||||
}
|
||||
}, [options.bounds, geoJson]);
|
||||
|
||||
const updateBounds = useCallback(
|
||||
(i, j, v) => {
|
||||
@@ -33,29 +47,47 @@ export default function BoundsSettings({ options, onOptionsChange }) {
|
||||
[bounds, onOptionsChangeDebounced]
|
||||
);
|
||||
|
||||
const boundsAvailable = isArray(bounds);
|
||||
|
||||
return (
|
||||
<React.Fragment>
|
||||
<Section>
|
||||
<ControlLabel label="North-East latitude and longitude">
|
||||
<ControlLabel label="North-East Latitude and Longitude">
|
||||
<Grid.Row gutter={15}>
|
||||
<Grid.Col span={12}>
|
||||
<InputNumber value={bounds[1][0]} onChange={value => updateBounds(1, 0, value)} />
|
||||
<InputNumber
|
||||
disabled={!boundsAvailable}
|
||||
value={boundsAvailable ? bounds[1][0] : undefined}
|
||||
onChange={value => updateBounds(1, 0, value)}
|
||||
/>
|
||||
</Grid.Col>
|
||||
<Grid.Col span={12}>
|
||||
<InputNumber value={bounds[1][1]} onChange={value => updateBounds(1, 1, value)} />
|
||||
<InputNumber
|
||||
disabled={!boundsAvailable}
|
||||
value={boundsAvailable ? bounds[1][1] : undefined}
|
||||
onChange={value => updateBounds(1, 1, value)}
|
||||
/>
|
||||
</Grid.Col>
|
||||
</Grid.Row>
|
||||
</ControlLabel>
|
||||
</Section>
|
||||
|
||||
<Section>
|
||||
<ControlLabel label="South-West latitude and longitude">
|
||||
<ControlLabel label="South-West Latitude and Longitude">
|
||||
<Grid.Row gutter={15}>
|
||||
<Grid.Col span={12}>
|
||||
<InputNumber value={bounds[0][0]} onChange={value => updateBounds(0, 0, value)} />
|
||||
<InputNumber
|
||||
disabled={!boundsAvailable}
|
||||
value={boundsAvailable ? bounds[0][0] : undefined}
|
||||
onChange={value => updateBounds(0, 0, value)}
|
||||
/>
|
||||
</Grid.Col>
|
||||
<Grid.Col span={12}>
|
||||
<InputNumber value={bounds[0][1]} onChange={value => updateBounds(0, 1, value)} />
|
||||
<InputNumber
|
||||
disabled={!boundsAvailable}
|
||||
value={boundsAvailable ? bounds[0][1] : undefined}
|
||||
onChange={value => updateBounds(0, 1, value)}
|
||||
/>
|
||||
</Grid.Col>
|
||||
</Grid.Row>
|
||||
</ControlLabel>
|
||||
|
||||
@@ -12,7 +12,7 @@ export default function ColorsSettings({ options, onOptionsChange }) {
|
||||
<Section>
|
||||
<Select
|
||||
layout="horizontal"
|
||||
label="Clustering mode"
|
||||
label="Clustering Mode"
|
||||
data-test="Choropleth.Editor.ClusteringMode"
|
||||
defaultValue={options.clusteringMode}
|
||||
onChange={clusteringMode => onOptionsChange({ clusteringMode })}>
|
||||
@@ -71,7 +71,7 @@ export default function ColorsSettings({ options, onOptionsChange }) {
|
||||
<Section>
|
||||
<ColorPicker
|
||||
layout="horizontal"
|
||||
label="No value color"
|
||||
label="No Value Color"
|
||||
interactive
|
||||
presetColors={ColorPalette}
|
||||
placement="topRight"
|
||||
@@ -85,7 +85,7 @@ export default function ColorsSettings({ options, onOptionsChange }) {
|
||||
<Section>
|
||||
<ColorPicker
|
||||
layout="horizontal"
|
||||
label="Background color"
|
||||
label="Background Color"
|
||||
interactive
|
||||
presetColors={ColorPalette}
|
||||
placement="topRight"
|
||||
@@ -99,7 +99,7 @@ export default function ColorsSettings({ options, onOptionsChange }) {
|
||||
<Section>
|
||||
<ColorPicker
|
||||
layout="horizontal"
|
||||
label="Borders color"
|
||||
label="Borders Color"
|
||||
interactive
|
||||
presetColors={ColorPalette}
|
||||
placement="topRight"
|
||||
|
||||
@@ -1,4 +1,6 @@
import React from "react";
import { map } from "lodash";
import React, { useMemo } from "react";
import PropTypes from "prop-types";
import { useDebouncedCallback } from "use-debounce";
import * as Grid from "antd/lib/grid";
import {
@@ -12,49 +14,29 @@ import {
} from "@/components/visualizations/editor";
import { EditorPropTypes } from "@/visualizations/prop-types";

function TemplateFormatHint({ mapType }) { // eslint-disable-line react/prop-types
import useLoadGeoJson from "../hooks/useLoadGeoJson";
import { getGeoJsonFields } from "./utils";

function TemplateFormatHint({ geoJsonProperties }) {
return (
<ContextHelp placement="topLeft" arrowPointAtCenter>
<div style={{ paddingBottom: 5 }}>
All query result columns can be referenced using <code>{"{{ column_name }}"}</code> syntax.
<div>
All query result columns can be referenced using <code>{"{{ column_name }}"}</code> syntax.
</div>
<div>
Use <code>{"{{ @@value }}"}</code> to access formatted value.
</div>
</div>
<div style={{ paddingBottom: 5 }}>Use special names to access additional properties:</div>
<div>
<code>{"{{ @@value }}"}</code> formatted value;
</div>
{mapType === "countries" && (
{geoJsonProperties.length > 0 && (
<React.Fragment>
<div>
<code>{"{{ @@name }}"}</code> short country name;
</div>
<div>
<code>{"{{ @@name_long }}"}</code> full country name;
</div>
<div>
<code>{"{{ @@abbrev }}"}</code> abbreviated country name;
</div>
<div>
<code>{"{{ @@iso_a2 }}"}</code> two-letter ISO country code;
</div>
<div>
<code>{"{{ @@iso_a3 }}"}</code> three-letter ISO country code;
</div>
<div>
<code>{"{{ @@iso_n3 }}"}</code> three-digit ISO country code.
</div>
</React.Fragment>
)}
{mapType === "subdiv_japan" && (
<React.Fragment>
<div>
<code>{"{{ @@name }}"}</code> Prefecture name in English;
</div>
<div>
<code>{"{{ @@name_local }}"}</code> Prefecture name in Kanji;
</div>
<div>
<code>{"{{ @@iso_3166_2 }}"}</code> five-letter ISO subdivision code (JP-xx);
<div className="p-b-5">GeoJSON properties could be accessed by these names:</div>
<div style={{ maxHeight: 300, overflow: "auto" }}>
{map(geoJsonProperties, property => (
<div key={property}>
<code>{`{{ @@${property}}}`}</code>
</div>
))}
</div>
</React.Fragment>
)}
@@ -62,10 +44,20 @@ function TemplateFormatHint({ mapType }) {
);
}

TemplateFormatHint.propTypes = {
geoJsonProperties: PropTypes.arrayOf(PropTypes.string),
};

TemplateFormatHint.defaultProps = {
geoJsonProperties: [],
};

export default function GeneralSettings({ options, onOptionsChange }) {
const [onOptionsChangeDebounced] = useDebouncedCallback(onOptionsChange, 200);
const [geoJson] = useLoadGeoJson(options.mapType);
const geoJsonFields = useMemo(() => getGeoJsonFields(geoJson), [geoJson]);

const templateFormatHint = <TemplateFormatHint mapType={options.mapType} />;
const templateFormatHint = <TemplateFormatHint geoJsonProperties={geoJsonFields} />;

return (
<div className="choropleth-visualization-editor-format-settings">
@@ -75,7 +67,7 @@ export default function GeneralSettings({ options, onOptionsChange }) {
<Input
label={
<React.Fragment>
Value format
Value Format
<ContextHelp.NumberFormatSpecs />
</React.Fragment>
}
@@ -86,7 +78,7 @@ export default function GeneralSettings({ options, onOptionsChange }) {
</Grid.Col>
<Grid.Col span={12}>
<Input
label="Value placeholder"
label="Value Placeholder"
data-test="Choropleth.Editor.ValuePlaceholder"
defaultValue={options.noValuePlaceholder}
onChange={event => onOptionsChangeDebounced({ noValuePlaceholder: event.target.value })}
@@ -100,7 +92,7 @@ export default function GeneralSettings({ options, onOptionsChange }) {
data-test="Choropleth.Editor.LegendVisibility"
checked={options.legend.visible}
onChange={event => onOptionsChange({ legend: { visible: event.target.checked } })}>
Show legend
Show Legend
</Checkbox>
</Section>

@@ -108,7 +100,7 @@ export default function GeneralSettings({ options, onOptionsChange }) {
<Grid.Row gutter={15}>
<Grid.Col span={12}>
<Select
label="Legend position"
label="Legend Position"
data-test="Choropleth.Editor.LegendPosition"
disabled={!options.legend.visible}
defaultValue={options.legend.position}
@@ -130,7 +122,7 @@ export default function GeneralSettings({ options, onOptionsChange }) {
<Grid.Col span={12}>
<TextAlignmentSelect
data-test="Choropleth.Editor.LegendTextAlignment"
label="Legend text alignment"
label="Legend Text Alignment"
disabled={!options.legend.visible}
defaultValue={options.legend.alignText}
onChange={event => onOptionsChange({ legend: { alignText: event.target.value } })}
@@ -144,13 +136,13 @@ export default function GeneralSettings({ options, onOptionsChange }) {
data-test="Choropleth.Editor.TooltipEnabled"
checked={options.tooltip.enabled}
onChange={event => onOptionsChange({ tooltip: { enabled: event.target.checked } })}>
Show tooltip
Show Tooltip
</Checkbox>
</Section>

<Section>
<Input
label={<React.Fragment>Tooltip template {templateFormatHint}</React.Fragment>}
label={<React.Fragment>Tooltip Template {templateFormatHint}</React.Fragment>}
data-test="Choropleth.Editor.TooltipTemplate"
disabled={!options.tooltip.enabled}
defaultValue={options.tooltip.template}
@@ -163,13 +155,13 @@ export default function GeneralSettings({ options, onOptionsChange }) {
data-test="Choropleth.Editor.PopupEnabled"
checked={options.popup.enabled}
onChange={event => onOptionsChange({ popup: { enabled: event.target.checked } })}>
Show popup
Show Popup
</Checkbox>
</Section>

<Section>
<TextArea
label={<React.Fragment>Popup template {templateFormatHint}</React.Fragment>}
label={<React.Fragment>Popup Template {templateFormatHint}</React.Fragment>}
data-test="Choropleth.Editor.PopupTemplate"
disabled={!options.popup.enabled}
rows={4}

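For illustration, here is what the tooltip and popup options produced by this editor might look like once filled in. The template syntax is the one described by TemplateFormatHint above; `{{ @@name }}` assumes the loaded GeoJSON exposes a `name` property, and `population` stands in for an arbitrary query result column.

// Hypothetical slice of the visualization options (values are illustrative only).
const exampleOptions = {
  tooltip: {
    enabled: true,
    template: "<b>{{ @@name }}</b>: {{ @@value }}",
  },
  popup: {
    enabled: true,
    template: "Value for {{ @@name }}: {{ @@value }}<br/>Query column: {{ population }}",
  },
};
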
@@ -1,91 +1,86 @@
import { map } from "lodash";
import React, { useMemo } from "react";
import { isString, map, filter, get } from "lodash";
import React, { useMemo, useCallback } from "react";
import * as Grid from "antd/lib/grid";
import { EditorPropTypes } from "@/visualizations/prop-types";
import { Section, Select } from "@/components/visualizations/editor";
import { inferCountryCodeType } from "./utils";
import { visualizationsSettings } from "@/visualizations/visualizationsSettings";

import useLoadGeoJson from "../hooks/useLoadGeoJson";
import { getGeoJsonFields } from "./utils";

export default function GeneralSettings({ options, data, onOptionsChange }) {
const countryCodeTypes = useMemo(() => {
switch (options.mapType) {
case "countries":
return {
name: "Short name",
name_long: "Full name",
abbrev: "Abbreviated name",
iso_a2: "ISO code (2 letters)",
iso_a3: "ISO code (3 letters)",
iso_n3: "ISO code (3 digits)",
};
case "subdiv_japan":
return {
name: "Name",
name_local: "Name (local)",
iso_3166_2: "ISO-3166-2",
};
default:
return {};
}
}, [options.mapType]);
const [geoJson, isLoadingGeoJson] = useLoadGeoJson(options.mapType);
const geoJsonFields = useMemo(() => getGeoJsonFields(geoJson), [geoJson]);

const handleChangeAndInferType = newOptions => {
newOptions.countryCodeType =
inferCountryCodeType(
newOptions.mapType || options.mapType,
data ? data.rows : [],
newOptions.countryCodeColumn || options.countryCodeColumn
) || options.countryCodeType;
onOptionsChange(newOptions);
};
// While geoJson is loading - show last selected field in select
const targetFields = isLoadingGeoJson ? filter([options.targetField], isString) : geoJsonFields;

const fieldNames = get(visualizationsSettings, `choroplethAvailableMaps.${options.mapType}.fieldNames`, {});

const handleMapChange = useCallback(
mapType => {
onOptionsChange({ mapType: mapType || null });
},
[onOptionsChange]
);

return (
<React.Fragment>
<Section>
<Select
label="Map type"
label="Map"
data-test="Choropleth.Editor.MapType"
defaultValue={options.mapType}
onChange={mapType => handleChangeAndInferType({ mapType })}>
<Select.Option key="countries" data-test="Choropleth.Editor.MapType.Countries">
Countries
</Select.Option>
<Select.Option key="subdiv_japan" data-test="Choropleth.Editor.MapType.Japan">
Japan/Prefectures
</Select.Option>
</Select>
</Section>

<Section>
<Select
label="Key column"
data-test="Choropleth.Editor.KeyColumn"
defaultValue={options.countryCodeColumn}
onChange={countryCodeColumn => handleChangeAndInferType({ countryCodeColumn })}>
{map(data.columns, ({ name }) => (
<Select.Option key={name} data-test={`Choropleth.Editor.KeyColumn.${name}`}>
{name}
onChange={handleMapChange}>
{map(visualizationsSettings.choroplethAvailableMaps, (_, mapType) => (
<Select.Option key={mapType} data-test={`Choropleth.Editor.MapType.${mapType}`}>
{get(visualizationsSettings, `choroplethAvailableMaps.${mapType}.name`, mapType)}
</Select.Option>
))}
</Select>
</Section>

<Section>
<Select
label="Key type"
data-test="Choropleth.Editor.KeyType"
value={options.countryCodeType}
onChange={countryCodeType => onOptionsChange({ countryCodeType })}>
{map(countryCodeTypes, (name, type) => (
<Select.Option key={type} data-test={`Choropleth.Editor.KeyType.${type}`}>
{name}
</Select.Option>
))}
</Select>
<Grid.Row gutter={15}>
<Grid.Col span={12}>
<Select
label="Key Column"
className="w-100"
data-test="Choropleth.Editor.KeyColumn"
disabled={data.columns.length === 0}
defaultValue={options.keyColumn}
onChange={keyColumn => onOptionsChange({ keyColumn })}>
{map(data.columns, ({ name }) => (
<Select.Option key={name} data-test={`Choropleth.Editor.KeyColumn.${name}`}>
{name}
</Select.Option>
))}
</Select>
</Grid.Col>
<Grid.Col span={12}>
<Select
label="Target Field"
className="w-100"
data-test="Choropleth.Editor.TargetField"
disabled={isLoadingGeoJson || targetFields.length === 0}
loading={isLoadingGeoJson}
value={options.targetField}
onChange={targetField => onOptionsChange({ targetField })}>
{map(targetFields, field => (
<Select.Option key={field} data-test={`Choropleth.Editor.TargetField.${field}`}>
{fieldNames[field] || field}
</Select.Option>
))}
</Select>
</Grid.Col>
</Grid.Row>
</Section>

<Section>
<Select
label="Value column"
label="Value Column"
data-test="Choropleth.Editor.ValueColumn"
disabled={data.columns.length === 0}
defaultValue={options.valueColumn}
onChange={valueColumn => onOptionsChange({ valueColumn })}>
{map(data.columns, ({ name }) => (

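The editor above reads everything it knows about a map from visualizationsSettings.choroplethAvailableMaps: the display name for the "Map" select, the GeoJSON URL, and optional human-readable fieldNames for the "Target Field" select. A rough sketch of that expected shape follows; the key, name, URL and field labels are assumptions for illustration, not the actual Redash configuration.

// Illustrative only: the shape implied by the `get()` calls above.
const choroplethAvailableMaps = {
  countries: {
    name: "Countries", // label shown in the "Map" select
    url: "/static/maps/countries.geo.json", // where useLoadGeoJson fetches the GeoJSON from
    fieldNames: {
      // optional friendly labels for GeoJSON properties
      name: "Short name",
      iso_a3: "ISO code (3 letters)",
    },
  },
};
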
@@ -1,38 +1,28 @@
/* eslint-disable import/prefer-default-export */
import { isObject, isArray, reduce, keys, uniq } from "lodash";
import L from "leaflet";

import _ from "lodash";

export function inferCountryCodeType(mapType, data, countryCodeField) {
const regexMap = {
countries: {
iso_a2: /^[a-z]{2}$/i,
iso_a3: /^[a-z]{3}$/i,
iso_n3: /^[0-9]{3}$/i,
export function getGeoJsonFields(geoJson) {
const features = isObject(geoJson) && isArray(geoJson.features) ? geoJson.features : [];
return reduce(
features,
(result, feature) => {
const properties = isObject(feature) && isObject(feature.properties) ? feature.properties : {};
return uniq([...result, ...keys(properties)]);
},
subdiv_japan: {
name: /^[a-z]+$/i,
name_local: /^[\u3400-\u9FFF\uF900-\uFAFF]|[\uD840-\uD87F][\uDC00-\uDFFF]+$/i,
iso_3166_2: /^JP-[0-9]{2}$/i,
},
};

const regex = regexMap[mapType];

const initState = _.mapValues(regex, () => 0);

const result = _.chain(data)
.reduce((memo, item) => {
const value = item[countryCodeField];
if (_.isString(value)) {
_.each(regex, (r, k) => {
memo[k] += r.test(value) ? 1 : 0;
});
}
return memo;
}, initState)
.toPairs()
.reduce((memo, item) => (item[1] > memo[1] ? item : memo))
.value();

return result[1] / data.length >= 0.9 ? result[0] : null;
[]
);
}

export function getGeoJsonBounds(geoJson) {
if (isObject(geoJson)) {
const layer = L.geoJSON(geoJson);
const bounds = layer.getBounds();
if (bounds.isValid()) {
return [
[bounds._southWest.lat, bounds._southWest.lng],
[bounds._northEast.lat, bounds._northEast.lng],
];
}
}
return null;
}

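A minimal usage sketch of getGeoJsonFields, assuming a small inline FeatureCollection (the data is made up); it simply collects the union of property names across features:

import { getGeoJsonFields } from "./utils";

const geoJson = {
  type: "FeatureCollection",
  features: [
    { type: "Feature", properties: { name: "Japan", iso_a3: "JPN" }, geometry: null },
    { type: "Feature", properties: { name: "Brazil", pop_est: 211000000 }, geometry: null },
  ],
};

console.log(getGeoJsonFields(geoJson)); // ["name", "iso_a3", "pop_est"]
console.log(getGeoJsonFields(null)); // [] - anything that is not a FeatureCollection yields an empty list
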
@@ -1,43 +1,26 @@
import { omit, merge, get } from "lodash";
import axios from "axios";
import React, { useState, useEffect } from "react";
import { omit, noop } from "lodash";
import React, { useState, useEffect, useRef } from "react";
import { RendererPropTypes } from "@/visualizations/prop-types";
import useMemoWithDeepCompare from "@/lib/hooks/useMemoWithDeepCompare";
import { visualizationsSettings } from "@/visualizations/visualizationsSettings";

import useLoadGeoJson from "../hooks/useLoadGeoJson";
import initChoropleth from "./initChoropleth";
import { prepareData } from "./utils";
import "./renderer.less";

function getDataUrl(type) {
return get(visualizationsSettings, `choroplethAvailableMaps.${type}.url`, undefined);
}

export default function Renderer({ data, options, onOptionsChange }) {
const [container, setContainer] = useState(null);
const [geoJson, setGeoJson] = useState(null);
const [geoJson] = useLoadGeoJson(options.mapType);
const onBoundsChangeRef = useRef();
onBoundsChangeRef.current = onOptionsChange ? bounds => onOptionsChange({ ...options, bounds }) : noop;

const optionsWithoutBounds = useMemoWithDeepCompare(() => omit(options, ["bounds"]), [options]);

const [map, setMap] = useState(null);

useEffect(() => {
let cancelled = false;

axios.get(getDataUrl(options.mapType)).then(({ data }) => {
if (!cancelled) {
setGeoJson(data);
}
});

return () => {
cancelled = true;
};
}, [options.mapType]);

useEffect(() => {
if (container) {
const _map = initChoropleth(container);
const _map = initChoropleth(container, (...args) => onBoundsChangeRef.current(...args));
setMap(_map);
return () => {
_map.destroy();
@@ -49,24 +32,17 @@ export default function Renderer({ data, options, onOptionsChange }) {
if (map) {
map.updateLayers(
geoJson,
prepareData(data.rows, optionsWithoutBounds.countryCodeColumn, optionsWithoutBounds.valueColumn),
prepareData(data.rows, optionsWithoutBounds.keyColumn, optionsWithoutBounds.valueColumn),
options // detect changes for all options except bounds, but pass them all!
);
}
}, [map, geoJson, data.rows, optionsWithoutBounds]); // eslint-disable-line react-hooks/exhaustive-deps

// This may come only from editor
useEffect(() => {
if (map) {
map.updateBounds(options.bounds);
}
}, [map, options.bounds]);

useEffect(() => {
if (map && onOptionsChange) {
map.onBoundsChange = bounds => {
onOptionsChange(merge({}, options, { bounds }));
};
}
}, [map, options, onOptionsChange]);

return (

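The useRef assignment above is the usual "latest callback" pattern: the Leaflet map is created once with a stable wrapper, and the wrapper always forwards to the most recent handler, so the map does not have to be rebuilt whenever options change. A standalone sketch of the same idea, with invented names (createWidget and onChange are stand-ins, not Redash APIs):

import { useEffect, useRef } from "react";

// Illustrative hook: keeps one widget instance alive while always calling the newest handler.
export default function useStableWidget(createWidget, onChange) {
  const onChangeRef = useRef(onChange);
  onChangeRef.current = onChange; // updated on every render, no effect re-run needed

  useEffect(() => {
    const widget = createWidget((...args) => onChangeRef.current(...args));
    return () => widget.destroy();
  }, [createWidget]); // assumes createWidget is stable; the widget survives handler changes
}
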
@@ -35,9 +35,9 @@ const CustomControl = L.Control.extend({
});

function prepareLayer({ feature, layer, data, options, limits, colors, formatValue }) {
const value = getValueForFeature(feature, data, options.countryCodeType);
const value = getValueForFeature(feature, data, options.targetField);
const valueFormatted = formatValue(value);
const featureData = prepareFeatureProperties(feature, valueFormatted, data, options.countryCodeType);
const featureData = prepareFeatureProperties(feature, valueFormatted, data, options.targetField);
const color = getColorByValue(value, limits, colors, options.colors.noValue);

layer.setStyle({
@@ -69,7 +69,20 @@ function prepareLayer({ feature, layer, data, options, limits, colors, formatVal
});
}

export default function initChoropleth(container) {
function validateBounds(bounds, fallbackBounds) {
if (bounds) {
bounds = L.latLngBounds(bounds[0], bounds[1]);
if (bounds.isValid()) {
return bounds;
}
}
if (fallbackBounds && fallbackBounds.isValid()) {
return fallbackBounds;
}
return null;
}

export default function initChoropleth(container, onBoundsChange) {
const _map = L.map(container, {
center: [0.0, 0.0],
zoom: 1,
@@ -82,13 +95,14 @@ export default function initChoropleth(container) {
let _choropleth = null;
const _legend = new CustomControl();

let onBoundsChange = () => {};
function handleMapBoundsChange() {
const bounds = _map.getBounds();
onBoundsChange([
[bounds._southWest.lat, bounds._southWest.lng],
[bounds._northEast.lat, bounds._northEast.lng],
]);
if (isFunction(onBoundsChange)) {
const bounds = _map.getBounds();
onBoundsChange([
[bounds._southWest.lat, bounds._southWest.lng],
[bounds._northEast.lat, bounds._northEast.lng],
]);
}
}

let boundsChangedFromMap = false;
@@ -123,14 +137,13 @@ export default function initChoropleth(container) {
},
}).addTo(_map);

const bounds = _choropleth.getBounds();
_map.fitBounds(options.bounds || bounds, { animate: false, duration: 0 });
_map.setMaxBounds(bounds);
const mapBounds = _choropleth.getBounds();
const bounds = validateBounds(options.bounds, mapBounds);
_map.fitBounds(bounds, { animate: false, duration: 0 });

// send updated bounds to editor; delay this to avoid infinite update loop
setTimeout(() => {
handleMapBoundsChange();
}, 10);
// equivalent to `_map.setMaxBounds(mapBounds)` but without animation
_map.options.maxBounds = mapBounds;
_map.panInsideBounds(mapBounds, { animate: false, duration: 0 });

// update legend
if (options.legend.visible && legend.length > 0) {
@@ -149,8 +162,8 @@ export default function initChoropleth(container) {
function updateBounds(bounds) {
if (!boundsChangedFromMap) {
const layerBounds = _choropleth ? _choropleth.getBounds() : _map.getBounds();
bounds = bounds ? L.latLngBounds(bounds[0], bounds[1]) : layerBounds;
if (bounds.isValid()) {
bounds = validateBounds(bounds, layerBounds);
if (bounds) {
_map.fitBounds(bounds, { animate: false, duration: 0 });
}
}
@@ -161,12 +174,6 @@ export default function initChoropleth(container) {
});

return {
get onBoundsChange() {
return onBoundsChange;
},
set onBoundsChange(value) {
onBoundsChange = isFunction(value) ? value : () => {};
},
updateLayers,
updateBounds,
destroy() {

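The bounds that travel between the map and the editor are a plain nested array rather than a Leaflet object; validateBounds converts it to L.latLngBounds and falls back to the layer's own bounds when the value is missing or invalid. For illustration (the coordinates below are made up):

// South-west and north-east corners as [lat, lng] pairs, as stored in options.bounds.
const exampleBounds = [
  [24.396308, 122.93457],
  [45.551483, 153.986672],
];
// Inside initChoropleth this becomes L.latLngBounds(exampleBounds[0], exampleBounds[1]);
// if that is not a valid bounds object, the choropleth layer's own bounds are used instead.
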
@@ -18,17 +18,17 @@ export function createNumberFormatter(format, placeholder) {
};
}

export function prepareData(data, countryCodeField, valueField) {
if (!countryCodeField || !valueField) {
export function prepareData(data, keyColumn, valueColumn) {
if (!keyColumn || !valueColumn) {
return {};
}

const result = {};
each(data, item => {
if (item[countryCodeField]) {
const value = parseFloat(item[valueField]);
result[item[countryCodeField]] = {
code: item[countryCodeField],
if (item[keyColumn]) {
const value = parseFloat(item[valueColumn]);
result[item[keyColumn]] = {
code: item[keyColumn],
value: isFinite(value) ? value : undefined,
item,
};
@@ -37,18 +37,18 @@ export function prepareData(data, countryCodeField, valueField) {
return result;
}

export function prepareFeatureProperties(feature, valueFormatted, data, countryCodeType) {
export function prepareFeatureProperties(feature, valueFormatted, data, targetField) {
const result = {};
each(feature.properties, (value, key) => {
result["@@" + key] = value;
});
result["@@value"] = valueFormatted;
const datum = data[feature.properties[countryCodeType]] || {};
const datum = data[feature.properties[targetField]] || {};
return extend(result, datum.item);
}

export function getValueForFeature(feature, data, countryCodeType) {
const code = feature.properties[countryCodeType];
export function getValueForFeature(feature, data, targetField) {
const code = feature.properties[targetField];
if (isString(code) && isObject(data[code])) {
return data[code].value;
}
@@ -70,7 +70,7 @@ export function createScale(features, data, options) {
// Calculate limits
const values = uniq(
filter(
map(features, feature => getValueForFeature(feature, data, options.countryCodeType)),
map(features, feature => getValueForFeature(feature, data, options.targetField)),
isFinite
)
);

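To make the keyColumn/valueColumn rename concrete, here is an illustrative input and the map that prepareData builds from it (rows and column names are invented):

import { prepareData } from "./utils";

const rows = [
  { state: "CA", population: "39500000" },
  { state: "TX", population: "29100000" },
  { state: "??", population: "n/a" },
];

const prepared = prepareData(rows, "state", "population");
// {
//   CA: { code: "CA", value: 39500000, item: { state: "CA", population: "39500000" } },
//   TX: { code: "TX", value: 29100000, item: { state: "TX", population: "29100000" } },
//   "??": { code: "??", value: undefined, item: { state: "??", population: "n/a" } }, // non-numeric value
// }
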
@@ -1,11 +1,16 @@
import { merge } from "lodash";
import { isNil, merge, first, keys, get } from "lodash";
import { visualizationsSettings } from "@/visualizations/visualizationsSettings";
import ColorPalette from "./ColorPalette";

function getDefaultMap() {
return first(keys(visualizationsSettings.choroplethAvailableMaps)) || null;
}

const DEFAULT_OPTIONS = {
mapType: "countries",
countryCodeColumn: "",
countryCodeType: "iso_a3",
valueColumn: "",
keyColumn: null,
targetField: null,
valueColumn: null,
clusteringMode: "e",
steps: 5,
valueFormat: "0,0.00",
@@ -33,5 +38,26 @@ const DEFAULT_OPTIONS = {
};

export default function getOptions(options) {
return merge({}, DEFAULT_OPTIONS, options);
const result = merge({}, DEFAULT_OPTIONS, options);

// Both renderer and editor always provide new `bounds` array, so no need to clone it here.
// Keeping original object also reduces amount of updates in components
result.bounds = get(options, "bounds");

if (isNil(visualizationsSettings.choroplethAvailableMaps[result.mapType])) {
result.mapType = getDefaultMap();
}

// backward compatibility
if (!isNil(result.countryCodeColumn)) {
result.keyColumn = result.countryCodeColumn;
}
delete result.countryCodeColumn;

if (!isNil(result.countryCodeType)) {
result.targetField = result.countryCodeType;
}
delete result.countryCodeType;

return result;
}

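A quick sketch of the backward-compatibility path above: visualizations saved with the old option names come out with the new ones. The import path and the column names below are assumptions for illustration.

import getOptions from "./getOptions";

const legacyOptions = {
  countryCodeColumn: "country",
  countryCodeType: "iso_a3",
  valueColumn: "value",
};

const migrated = getOptions(legacyOptions);
// migrated.keyColumn === "country" and migrated.targetField === "iso_a3";
// countryCodeColumn and countryCodeType are removed from the result.
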
@@ -0,0 +1,39 @@
import { isString, isObject, get } from "lodash";
import { useState, useEffect } from "react";
import axios from "axios";
import { visualizationsSettings } from "@/visualizations/visualizationsSettings";
import createReferenceCountingCache from "@/lib/referenceCountingCache";

const cache = createReferenceCountingCache();

export default function useLoadGeoJson(mapType) {
const [geoJson, setGeoJson] = useState(null);
const [isLoading, setIsLoading] = useState(false);

useEffect(() => {
const mapUrl = get(visualizationsSettings, `choroplethAvailableMaps.${mapType}.url`, undefined);

if (isString(mapUrl)) {
setIsLoading(true);
let cancelled = false;

const promise = cache.get(mapUrl, () => axios.get(mapUrl).catch(() => null));
promise.then(({ data }) => {
if (!cancelled) {
setGeoJson(isObject(data) ? data : null);
setIsLoading(false);
}
});

return () => {
cancelled = true;
cache.release(mapUrl);
};
} else {
setGeoJson(null);
setIsLoading(false);
}
}, [mapType]);

return [geoJson, isLoading];
}

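A minimal usage sketch of the hook inside a component; the component name, markup, and the relative import path (which assumes a folder next to Editor/ and Renderer/) are illustrative:

import React from "react";
import useLoadGeoJson from "../hooks/useLoadGeoJson";

export default function MapPreview({ mapType }) {
  // geoJson is the parsed GeoJSON object (or null); isLoading flags an in-flight request
  const [geoJson, isLoading] = useLoadGeoJson(mapType);

  if (isLoading) {
    return <div>Loading map...</div>;
  }
  return <div>{geoJson ? "Map data loaded" : "No map available for this type"}</div>;
}
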
@@ -0,0 +1,40 @@
// This helper converts USA map from Mercator projection to Albers (USA)
// Usage: `node convert-projection.js > usa-albers.geo.json`

const { each, map, filter } = require("lodash");
const d3 = require("d3");

const albersUSA = d3.geo.albersUsa();
const mercator = d3.geo.mercator();

const geojson = require("./usa.geo.json");

function convertPoint(coord) {
const pt = albersUSA(coord);
return pt ? mercator.invert(pt) : null;
}

function convertLineString(points) {
return filter(map(points, convertPoint));
}

function convertPolygon(polygon) {
return map(polygon, convertLineString);
}

function convertMultiPolygon(multiPolygon) {
return map(multiPolygon, convertPolygon);
}

each(geojson.features, feature => {
switch (feature.geometry.type) {
case "Polygon":
feature.geometry.coordinates = convertPolygon(feature.geometry.coordinates);
break;
case "MultiPolygon":
feature.geometry.coordinates = convertMultiPolygon(feature.geometry.coordinates);
break;
}
});

console.log(JSON.stringify(geojson));

viz-lib/src/visualizations/choropleth/maps/usa.geo.json (new file, 1 line; diff suppressed because one or more lines are too long)