mirror of https://github.com/getredash/redash.git
synced 2025-12-20 01:47:39 -05:00

Compare commits: 24.06.0-de ... system-sta (6 commits)
d965bc2653, 361308cb10, 673c55609a, 97fc91f6e1, 311ec78090, 338c3b43e8

These commits add an opt-in "LIMIT 1000" auto-limit for SQL data sources: a query-editor checkbox, an apply_auto_limit flag threaded through the execute endpoints, query-runner support for rewriting the last SELECT of a script, query-hash updates so caching respects the flag, and tests.
@@ -35,11 +35,11 @@ CounterCard.defaultProps = {
 const queryJobsColumns = [
   { title: "Queue", dataIndex: "origin" },
-  { title: "Query ID", dataIndex: "meta.query_id" },
+  { title: "Query ID", dataIndex: ["meta", "query_id"] },
-  { title: "Org ID", dataIndex: "meta.org_id" },
+  { title: "Org ID", dataIndex: ["meta", "org_id"] },
-  { title: "Data Source ID", dataIndex: "meta.data_source_id" },
+  { title: "Data Source ID", dataIndex: ["meta", "data_source_id"] },
-  { title: "User ID", dataIndex: "meta.user_id" },
+  { title: "User ID", dataIndex: ["meta", "user_id"] },
-  Columns.custom(scheduled => scheduled.toString(), { title: "Scheduled", dataIndex: "meta.scheduled" }),
+  Columns.custom(scheduled => scheduled.toString(), { title: "Scheduled", dataIndex: ["meta", "scheduled"] }),
   Columns.timeAgo({ title: "Start Time", dataIndex: "started_at" }),
   Columns.timeAgo({ title: "Enqueue Time", dataIndex: "enqueued_at" }),
 ];
@@ -0,0 +1,37 @@
+import React, { useCallback } from "react";
+import PropTypes from "prop-types";
+import recordEvent from "@/services/recordEvent";
+import Checkbox from "antd/lib/checkbox";
+import Tooltip from "antd/lib/tooltip";
+
+export default function AutoLimitCheckbox({ available, checked, onChange }) {
+  const handleClick = useCallback(() => {
+    recordEvent("checkbox_auto_limit", "screen", "query_editor", { state: !checked });
+    onChange(!checked);
+  }, [checked, onChange]);
+
+  let tooltipMessage = null;
+  if (!available) {
+    tooltipMessage = "Auto limiting is not available for this Data Source type.";
+  } else {
+    tooltipMessage = "Auto limit results to first 1000 rows.";
+  }
+
+  return (
+    <Tooltip placement="top" title={tooltipMessage}>
+      <Checkbox
+        className="query-editor-controls-checkbox"
+        disabled={!available}
+        onClick={handleClick}
+        checked={available && checked}>
+        LIMIT 1000
+      </Checkbox>
+    </Tooltip>
+  );
+}
+
+AutoLimitCheckbox.propTypes = {
+  available: PropTypes.bool,
+  checked: PropTypes.bool.isRequired,
+  onChange: PropTypes.func.isRequired,
+};
@@ -8,6 +8,7 @@ import KeyboardShortcuts, { humanReadableShortcut } from "@/services/KeyboardSho
 
 import AutocompleteToggle from "./AutocompleteToggle";
 import "./QueryEditorControls.less";
+import AutoLimitCheckbox from "@/components/queries/QueryEditor/AutoLimitCheckbox";
 
 export function ButtonTooltip({ title, shortcut, ...props }) {
   shortcut = humanReadableShortcut(shortcut, 1); // show only primary shortcut
@@ -38,6 +39,7 @@ export default function EditorControl({
   saveButtonProps,
   executeButtonProps,
   autocompleteToggleProps,
+  autoLimitCheckboxProps,
   dataSourceSelectorProps,
 }) {
   useEffect(() => {
@@ -84,6 +86,7 @@ export default function EditorControl({
           onToggle={autocompleteToggleProps.onToggle}
         />
       )}
+      {autoLimitCheckboxProps !== false && <AutoLimitCheckbox {...autoLimitCheckboxProps} />}
       {dataSourceSelectorProps === false && <span className="query-editor-controls-spacer" />}
       {dataSourceSelectorProps !== false && (
         <Select
@@ -153,6 +156,10 @@ EditorControl.propTypes = {
       onToggle: PropTypes.func,
     }),
   ]),
+  autoLimitCheckboxProps: PropTypes.oneOfType([
+    PropTypes.bool, // `false` to hide
+    PropTypes.shape(AutoLimitCheckbox.propTypes),
+  ]),
   dataSourceSelectorProps: PropTypes.oneOfType([
     PropTypes.bool, // `false` to hide
     PropTypes.shape({
@@ -175,5 +182,6 @@ EditorControl.defaultProps = {
   saveButtonProps: false,
   executeButtonProps: false,
   autocompleteToggleProps: false,
+  autoLimitCheckboxProps: false,
   dataSourceSelectorProps: false,
 };
@@ -21,6 +21,12 @@
   }
 }
 
+.query-editor-controls-checkbox {
+  display: inline-block;
+  white-space: nowrap;
+  margin: auto 5px;
+}
+
 .query-editor-controls-spacer {
   flex: 1 1 auto;
   height: 35px; // same as Antd <Select>
@@ -26,6 +26,7 @@ import { getEditorComponents } from "@/components/queries/editor-components";
 import useQuery from "./hooks/useQuery";
 import useVisualizationTabHandler from "./hooks/useVisualizationTabHandler";
 import useAutocompleteFlags from "./hooks/useAutocompleteFlags";
+import useAutoLimitFlags from "./hooks/useAutoLimitFlags";
 import useQueryExecute from "./hooks/useQueryExecute";
 import useQueryResultData from "@/lib/useQueryResultData";
 import useQueryDataSources from "./hooks/useQueryDataSources";
@@ -77,6 +78,7 @@ function QuerySource(props) {
 
   const editorRef = useRef(null);
   const [autocompleteAvailable, autocompleteEnabled, toggleAutocomplete] = useAutocompleteFlags(schema);
+  const [autoLimitAvailable, autoLimitChecked, setAutoLimit] = useAutoLimitFlags(dataSource, query, setQuery);
 
   const [handleQueryEditorChange] = useDebouncedCallback(queryText => {
     setQuery(extend(query.clone(), { query: queryText }));
@@ -306,6 +308,11 @@ function QuerySource(props) {
                   enabled: autocompleteEnabled,
                   onToggle: toggleAutocomplete,
                 }}
+                autoLimitCheckboxProps={{
+                  available: autoLimitAvailable,
+                  checked: autoLimitChecked,
+                  onChange: setAutoLimit,
+                }}
                 dataSourceSelectorProps={
                   dataSource
                     ? {
New file: client/app/pages/queries/hooks/useAutoLimitFlags.js (24 lines)
@@ -0,0 +1,24 @@
+import { useCallback, useState } from "react";
+import localOptions from "@/lib/localOptions";
+import { get, extend } from "lodash";
+
+function isAutoLimitAvailable(dataSource) {
+  return get(dataSource, "supports_auto_limit", false);
+}
+
+export default function useAutoLimitFlags(dataSource, query, setQuery) {
+  const isAvailable = isAutoLimitAvailable(dataSource);
+  const [isChecked, setIsChecked] = useState(localOptions.get("applyAutoLimit", true));
+  query.options.apply_auto_limit = isAvailable && isChecked;
+
+  const setAutoLimit = useCallback(
+    state => {
+      setIsChecked(state);
+      localOptions.set("applyAutoLimit", state);
+      setQuery(extend(query.clone(), { options: { ...query.options, apply_auto_limit: isAvailable && state } }));
+    },
+    [query, setQuery, isAvailable]
+  );
+
+  return [isAvailable, isChecked, setAutoLimit];
+}
@@ -435,11 +435,11 @@ class QueryResult {
     return `${queryName.replace(/ /g, "_") + moment(this.getUpdatedAt()).format("_YYYY_MM_DD")}.${fileType}`;
   }
 
-  static getByQueryId(id, parameters, maxAge) {
+  static getByQueryId(id, parameters, applyAutoLimit, maxAge) {
     const queryResult = new QueryResult();
 
     axios
-      .post(`api/queries/${id}/results`, { id, parameters, max_age: maxAge })
+      .post(`api/queries/${id}/results`, { id, parameters, apply_auto_limit: applyAutoLimit, max_age: maxAge })
       .then(response => {
         queryResult.update(response);
 
@@ -454,13 +454,14 @@ class QueryResult {
     return queryResult;
   }
 
-  static get(dataSourceId, query, parameters, maxAge, queryId) {
+  static get(dataSourceId, query, parameters, applyAutoLimit, maxAge, queryId) {
     const queryResult = new QueryResult();
 
     const params = {
       data_source_id: dataSourceId,
       parameters,
       query,
+      apply_auto_limit: applyAutoLimit,
       max_age: maxAge,
     };
 
@@ -130,7 +130,8 @@ export class Query {
   }
 
   getQueryResult(maxAge) {
-    const execute = () => QueryResult.getByQueryId(this.id, this.getParameters().getExecutionValues(), maxAge);
+    const execute = () =>
+      QueryResult.getByQueryId(this.id, this.getParameters().getExecutionValues(), this.getAutoLimit(), maxAge);
     return this.prepareQueryResultExecution(execute, maxAge);
   }
 
@@ -141,7 +142,8 @@ export class Query {
     }
 
     const parameters = this.getParameters().getExecutionValues({ joinListValues: true });
-    const execute = () => QueryResult.get(this.data_source_id, queryText, parameters, maxAge, this.id);
+    const execute = () =>
+      QueryResult.get(this.data_source_id, queryText, parameters, this.getAutoLimit(), maxAge, this.id);
     return this.prepareQueryResultExecution(execute, maxAge);
   }
 
@@ -184,6 +186,10 @@ export class Query {
     return this.$parameters;
   }
 
+  getAutoLimit() {
+    return this.options.apply_auto_limit;
+  }
+
   getParametersDefs(update = true) {
     return this.getParameters().get(update);
   }
@@ -488,9 +488,9 @@ class QueryRefreshResource(BaseResource):
 
         parameter_values = collect_parameters_from_request(request.args)
         parameterized_query = ParameterizedQuery(query.query_text, org=self.current_org)
+        should_apply_auto_limit = query.options.get("apply_auto_limit", False)
         return run_query(
-            parameterized_query, parameter_values, query.data_source, query.id
+            parameterized_query, parameter_values, query.data_source, query.id, should_apply_auto_limit
        )
 
 
@@ -20,7 +20,6 @@ from redash.tasks import Job
 from redash.tasks.queries import enqueue_query
 from redash.utils import (
     collect_parameters_from_request,
-    gen_query_hash,
     json_dumps,
     utcnow,
     to_filename,
@@ -61,7 +60,7 @@ error_messages = {
 }
 
 
-def run_query(query, parameters, data_source, query_id, max_age=0):
+def run_query(query, parameters, data_source, query_id, should_apply_auto_limit, max_age=0):
     if data_source.paused:
         if data_source.pause_reason:
             message = "{} is paused ({}). Please try later.".format(
@@ -77,6 +76,8 @@ def run_query(query, parameters, data_source, query_id, max_age=0):
     except (InvalidParameterError, QueryDetachedFromDataSourceError) as e:
         abort(400, message=str(e))
 
+    query_text = data_source.query_runner.apply_auto_limit(query.text, should_apply_auto_limit)
+
     if query.missing_params:
         return error_response(
             "Missing parameter value for: {}".format(", ".join(query.missing_params))
@@ -85,7 +86,7 @@ def run_query(query, parameters, data_source, query_id, max_age=0):
     if max_age == 0:
         query_result = None
     else:
-        query_result = models.QueryResult.get_latest(data_source, query.text, max_age)
+        query_result = models.QueryResult.get_latest(data_source, query_text, max_age)
 
     record_event(
         current_user.org,
@@ -95,7 +96,7 @@ def run_query(query, parameters, data_source, query_id, max_age=0):
             "cache": "hit" if query_result else "miss",
             "object_id": data_source.id,
             "object_type": "data_source",
-            "query": query.text,
+            "query": query_text,
             "query_id": query_id,
             "parameters": parameters,
         },
@@ -109,7 +110,7 @@ def run_query(query, parameters, data_source, query_id, max_age=0):
         }
     else:
         job = enqueue_query(
-            query.text,
+            query_text,
             data_source,
             current_user.id,
             current_user.is_api_user(),
@@ -180,6 +181,7 @@ class QueryResultListResource(BaseResource):
         )
 
         parameterized_query = ParameterizedQuery(query, org=self.current_org)
+        should_apply_auto_limit = params.get("apply_auto_limit", False)
 
         data_source_id = params.get("data_source_id")
         if data_source_id:
@@ -193,7 +195,7 @@ class QueryResultListResource(BaseResource):
             return error_messages["no_permission"]
 
         return run_query(
-            parameterized_query, parameters, data_source, query_id, max_age
+            parameterized_query, parameters, data_source, query_id, should_apply_auto_limit, max_age
         )
 
 
@@ -286,6 +288,7 @@ class QueryResultResource(BaseResource):
             )
 
         allow_executing_with_view_only_permissions = query.parameterized.is_safe
+        should_apply_auto_limit = params.get("apply_auto_limit", False)
 
         if has_access(
             query, self.current_user, allow_executing_with_view_only_permissions
@@ -295,6 +298,7 @@ class QueryResultResource(BaseResource):
                 parameter_values,
                 query.data_source,
                 query_id,
+                should_apply_auto_limit,
                 max_age,
             )
         else:
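Taken together, the handler changes mean the execute endpoints accept one extra flag. A minimal client-side sketch of exercising it (the host and API key are placeholders; the payload shape follows the frontend QueryResult.get call above and the handler tests below):

import requests  # sketch only; any HTTP client works

resp = requests.post(
    "https://redash.example.com/api/query_results",  # placeholder host
    headers={"Authorization": "Key YOUR_API_KEY"},   # placeholder credentials
    json={
        "data_source_id": 1,
        "query": "SELECT * FROM events",
        "apply_auto_limit": True,  # server rewrites the final SELECT to append LIMIT 1000
        "max_age": 0,              # 0 forces execution instead of serving a cached result
    },
)
print(resp.json())  # either a cached "query_result" or an async "job" descriptor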
@@ -30,7 +30,7 @@ from redash.query_runner import (
     TYPE_BOOLEAN,
     TYPE_DATE,
     TYPE_DATETIME,
-)
+    BaseQueryRunner)
 from redash.utils import (
     generate_token,
     json_dumps,
|
|||||||
mustache_render,
|
mustache_render,
|
||||||
base_url,
|
base_url,
|
||||||
sentry,
|
sentry,
|
||||||
)
|
gen_query_hash)
|
||||||
from redash.utils.configuration import ConfigurationContainer
|
from redash.utils.configuration import ConfigurationContainer
|
||||||
from redash.models.parameterized_query import ParameterizedQuery
|
from redash.models.parameterized_query import ParameterizedQuery
|
||||||
|
|
||||||
@@ -122,6 +122,7 @@ class DataSource(BelongsToOrgMixin, db.Model):
             "syntax": self.query_runner.syntax,
             "paused": self.paused,
             "pause_reason": self.pause_reason,
+            "supports_auto_limit": self.query_runner.supports_auto_limit
         }
 
         if all:
@@ -358,7 +359,7 @@ class QueryResult(db.Model, QueryResultPersistence, BelongsToOrgMixin):
 
     @classmethod
     def get_latest(cls, data_source, query, max_age=0):
-        query_hash = utils.gen_query_hash(query)
+        query_hash = gen_query_hash(query)
 
         if max_age == -1:
             query = cls.query.filter(
@@ -864,11 +865,16 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
         api_keys = db.session.execute(query, {"id": self.id}).fetchall()
         return [api_key[0] for api_key in api_keys]
 
+    def update_query_hash(self):
+        should_apply_auto_limit = self.options.get("apply_auto_limit", False) if self.options else False
+        query_runner = self.data_source.query_runner if self.data_source else BaseQueryRunner({})
+        self.query_hash = query_runner.gen_query_hash(self.query_text, should_apply_auto_limit)
 
-@listens_for(Query.query_text, "set")
-def gen_query_hash(target, val, oldval, initiator):
-    target.query_hash = utils.gen_query_hash(val)
-    target.schedule_failures = 0
+
+@listens_for(Query, "before_insert")
+@listens_for(Query, "before_update")
+def receive_before_insert_update(mapper, connection, target):
+    target.update_query_hash()
 
 
 @listens_for(Query.user_id, "set")
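Note the shift in listener style: the old hook fired on every assignment to query_text, while the new mapper-level events fire once per flush, after the options (and thus apply_auto_limit) have settled. A minimal runnable sketch of the two styles, with illustrative names rather than redash's own:

import hashlib

from sqlalchemy import Column, Integer, Text, create_engine
from sqlalchemy.event import listens_for
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Note(Base):  # illustrative model, not a redash class
    __tablename__ = "notes"
    id = Column(Integer, primary_key=True)
    text = Column(Text)
    text_hash = Column(Text)

@listens_for(Note, "before_insert")
@listens_for(Note, "before_update")
def recompute_hash(mapper, connection, target):
    # Runs once per INSERT/UPDATE, after all attribute assignments have settled.
    target.text_hash = hashlib.sha1(target.text.encode()).hexdigest()

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as session:
    note = Note(text="select 1")
    note.text = "select 2"  # no hash work yet, unlike an attribute "set" listener
    session.add(note)
    session.commit()        # hash computed exactly once, here
    print(note.text_hash)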
@@ -9,8 +9,8 @@ from urllib.parse import urlparse
 
 from six import text_type
 from sshtunnel import open_tunnel
-from redash import settings
-from redash.utils import json_loads
+from redash import settings, utils
+from redash.utils import json_loads, query_is_select_no_limit, add_limit_to_query
 from rq.timeouts import JobTimeoutException
 
 from redash.utils.requests_session import requests, requests_session
@@ -190,6 +190,17 @@ class BaseQueryRunner(object):
             **({"deprecated": True} if cls.deprecated else {}),
         }
 
+    @property
+    def supports_auto_limit(self):
+        return False
+
+    def apply_auto_limit(self, query_text, should_apply_auto_limit):
+        return query_text
+
+    def gen_query_hash(self, query_text, set_auto_limit=False):
+        query_text = self.apply_auto_limit(query_text, set_auto_limit)
+        return utils.gen_query_hash(query_text)
+
 
 class BaseSQLQueryRunner(BaseQueryRunner):
     def get_schema(self, get_stats=False):
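Because the hash is computed over the rewritten text, toggling auto-limit changes the cache key for SQL runners but not for others. A minimal sketch mirroring the assertions in the new test_basesql_queryrunner.py further down (assumes a redash checkout on the path):

from redash.query_runner import BaseQueryRunner, BaseSQLQueryRunner
from redash.utils import gen_query_hash

base_runner = BaseQueryRunner({})  # apply_auto_limit() is a no-op here
sql_runner = BaseSQLQueryRunner({})

# Non-SQL runners: the flag never changes the hash.
assert base_runner.gen_query_hash("select *", True) == gen_query_hash("select *")

# SQL runners: the LIMIT is appended before hashing, so the cache key changes.
assert sql_runner.gen_query_hash("select *", True) == gen_query_hash("select * LIMIT 1000")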
@@ -208,6 +219,22 @@ class BaseSQLQueryRunner(BaseQueryRunner):
                 res = self._run_query_internal("select count(*) as cnt from %s" % t)
                 tables_dict[t]["size"] = res[0]["cnt"]
 
+    @property
+    def supports_auto_limit(self):
+        return True
+
+    def apply_auto_limit(self, query_text, should_apply_auto_limit):
+        if should_apply_auto_limit:
+            from redash.query_runner.databricks import split_sql_statements, combine_sql_statements
+            queries = split_sql_statements(query_text)
+            # we only check for last one in the list because it is the one that we show result
+            last_query = queries[-1]
+            if query_is_select_no_limit(last_query):
+                queries[-1] = add_limit_to_query(last_query)
+            return combine_sql_statements(queries)
+        else:
+            return query_text
+
 
 def is_private_address(url):
     hostname = urlparse(url).hostname
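The rewrite only ever touches the final statement of a script, and leaves queries that already paginate alone. A short behavior sketch, taken directly from the new unit tests below:

from redash.query_runner import BaseSQLQueryRunner

runner = BaseSQLQueryRunner({})

# Only the last statement of a multi-statement script gets the limit.
assert runner.apply_auto_limit("use database demo;\nselect * from data", True) == \
    "use database demo;\nselect * from data LIMIT 1000"

# An existing LIMIT (or a non-SELECT final statement) is left untouched.
assert runner.apply_auto_limit("SELECT 2 LIMIT 100", True) == "SELECT 2 LIMIT 100"

# With the flag off, the text passes through unchanged.
assert runner.apply_auto_limit("SELECT 2", False) == "SELECT 2"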
@@ -21,7 +21,6 @@ try:
 except ImportError:
     enabled = False
 
-
 TYPES_MAP = {
     str: TYPE_STRING,
     bool: TYPE_BOOLEAN,
@@ -83,6 +82,10 @@ def split_sql_statements(query):
     return [""]  # if all statements were empty - return a single empty statement
 
 
+def combine_sql_statements(queries):
+    return ";\n".join(queries)
+
+
 class Databricks(BaseSQLQueryRunner):
     noop_query = "SELECT 1"
     should_annotate_query = False
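combine_sql_statements is the inverse of the existing split_sql_statements used by apply_auto_limit above. A round-trip sketch (that the split drops statement-separating semicolons is inferred from the multi-statement tests below, not shown in this hunk):

from redash.query_runner.databricks import combine_sql_statements, split_sql_statements

parts = split_sql_statements("select * from table1;\nselect * from table2")
# inferred: parts == ["select * from table1", "select * from table2"]
print(combine_sql_statements(parts))  # select * from table1;\nselect * from table2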
@@ -76,6 +76,11 @@ class RefreshQueriesError(Exception):
     pass
 
 
+def _apply_auto_limit(query_text, query):
+    should_apply_auto_limit = query.options.get("apply_auto_limit", False)
+    return query.data_source.query_runner.apply_auto_limit(query_text, should_apply_auto_limit)
+
+
 def refresh_queries():
     logger.info("Refreshing queries...")
     enqueued = []
@@ -84,8 +89,10 @@ def refresh_queries():
                 continue
 
             try:
+                query_text = _apply_default_parameters(query)
+                query_text = _apply_auto_limit(query_text, query)
                 enqueue_query(
-                    _apply_default_parameters(query),
+                    query_text,
                     query.data_source,
                     query.user_id,
                     scheduled_query=query,
@@ -13,6 +13,7 @@ import binascii
 import pystache
 import pytz
 import simplejson
+import sqlparse
 from flask import current_app
 from funcy import select_values
 from redash import settings
@@ -20,7 +21,6 @@ from sqlalchemy.orm.query import Query
 
 from .human_time import parse_human_time
 
-
 COMMENTS_REGEX = re.compile("/\*.*?\*/")
 WRITER_ENCODING = os.environ.get("REDASH_CSV_WRITER_ENCODING", "utf-8")
 WRITER_ERRORS = os.environ.get("REDASH_CSV_WRITER_ERRORS", "strict")
@@ -71,7 +71,6 @@ def generate_token(length):
 class JSONEncoder(simplejson.JSONEncoder):
     """Adapter for `simplejson.dumps`."""
 
-
     def default(self, o):
         # Some SQLAlchemy collections are lazy.
         if isinstance(o, Query):
@@ -213,3 +212,33 @@ def render_template(path, context):
     function decorated with the `context_processor` decorator, which is not explicitly required for rendering purposes.
     """
     current_app.jinja_env.get_template(path).render(**context)
+
+
+def query_is_select_no_limit(query):
+    parsed_query = sqlparse.parse(query)[0]
+    last_keyword_idx = find_last_keyword_idx(parsed_query)
+    # Either invalid query or query that is not select
+    if last_keyword_idx == -1 or parsed_query.tokens[0].value.upper() != "SELECT":
+        return False
+
+    no_limit = parsed_query.tokens[last_keyword_idx].value.upper() != "LIMIT" \
+        and parsed_query.tokens[last_keyword_idx].value.upper() != "OFFSET"
+    return no_limit
+
+
+def find_last_keyword_idx(parsed_query):
+    for i in reversed(range(len(parsed_query.tokens))):
+        if parsed_query.tokens[i].ttype in sqlparse.tokens.Keyword:
+            return i
+    return -1
+
+
+def add_limit_to_query(query):
+    parsed_query = sqlparse.parse(query)[0]
+    limit_tokens = sqlparse.parse(" LIMIT 1000")[0].tokens
+    length = len(parsed_query.tokens)
+    if parsed_query.tokens[length - 1].ttype == sqlparse.tokens.Punctuation:
+        parsed_query.tokens[length - 1:length - 1] = limit_tokens
+    else:
+        parsed_query.tokens += limit_tokens
+    return str(parsed_query)
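These helpers lean on sqlparse token types: each top-level token carries a ttype, and both SELECT (Keyword.DML) and FROM/LIMIT/OFFSET (Keyword) match `ttype in sqlparse.tokens.Keyword`. A standalone look at what find_last_keyword_idx scans:

import sqlparse

stmt = sqlparse.parse("SELECT * FROM events LIMIT 5")[0]
keywords = [tok.value for tok in stmt.tokens if tok.ttype in sqlparse.tokens.Keyword]
print(keywords)  # ['SELECT', 'FROM', 'LIMIT'] -> last keyword is LIMIT, so no rewrite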
@@ -1,3 +1,4 @@
+from redash.query_runner import BaseSQLQueryRunner, BaseQueryRunner
 from tests import BaseTestCase
 
 from redash.models import db
@@ -76,6 +77,47 @@ class TestQueryResultListAPI(BaseTestCase):
         self.assertNotIn("query_result", rv.json)
         self.assertIn("job", rv.json)
 
+    def test_add_limit_change_query_sql(self):
+        ds = self.factory.create_data_source(
+            group=self.factory.org.default_group, type="pg"
+        )
+        query = self.factory.create_query(query_text="SELECT 2", data_source=ds)
+        query_result = self.factory.create_query_result(data_source=ds, query_hash=query.query_hash)
+
+        rv = self.make_request(
+            "post",
+            "/api/query_results",
+            data={
+                "data_source_id": ds.id,
+                "query": query.query_text,
+                "apply_auto_limit": True
+            },
+        )
+
+        self.assertEqual(rv.status_code, 200)
+        self.assertNotIn("query_result", rv.json)
+        self.assertIn("job", rv.json)
+
+    def test_add_limit_no_change_for_nonsql(self):
+        ds = self.factory.create_data_source(
+            group=self.factory.org.default_group, type="prometheus"
+        )
+        query = self.factory.create_query(query_text="SELECT 5", data_source=ds)
+        query_result = self.factory.create_query_result(data_source=ds, query_hash=query.query_hash)
+
+        rv = self.make_request(
+            "post",
+            "/api/query_results",
+            data={
+                "data_source_id": ds.id,
+                "query": query.query_text,
+                "apply_auto_limit": True
+            },
+        )
+
+        self.assertEqual(rv.status_code, 200)
+        self.assertEqual(query_result.id, rv.json["query_result"]["id"])
+
     def test_execute_query_without_access(self):
         group = self.factory.create_group()
         db.session.commit()
New file: tests/query_runner/test_basesql_queryrunner.py (102 lines)
@@ -0,0 +1,102 @@
+import unittest
+
+from redash.query_runner import BaseSQLQueryRunner, BaseQueryRunner
+from redash.utils import gen_query_hash
+
+
+class TestBaseSQLQueryRunner(unittest.TestCase):
+
+    def setUp(self):
+        self.query_runner = BaseSQLQueryRunner({})
+
+    def test_apply_auto_limit_origin_no_limit_1(self):
+        origin_query_text = "SELECT 2"
+        query_text = self.query_runner.apply_auto_limit(origin_query_text, True)
+        self.assertEqual("SELECT 2 LIMIT 1000", query_text)
+
+    def test_apply_auto_limit_origin_have_limit_1(self):
+        origin_query_text = "SELECT 2 LIMIT 100"
+        query_text = self.query_runner.apply_auto_limit(origin_query_text, True)
+        self.assertEqual(origin_query_text, query_text)
+
+    def test_apply_auto_limit_origin_have_limit_2(self):
+        origin_query_text = "SELECT * FROM fake WHERE id IN (SELECT id FROM fake_2 LIMIT 200) LIMIT 200"
+        query_text = self.query_runner.apply_auto_limit(origin_query_text, True)
+        self.assertEqual(origin_query_text, query_text)
+
+    def test_apply_auto_limit_origin_no_limit_2(self):
+        origin_query_text = "SELECT * FROM fake WHERE id IN (SELECT id FROM fake_2 LIMIT 200)"
+        query_text = self.query_runner.apply_auto_limit(origin_query_text, True)
+        self.assertEqual(origin_query_text + " LIMIT 1000", query_text)
+
+    def test_apply_auto_limit_non_select_query(self):
+        origin_query_text = ("create table execution_times as "
+                             "(select id, retrieved_at, data_source_id, query, runtime, query_hash "
+                             "from query_results order by 1 desc)")
+        query_text = self.query_runner.apply_auto_limit(origin_query_text, True)
+        self.assertEqual(origin_query_text, query_text)
+
+    def test_apply_auto_limit_error_query(self):
+        origin_query_text = "dklsk jdhsajhdiwc kkdsakjdwi mdklsjal"
+        query_text = self.query_runner.apply_auto_limit(origin_query_text, True)
+        self.assertEqual(origin_query_text, query_text)
+
+    def test_apply_auto_limit_multi_query_add_limit_1(self):
+        origin_query_text = ("insert into execution_times (id, retrieved_at, data_source_id, query, runtime, query_hash) "
+                             "select id, retrieved_at, data_source_id, query, runtime, query_hash from query_results "
+                             "where id > (select max(id) from execution_times);\n"
+                             "select max(id), 'execution_times' as table_name from execution_times "
+                             "union all "
+                             "select max(id), 'query_results' as table_name from query_results")
+        query_text = self.query_runner.apply_auto_limit(origin_query_text, True)
+        self.assertEqual(origin_query_text + " LIMIT 1000", query_text)
+
+    def test_apply_auto_limit_multi_query_add_limit_2(self):
+        origin_query_text = "use database demo;\n" \
+                            "select * from data"
+        query_text = self.query_runner.apply_auto_limit(origin_query_text, True)
+        self.assertEqual(origin_query_text + " LIMIT 1000", query_text)
+
+    def test_apply_auto_limit_multi_query_end_with_punc(self):
+        origin_query_text = ("select * from table1;\n"
+                             "select * from table2")
+        query_text = self.query_runner.apply_auto_limit(origin_query_text, True)
+        self.assertEqual("select * from table1;\nselect * from table2 LIMIT 1000", query_text)
+
+    def test_apply_auto_limit_multi_query_last_not_select(self):
+        origin_query_text = ("select * from table1;\n"
+                             "CREATE TABLE Persons (PersonID int)")
+        query_text = self.query_runner.apply_auto_limit(origin_query_text, True)
+        self.assertEqual(origin_query_text, query_text)
+
+    def test_apply_auto_limit_last_command_comment(self):
+        origin_query_text = "select * from raw_events; # comment"
+        query_text = self.query_runner.apply_auto_limit(origin_query_text, True)
+        self.assertEqual("select * from raw_events LIMIT 1000", query_text)
+
+    def test_apply_auto_limit_last_command_comment_2(self):
+        origin_query_text = "select * from raw_events; -- comment"
+        query_text = self.query_runner.apply_auto_limit(origin_query_text, True)
+        self.assertEqual("select * from raw_events LIMIT 1000", query_text)
+
+    def test_apply_auto_limit_inline_comment(self):
+        origin_query_text = "select * from raw_events -- comment"
+        query_text = self.query_runner.apply_auto_limit(origin_query_text, True)
+        self.assertEqual("select * from raw_events LIMIT 1000", query_text)
+
+    def test_gen_query_hash_baseSQL(self):
+        origin_query_text = "select *"
+        expected_query_text = "select * LIMIT 1000"
+        base_runner = BaseQueryRunner({})
+        self.assertEqual(base_runner.gen_query_hash(expected_query_text),
+                         self.query_runner.gen_query_hash(origin_query_text, True))
+
+    def test_gen_query_hash_NoneSQL(self):
+        origin_query_text = "select *"
+        base_runner = BaseQueryRunner({})
+        self.assertEqual(gen_query_hash(origin_query_text),
+                         base_runner.gen_query_hash(origin_query_text, True))
+
+
+if __name__ == '__main__':
+    unittest.main()
@@ -7,14 +7,53 @@ ENQUEUE_QUERY = "redash.tasks.queries.maintenance.enqueue_query"
 
 
 class TestRefreshQuery(BaseTestCase):
-    def test_enqueues_outdated_queries(self):
+    def test_enqueues_outdated_queries_for_sqlquery(self):
         """
         refresh_queries() launches an execution task for each query returned
         from Query.outdated_queries().
         """
-        query1 = self.factory.create_query()
+        query1 = self.factory.create_query(options={"apply_auto_limit": True})
         query2 = self.factory.create_query(
-            query_text="select 42;", data_source=self.factory.create_data_source()
+            query_text="select 42;", data_source=self.factory.create_data_source(),
+            options={"apply_auto_limit": True}
+        )
+        oq = staticmethod(lambda: [query1, query2])
+        with patch(ENQUEUE_QUERY) as add_job_mock, patch.object(
+            Query, "outdated_queries", oq
+        ):
+            refresh_queries()
+            self.assertEqual(add_job_mock.call_count, 2)
+            add_job_mock.assert_has_calls(
+                [
+                    call(
+                        query1.query_text + " LIMIT 1000",
+                        query1.data_source,
+                        query1.user_id,
+                        scheduled_query=query1,
+                        metadata=ANY,
+                    ),
+                    call(
+                        "select 42 LIMIT 1000",
+                        query2.data_source,
+                        query2.user_id,
+                        scheduled_query=query2,
+                        metadata=ANY,
+                    ),
+                ],
+                any_order=True,
+            )
+
+    def test_enqueues_outdated_queries_for_non_sqlquery(self):
+        """
+        refresh_queries() launches an execution task for each query returned
+        from Query.outdated_queries().
+        """
+        ds = self.factory.create_data_source(
+            group=self.factory.org.default_group, type="prometheus"
+        )
+        query1 = self.factory.create_query(data_source=ds, options={"apply_auto_limit": True})
+        query2 = self.factory.create_query(
+            query_text="select 42;", data_source=ds, options={"apply_auto_limit": True}
         )
         oq = staticmethod(lambda: [query1, query2])
         with patch(ENQUEUE_QUERY) as add_job_mock, patch.object(
@@ -42,12 +81,40 @@ class TestRefreshQuery(BaseTestCase):
             any_order=True,
         )
 
-    def test_doesnt_enqueue_outdated_queries_for_paused_data_source(self):
+    def test_doesnt_enqueue_outdated_queries_for_paused_data_source_for_sqlquery(self):
         """
         refresh_queries() does not launch execution tasks for queries whose
         data source is paused.
         """
-        query = self.factory.create_query()
+        query = self.factory.create_query(options={"apply_auto_limit": True})
+        oq = staticmethod(lambda: [query])
+        query.data_source.pause()
+        with patch.object(Query, "outdated_queries", oq):
+            with patch(ENQUEUE_QUERY) as add_job_mock:
+                refresh_queries()
+                add_job_mock.assert_not_called()
+
+            query.data_source.resume()
+
+            with patch(ENQUEUE_QUERY) as add_job_mock:
+                refresh_queries()
+                add_job_mock.assert_called_with(
+                    query.query_text + " LIMIT 1000",
+                    query.data_source,
+                    query.user_id,
+                    scheduled_query=query,
+                    metadata=ANY,
+                )
+
+    def test_doesnt_enqueue_outdated_queries_for_paused_data_source_for_non_sqlquery(self):
+        """
+        refresh_queries() does not launch execution tasks for queries whose
+        data source is paused.
+        """
+        ds = self.factory.create_data_source(
+            group=self.factory.org.default_group, type="prometheus"
+        )
+        query = self.factory.create_query(data_source=ds, options={"apply_auto_limit": True})
         oq = staticmethod(lambda: [query])
         query.data_source.pause()
         with patch.object(Query, "outdated_queries", oq):
@@ -67,7 +134,7 @@ class TestRefreshQuery(BaseTestCase):
                     metadata=ANY,
                 )
 
-    def test_enqueues_parameterized_queries(self):
+    def test_enqueues_parameterized_queries_for_sqlquery(self):
         """
         Scheduled queries with parameters use saved values.
         """
@@ -82,10 +149,48 @@ class TestRefreshQuery(BaseTestCase):
                         "value": "42",
                         "title": "n",
                     }
-                ]
+                ],
+                "apply_auto_limit": True
             },
         )
         oq = staticmethod(lambda: [query])
+        with patch(ENQUEUE_QUERY) as add_job_mock, patch.object(
+            Query, "outdated_queries", oq
+        ):
+            refresh_queries()
+            add_job_mock.assert_called_with(
+                "select 42 LIMIT 1000",
+                query.data_source,
+                query.user_id,
+                scheduled_query=query,
+                metadata=ANY,
+            )
+
+    def test_enqueues_parameterized_queries_for_non_sqlquery(self):
+        """
+        Scheduled queries with parameters use saved values.
+        """
+        ds = self.factory.create_data_source(
+            group=self.factory.org.default_group, type="prometheus"
+        )
+        query = self.factory.create_query(
+            query_text="select {{n}}",
+            options={
+                "parameters": [
+                    {
+                        "global": False,
+                        "type": "text",
+                        "name": "n",
+                        "value": "42",
+                        "title": "n",
+                    }
+                ],
+                "apply_auto_limit": True
+            },
+            data_source=ds,
+        )
+        oq = staticmethod(lambda: [query])
         with patch(ENQUEUE_QUERY) as add_job_mock, patch.object(
             Query, "outdated_queries", oq
         ):
@@ -113,7 +218,8 @@ class TestRefreshQuery(BaseTestCase):
                         "value": 42,  # <-- should be text!
                         "title": "n",
                     }
-                ]
+                ],
+                "apply_auto_limit": True
             },
         )
         oq = staticmethod(lambda: [query])
@@ -140,7 +246,8 @@ class TestRefreshQuery(BaseTestCase):
                         "queryId": 100,
                         "title": "n",
                     }
-                ]
+                ],
+                "apply_auto_limit": True
             },
         )
@@ -4,6 +4,7 @@ from unittest import TestCase
 
 import pytz
 from dateutil.parser import parse as date_parse
+
 from tests import BaseTestCase
 
 from redash import models, redis_connection
@@ -471,6 +472,37 @@ class TestQueryAll(BaseTestCase):
         qs2 = base.order_by(models.User.name.desc())
         self.assertEqual(["bob", "alice"], [q.user.name for q in qs2])
 
+    def test_update_query_hash_basesql_with_options(self):
+        ds = self.factory.create_data_source(
+            group=self.factory.org.default_group, type="pg"
+        )
+        query = self.factory.create_query(query_text="SELECT 2", data_source=ds)
+        query.options = {"apply_auto_limit": True}
+        origin_hash = query.query_hash
+        query.update_query_hash()
+        self.assertNotEqual(origin_hash, query.query_hash)
+
+    def test_update_query_hash_basesql_no_options(self):
+        ds = self.factory.create_data_source(
+            group=self.factory.org.default_group, type="pg"
+        )
+        query = self.factory.create_query(query_text="SELECT 2", data_source=ds)
+        query.options = {}
+        origin_hash = query.query_hash
+        query.update_query_hash()
+        self.assertEqual(origin_hash, query.query_hash)
+
+    def test_update_query_hash_non_basesql(self):
+        ds = self.factory.create_data_source(
+            group=self.factory.org.default_group, type="prometheus"
+        )
+        query = self.factory.create_query(query_text="SELECT 2", data_source=ds)
+        query.options = {"apply_auto_limit": True}
+        origin_hash = query.query_hash
+        query.update_query_hash()
+        self.assertEqual(origin_hash, query.query_hash)
+
+
 class TestGroup(BaseTestCase):
     def test_returns_groups_with_specified_names(self):
New file: tests/utils/test_query_limit.py (41 lines)
@@ -0,0 +1,41 @@
+import unittest
+
+from redash.utils import query_is_select_no_limit, add_limit_to_query
+
+
+class TestQueryLimit(unittest.TestCase):
+    def test_check_query_limit_no_limit(self):
+        query = "SELECT *"
+        self.assertEqual(True, query_is_select_no_limit(query))
+
+    def test_check_query_limit_non_select(self):
+        query = "Create Table (PersonID INT)"
+        self.assertEqual(False, query_is_select_no_limit(query))
+
+    def test_check_query_limit_invalid_1(self):
+        query = "OFFSET 5"
+        self.assertEqual(False, query_is_select_no_limit(query))
+
+    def test_check_query_limit_invalid_2(self):
+        query = "TABLE A FROM TABLE B"
+        self.assertEqual(False, query_is_select_no_limit(query))
+
+    def test_check_query_with_limit(self):
+        query = "SELECT * LIMIT 5"
+        self.assertEqual(False, query_is_select_no_limit(query))
+
+    def test_check_query_with_offset(self):
+        query = "SELECT * LIMIT 5 OFFSET 3"
+        self.assertEqual(False, query_is_select_no_limit(query))
+
+    def test_add_limit_query_no_limit(self):
+        query = "SELECT *"
+        self.assertEqual("SELECT * LIMIT 1000", add_limit_to_query(query))
+
+    def test_add_limit_query_with_punc(self):
+        query = "SELECT *;"
+        self.assertEqual("SELECT * LIMIT 1000;", add_limit_to_query(query))
+
+
+if __name__ == '__main__':
+    unittest.main()