Mirror of https://github.com/getredash/redash.git (synced 2025-12-25 01:03:20 -05:00)
DAT-768
.gitignore (vendored), 4 changed lines
@@ -11,4 +11,6 @@ Berksfile.lock
 redash/dump.rdb
 .env
 .ruby-version
-venv
+venv
+Gemfile
+Gemfile.lock
@@ -339,22 +339,24 @@ class QueryResultListAPI(BaseResource):
         parsedQuery = parsedQuery[0]
 
-        if parsedQuery.get_type() != 'SELECT':
+        if len([x for x in parsedQuery.flatten() if x.ttype == sqlparse.tokens.DDL]):
             return {
                 'job': {
                     'error': 'Only SELECT statements are allowed'
                 }
             }
 
+        # Check the type of queries executed
+        for dml in [x for x in parsedQuery.flatten() if x.ttype == sqlparse.tokens.DML]:
+            if dml.normalized != 'SELECT':
+                return {
+                    'job': {
+                        'error': 'Only SELECT statements are allowed'
+                    }
+                }
+
-        # Get table identifier
-        parsedQueryTable = [t.value for t in parsedQuery[0].tokens if isinstance(t, sqlparse.sql.Identifier)]
-
-        # Get list of table identifiers
-        parsedQueryTableList = list(itertools.chain(*[[t.value for t in i.tokens if isinstance(t, sqlparse.sql.Identifier)] for i in parsedQuery[0].tokens if isinstance(i, sqlparse.sql.IdentifierList)]))
-
-        # Merge the table indentifiers
-        parsedTables = parsedQueryTable + parsedQueryTableList
-
+        parsedTables = utils.extract_table_names(parsedQuery.tokens)
         allowedTables = list(set(itertools.chain(*[g.tables for g in models.Group.select().where(models.Group.name << self.current_user.groups)])))
 
         for table in parsedTables:
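Not part of the commit, but a minimal sketch of what the new checks above accept and reject; the helper name is_select_only and the sample queries are illustrative only, assuming sqlparse is installed:

import sqlparse

def is_select_only(sql):
    parsed = sqlparse.parse(sql)[0]
    # Any DDL token (CREATE, DROP, ALTER, ...) is rejected outright.
    if any(t.ttype == sqlparse.tokens.DDL for t in parsed.flatten()):
        return False
    # Any DML token other than SELECT (INSERT, UPDATE, DELETE, ...) is rejected too.
    for t in parsed.flatten():
        if t.ttype == sqlparse.tokens.DML and t.normalized != 'SELECT':
            return False
    return True

print(is_select_only("SELECT id FROM events"))            # True
print(is_select_only("DROP TABLE events"))                 # False, DDL token
print(is_select_only("DELETE FROM events WHERE id = 1"))   # False, non-SELECT DML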
@@ -6,9 +6,22 @@ import datetime
 import json
 import re
 import hashlib
+import sqlparse
 
 COMMENTS_REGEX = re.compile("/\*.*?\*/")
 
+
+def extract_table_names(tokens, tables = set()):
+    tokens = [t for t in tokens if t.ttype not in (sqlparse.tokens.Whitespace, sqlparse.tokens.Newline)]
+
+    for i in range(len(tokens)):
+        if tokens[i].is_group():
+            tables.update(extract_table_names(tokens[i].tokens))
+        else:
+            if tokens[i].ttype == sqlparse.tokens.Keyword \
+                    and tokens[i].normalized in ['FROM', 'JOIN', 'LEFT JOIN', 'FULL JOIN', 'RIGHT JOIN', 'CROSS JOIN', 'INNER JOIN', 'OUTER JOIN', 'LEFT OUTER JOIN', 'RIGHT OUTER JOIN', 'FULL OUTER JOIN'] \
+                    and isinstance(tokens[i + 1], sqlparse.sql.Identifier):
+                tables.add(tokens[i + 1].value)
+    return tables
 
 
 def gen_query_hash(sql):
     """Returns hash of the given query after stripping all comments, line breaks and multiple